Compare commits
1583 commits (web/config...docs-scrip)

SHA1 | Author | Date | |
---|---|---|---|
78bc5687ef | |||
52b1e8de13 | |||
73a9957925 | |||
90824b966f | |||
4c9f977361 | |||
9143197ef5 | |||
e20eaac56e | |||
085ab3c2dd | |||
c0063c1749 | |||
ee58cf0c1c | |||
1f2654f25f | |||
1dec9bde3c | |||
259537ee34 | |||
303ba13791 | |||
7a74baaf8e | |||
e0a2bd2cd5 | |||
7d3dda171d | |||
ad2c2eecb8 | |||
df3de9218c | |||
6f8460c4fe | |||
68172c388f | |||
d4f0aba430 | |||
ef78ee3440 | |||
1cf4ccdde5 | |||
11d41a2ee4 | |||
8db1d86c6b | |||
8f7fe8e744 | |||
e0dbd8c53f | |||
ba38fb6ff6 | |||
639f7d53c1 | |||
de9d8a40f6 | |||
9e32c78c8c | |||
91cb2ef883 | |||
ad5e447b3d | |||
d5ce530179 | |||
f6a9773930 | |||
7721d4d0c2 | |||
a5e45ba78e | |||
96f04d32ea | |||
d7bac37ba8 | |||
c3509382de | |||
c09d36a1f7 | |||
db2b5f035f | |||
2a74225775 | |||
b8188708dc | |||
2a5a76e369 | |||
4d3ec68494 | |||
b8560f2a86 | |||
f312250dc9 | |||
aef8b49f6b | |||
2ae08aa1c6 | |||
a8488ee35a | |||
a3e015f835 | |||
0f36defbb9 | |||
8de811a56f | |||
7328b8e3a6 | |||
6312a20bcc | |||
631537851e | |||
b93759f318 | |||
9ca3ff9e49 | |||
6aefe2d143 | |||
5c8a9abb33 | |||
d6e7cb2cff | |||
50d66eda9e | |||
de37748f17 | |||
c579b5c36c | |||
07040bb344 | |||
59ffcdd34d | |||
d30b87a2bb | |||
f3d5e659eb | |||
95463dd76a | |||
c702b0fd07 | |||
3d6e1f9d33 | |||
bfc2fe7703 | |||
14e8d1d0f4 | |||
5103eebfad | |||
210befd1ea | |||
37fe20e576 | |||
26732389a1 | |||
0fe751269e | |||
a824fda339 | |||
d8fefb1c7b | |||
f4d8191dec | |||
3338a79ef0 | |||
9c9581a3de | |||
fbad6fb2a4 | |||
24e74e0263 | |||
763a19b914 | |||
b8006068e9 | |||
16c7155268 | |||
ef2fb6d5fc | |||
bac0237ce0 | |||
038170314d | |||
d198176ff9 | |||
186d023c94 | |||
cc85806b39 | |||
c345a77c1a | |||
11d0acf9dd | |||
2fc0c6e0ad | |||
6f31c802c2 | |||
2de2189985 | |||
e4c8e30814 | |||
8bd514e17d | |||
d758e6065b | |||
42e0ff6492 | |||
1789ed67e1 | |||
56d02e94f1 | |||
7b64620615 | |||
fc39323ea0 | |||
00427b20e8 | |||
18e3e7c875 | |||
c7d8bbace0 | |||
07ad795972 | |||
77a83d75b7 | |||
c846c8089a | |||
453f7b8641 | |||
cb7b44e751 | |||
e9c79df56b | |||
82bf113b5e | |||
e5c41fc928 | |||
d30defc6fa | |||
c76313a5aa | |||
31b62f6b3b | |||
7e78fe5802 | |||
bde509a5b9 | |||
0510200d8c | |||
6c96bc7dfe | |||
29f3e2789d | |||
f4ef8f5739 | |||
b94db1d10d | |||
dc93a941fb | |||
e6012fe2d4 | |||
90fb64006d | |||
b50cdd674f | |||
24910d1b4d | |||
8f26708bde | |||
dca3391554 | |||
7392065e35 | |||
52388dadf5 | |||
c517d4690a | |||
136b7dea54 | |||
cb178f7f2d | |||
2d565f1943 | |||
98c8402f11 | |||
8915904cc7 | |||
ca960f3286 | |||
7c5314eb66 | |||
97a8d8ba60 | |||
4f06ecbee6 | |||
1b19b3dd11 | |||
9458c486c5 | |||
1fe0f474bd | |||
326b89fe88 | |||
755851d774 | |||
17a58a1095 | |||
361765cd8d | |||
8adbba606c | |||
cb4bc5bd87 | |||
861992f576 | |||
f827c269da | |||
80072f3781 | |||
e0ac3dedf4 | |||
f220060ea2 | |||
7c22e9cfa9 | |||
e7241927e5 | |||
059f81735e | |||
eff85e489c | |||
0caa8cf0fa | |||
778bd9ba63 | |||
632f098a34 | |||
40b15ded2a | |||
17acc9457d | |||
843431cfed | |||
7cadcbfa06 | |||
2996f20b74 | |||
d575eb8dcc | |||
dd86a90225 | |||
db5168480c | |||
3b1034b9a2 | |||
3086d15f08 | |||
a1153d3393 | |||
f40feaf95f | |||
ba87fd8714 | |||
ccebe355aa | |||
94de772a3d | |||
5f4e49f586 | |||
6bd1b9fa32 | |||
22215bc887 | |||
9b618ca54b | |||
cc18f352aa | |||
49fe670932 | |||
422eb0890c | |||
8626d7a7aa | |||
53a44435f8 | |||
f1d173f94e | |||
e25ceb5041 | |||
72a6f369ef | |||
ef16328005 | |||
89fafbd6ad | |||
d8a855e16d | |||
f9cd3b9b34 | |||
909c227894 | |||
0690dffa58 | |||
48ff62d502 | |||
988131fb57 | |||
19e0a282c6 | |||
234f06a362 | |||
b3340507a2 | |||
94482daa97 | |||
942019d31f | |||
54743e8187 | |||
833d20349e | |||
1b2816eb14 | |||
e6fd7cc1fc | |||
0bbbc7def2 | |||
43fd3eecda | |||
631b120e4f | |||
c7c7e28cef | |||
9ea517d606 | |||
5201a37214 | |||
a448107feb | |||
7b7a7e3073 | |||
1eb103994d | |||
ae97eac780 | |||
3c7abaef01 | |||
2def291773 | |||
7f17c5f212 | |||
2bd7b80579 | |||
340c3241e6 | |||
ca3cdc3fd2 | |||
c23ee42d9a | |||
0cc853e272 | |||
6e12277903 | |||
2c781ae423 | |||
81540fdae5 | |||
22a6acf56c | |||
29c0d1d5f4 | |||
2b17947b64 | |||
2f42144b33 | |||
eef02f2892 | |||
7c3eb96525 | |||
ff06214b0e | |||
bdde6775bc | |||
57399c2907 | |||
4df1b50542 | |||
c5e0e5e09d | |||
8d7305caec | |||
9448d06301 | |||
e8a1929993 | |||
9edece404c | |||
a636df2863 | |||
5ee24bd28b | |||
8e7ae07f13 | |||
40c1934ed9 | |||
d7e9c86672 | |||
016299dd41 | |||
b6157ecaf1 | |||
49e7c5b873 | |||
6a79b3a4af | |||
35cd126406 | |||
f89a4fc276 | |||
ea76295976 | |||
4d7f380b2d | |||
6e98c9a6a9 | |||
cb8379031a | |||
45bc16286c | |||
c0521762bc | |||
0c604ceba4 | |||
30e39c75ff | |||
6d7bebbcc3 | |||
dc332ec7b0 | |||
31e94a2814 | |||
eb08214f0e | |||
a5ab8a618e | |||
b8cbdcae22 | |||
ae86184511 | |||
b704388c2f | |||
a35f9fdd7b | |||
d95220be0e | |||
ba1b86efa1 | |||
cd93de1141 | |||
cc148bd552 | |||
8f82dac84e | |||
89c08f2588 | |||
113d6cc45f | |||
05cfbca5f2 | |||
385f9bcbac | |||
86bc5b4cdb | |||
8d5d9f35ef | |||
439169d5e7 | |||
e405b33f9f | |||
a9c13d4d10 | |||
12b16b17a2 | |||
3473abee32 | |||
b5a0b3a521 | |||
837a0325ca | |||
b1050e8825 | |||
7bb90b1661 | |||
8f755785ea | |||
adc0ec8ee4 | |||
9379b3d178 | |||
18eef8ae05 | |||
f0e22fd08b | |||
ce5297a6cd | |||
74c8df8782 | |||
2d897fac48 | |||
2eb47c9efa | |||
9852041f38 | |||
7f74936212 | |||
4f40b1e27c | |||
041e407153 | |||
31b891428e | |||
4ab8247847 | |||
af24edf8c1 | |||
f8bfd12e31 | |||
993fc4b77b | |||
3ee2be09bf | |||
b3e3948f44 | |||
117a5cd88d | |||
342a40212e | |||
15ae11d9d5 | |||
e11df56f21 | |||
3771af5282 | |||
0718053c56 | |||
3e5014bfea | |||
78f49ddc04 | |||
744bc54231 | |||
44ba30ad75 | |||
63b991f137 | |||
7f9acb8a2b | |||
ce3ba32044 | |||
25e6a69331 | |||
78d07cc355 | |||
3c4df47fe3 | |||
8ed1805cb8 | |||
4d23db73ca | |||
72783953fb | |||
3b0fdb3dbd | |||
23161eed12 | |||
8918427588 | |||
5d858020f6 | |||
198e8b98a8 | |||
88e9c9b669 | |||
0c652a210d | |||
105a90d2e7 | |||
68f5abe318 | |||
59d4c18636 | |||
b67e2a1144 | |||
fc025651ce | |||
7cedc840b0 | |||
5ba731e48b | |||
c792534a50 | |||
a136fd8b54 | |||
fb63c1f3e9 | |||
0b15ab3f27 | |||
f9fd67c2b8 | |||
4ac19f9d44 | |||
74d3e92bac | |||
207fa13405 | |||
208e4a8bed | |||
ee245ab390 | |||
60c8837082 | |||
6cf418a37e | |||
254761e930 | |||
d85f8758fe | |||
84bfb37b60 | |||
578ff13868 | |||
d12acb5bcd | |||
0e8b9a6409 | |||
6171443e61 | |||
5fedd616d9 | |||
5dd6498694 | |||
cf5102ed20 | |||
d3b2032c33 | |||
1e5df1c405 | |||
96eabe269c | |||
3e869a0ec7 | |||
7276a416f6 | |||
a989390533 | |||
562c52a48b | |||
c3cb9bc778 | |||
5f65a7c6cc | |||
95d26563e7 | |||
1cac1492d7 | |||
6c1ac48bd9 | |||
97f11f7aa8 | |||
6db763f7dc | |||
16b5f692ee | |||
80c1bd690c | |||
040dcaa9d6 | |||
66a16752e4 | |||
70c0e1be99 | |||
5beea4624f | |||
50fffa72cc | |||
dae4bf0d6b | |||
823851652e | |||
ae7f7c9930 | |||
5ce4ed4dd3 | |||
5582cc7745 | |||
c384ed5f52 | |||
02e2ba8971 | |||
925d5c80df | |||
1de69a7bd6 | |||
c6979a48e0 | |||
6e73d60305 | |||
f388cac07c | |||
cf593e5cb9 | |||
c3a98e5d5f | |||
1048729599 | |||
72442b37e5 | |||
211cdb3a21 | |||
4cca16750e | |||
b2d261dd1c | |||
0663100429 | |||
66c3261eeb | |||
bf7570bc36 | |||
20b52d0dbd | |||
a1f5e284c4 | |||
0e4737d38f | |||
609b10f7f8 | |||
2cff3d15e7 | |||
4f1d49417c | |||
0766a47b4f | |||
bd1ddfebd6 | |||
a841743c74 | |||
0974456ac8 | |||
d44d5a44a1 | |||
edf5c8686a | |||
70ace8b209 | |||
c3509e63af | |||
89b8206176 | |||
908d87c142 | |||
4ab4e81fb0 | |||
6dae1a4fe7 | |||
d11de73e95 | |||
b08fb5fdf1 | |||
3c9e8c7287 | |||
691d0be41e | |||
dfbaccbab6 | |||
f3bdb189f6 | |||
85b3523639 | |||
9ff61a7120 | |||
f742b986a7 | |||
177bdfa689 | |||
c3445374c2 | |||
c2da6822dc | |||
493294ef9f | |||
17f807e8b0 | |||
96eb98500c | |||
ddd75f6d09 | |||
fbad02fac1 | |||
fbab822db1 | |||
d8316eea9b | |||
8182c9f7c2 | |||
5d94b97e97 | |||
35ddbb6d75 | |||
2b8bc38fc3 | |||
9b0b504531 | |||
c312430007 | |||
4e65c205e3 | |||
372a66c876 | |||
3630349388 | |||
347746cbcd | |||
ef2e1ad27b | |||
8a6b34eb5c | |||
26f72bcac4 | |||
f04466b3be | |||
4ba53d2f08 | |||
7a13046a27 | |||
939e2c1edd | |||
cf06b4177a | |||
f8079d63fa | |||
576a56c562 | |||
cf9b14213e | |||
73cbdb77ed | |||
fd66be9fa2 | |||
96bf9ee898 | |||
6c4c535d57 | |||
0ed4bba5a5 | |||
6e31e5b889 | |||
a5467c6e19 | |||
09832355e3 | |||
6ffef878f0 | |||
644090dc58 | |||
d07508b9a4 | |||
44d7e81a93 | |||
2e91b9d035 | |||
964c6a1050 | |||
90a1c5ab85 | |||
8162c1ec86 | |||
ab46610d9b | |||
6909b58279 | |||
6d7a06227f | |||
1459a13991 | |||
1921ce39f6 | |||
263cff6393 | |||
5a61688472 | |||
919b56c466 | |||
db7bc8b7ad | |||
5768cb5858 | |||
5b77bc33c7 | |||
93650e925a | |||
83823068fe | |||
d922f41438 | |||
ef3d545d7a | |||
e9efbb2161 | |||
a3634ab21d | |||
f28209548b | |||
9589e04072 | |||
6490703ba3 | |||
ca2fce05f5 | |||
9dc813d9ab | |||
833c66a9dd | |||
5d54f696d4 | |||
0746652995 | |||
a1a55c644a | |||
fce57d258e | |||
17e30b7adc | |||
c5b3f8a578 | |||
3a3619fa18 | |||
21b933efff | |||
97fc2cba69 | |||
7ef627d476 | |||
d16c603499 | |||
99a69bb52f | |||
ac8192d660 | |||
cdf3449230 | |||
ef2a40ed7d | |||
09cacbd76b | |||
cb33f0d1e2 | |||
90af4b29a6 | |||
6b9158591e | |||
9c15cda191 | |||
046b8d5cbf | |||
8b74b83983 | |||
8de038b387 | |||
2edc651582 | |||
85594a119c | |||
1a97ccea03 | |||
99ad492951 | |||
ff4ec6f9b4 | |||
0c49de67b8 | |||
0d73528ec7 | |||
80ca4e5722 | |||
1ebe200a46 | |||
5683c81f27 | |||
f0477309d5 | |||
04d613d213 | |||
b5928c2f7f | |||
c8e7247d2c | |||
ac6266a23a | |||
88213f67ee | |||
f8fd17f77e | |||
7f127ee515 | |||
ed214b4ac8 | |||
aeb1b450eb | |||
18b4b2d7b2 | |||
a140bad8fb | |||
bb1b8ab7bb | |||
6802614fbf | |||
619113e810 | |||
a8697bf1ad | |||
f52dec4b7e | |||
6560bf18a4 | |||
315cd40e6a | |||
a7a62b5005 | |||
37e3998211 | |||
31be26ebbd | |||
42b1cb06fb | |||
066ec35adf | |||
87a808a747 | |||
d8b1cd757e | |||
b1b9c8e0e5 | |||
a0a617055b | |||
9ec6f548a6 | |||
46980db582 | |||
d8fd1ddec6 | |||
74d29e2374 | |||
801a28ef65 | |||
3fff090612 | |||
b071d55b4d | |||
244cbc5b6d | |||
74da359dd5 | |||
56b73e3bd5 | |||
59e3c85568 | |||
746c933e63 | |||
f165bbca5d | |||
f335b08ec2 | |||
6e831a4253 | |||
6c1687c569 | |||
09c64e2354 | |||
0a312821ee | |||
06d1062423 | |||
dcfa3dc88a | |||
c45bb8e985 | |||
3e4fea875a | |||
c7670d271a | |||
570f3a4d42 | |||
3c54e94c6e | |||
26daaeb57d | |||
a60442fc2c | |||
8790f7059a | |||
49cf10e9bd | |||
13da6f5151 | |||
a1e0564f8f | |||
55f3664063 | |||
baabd8614f | |||
79df24f4eb | |||
f1afc4d263 | |||
643a256f01 | |||
b7f92ef0ea | |||
e33ca93f05 | |||
79af8b8638 | |||
d2b8bd3635 | |||
02e01559f4 | |||
b0c39e4843 | |||
039570a140 | |||
fdc7dedc58 | |||
098fcdeaf2 | |||
3cf9278bea | |||
13ccb352d7 | |||
c5b099856d | |||
6d912be7f6 | |||
0c54d266d3 | |||
c4784cf383 | |||
44ccbe2fdf | |||
d2615f0d6a | |||
5ab3cf4952 | |||
1926a472cd | |||
d220ca6bab | |||
759ea731bf | |||
e01fd5eb1a | |||
e716e24ec6 | |||
e9c84b8bfb | |||
130adf9d26 | |||
6aab505cd7 | |||
a9c597bc08 | |||
853239dff9 | |||
8f8c3e4944 | |||
dde9960b9c | |||
b1e48a6c1a | |||
b704e9031e | |||
15ef5dc792 | |||
6c4a1850b0 | |||
183d036f3c | |||
b324dc0ce2 | |||
6ad7be65ec | |||
8bf335a2a5 | |||
45709770f4 | |||
6158dd80ca | |||
468d26c587 | |||
c39a97ca58 | |||
8f0810ebb3 | |||
98e0f12d17 | |||
8d37e83df7 | |||
a306bb8384 | |||
c80116475b | |||
2997382df2 | |||
65e48907d3 | |||
1c4848ed8f | |||
64f7fa62dd | |||
16abaa8016 | |||
4cc4a3e4b8 | |||
8abe1f61ea | |||
6712095d7e | |||
5ab308bfd7 | |||
8b93fbcc69 | |||
f641670139 | |||
80af26ef50 | |||
64ce170882 | |||
b6171aa1a4 | |||
087582abbd | |||
6b6d88b81b | |||
55e5d36df5 | |||
fc43e841c9 | |||
895ed6fbdc | |||
f3965261c5 | |||
34ee6dc2b7 | |||
55fe4b0bc0 | |||
8d745609f9 | |||
55edb10da0 | |||
66e4b3af36 | |||
d44fc7790e | |||
291972628a | |||
019221c433 | |||
b99fa9f8f8 | |||
5bde2772c3 | |||
10884a7770 | |||
e858d09d28 | |||
856717395e | |||
b7793200de | |||
bcc0323523 | |||
643c1f5bbf | |||
1fca246839 | |||
b73e68a94c | |||
f9d3c4c9a7 | |||
53f8699deb | |||
6f3dc2eafd | |||
567ed07fe8 | |||
2999e9d006 | |||
b32a228e3a | |||
5a2dfb23c6 | |||
8ebce479bd | |||
81589e835e | |||
22b1f39b91 | |||
c25e982f1f | |||
d5c09fae8a | |||
bf15e04053 | |||
0932622567 | |||
0a5b8bea5d | |||
64d4a19ccf | |||
82875cfc0e | |||
83776b9f08 | |||
a742331484 | |||
2e9df96a62 | |||
9f5d7089c3 | |||
ddc78cc297 | |||
cb9b3407d8 | |||
d7b872c1e0 | |||
c35217f581 | |||
3b73a2eb9d | |||
3b94ffa705 | |||
936102f6d9 | |||
8c687d81aa | |||
01d7263484 | |||
49ac0eb662 | |||
8935ca65a7 | |||
58a374d1f1 | |||
f409831921 | |||
951acb26dd | |||
2df0c95806 | |||
f8d1b7b9b7 | |||
e092aabb21 | |||
48c59a815d | |||
9f40716a87 | |||
39da241298 | |||
a71a87fa3e | |||
176fe2f6fc | |||
4544f475c9 | |||
5bbf59b2bd | |||
1b2f1db711 | |||
14fab991b4 | |||
444e0642d0 | |||
89c841b530 | |||
d3ea465d86 | |||
5c38e03820 | |||
3a400da931 | |||
149481f787 | |||
ba368552f2 | |||
fbf0e4a966 | |||
2f7d9a44ad | |||
cad5ff36bd | |||
5024c757fa | |||
68af2e5352 | |||
d21c87eda0 | |||
f20bfc543c | |||
1e69cefd7c | |||
4e64258c91 | |||
4e63f9c250 | |||
0e2ac8afab | |||
33fa159dad | |||
0452eb3e5f | |||
05e3d8db83 | |||
1c9f86e172 | |||
48d1b289a3 | |||
4c23ef4e9e | |||
3c28cf1909 | |||
4a9c95b44e | |||
bcc8d5e76c | |||
85fedec2f6 | |||
bc9984f516 | |||
6ddfe1795c | |||
729ca4f65c | |||
af19b1633a | |||
e385b83318 | |||
b1c396b6d3 | |||
f11b7ca0d8 | |||
55a8db967a | |||
79744df87e | |||
0a662498a7 | |||
b1c38cbb07 | |||
8f2b7f2f49 | |||
52851046ce | |||
8e4929c3b0 | |||
6df28758f0 | |||
7ef14eb86d | |||
e03c25a600 | |||
45de247efd | |||
203dc88bb5 | |||
a01df92007 | |||
cb29d9456f | |||
5bf2bdbb48 | |||
0ad22f7e1c | |||
23c4e150cf | |||
afa1b27582 | |||
ee9d14d32b | |||
30982c833a | |||
ab3f0b50e1 | |||
49e1f4739e | |||
fc35497423 | |||
c379787a90 | |||
97bc679cbb | |||
0ea1c8f138 | |||
9d909a73b2 | |||
c89b7b74e0 | |||
6b629d8a9b | |||
75d602dd90 | |||
fd44bc2bec | |||
35448f6017 | |||
a70363bd95 | |||
40c672f246 | |||
2619562530 | |||
b7b9c521e5 | |||
9c7fd01117 | |||
a12bb1642d | |||
80e8de2548 | |||
0b80f1a7c7 | |||
2698d90637 | |||
633028c8ff | |||
155f1e50b4 | |||
a48e7c17ba | |||
cfb920114a | |||
0bfce6e29d | |||
5139bc9a80 | |||
d24fe25047 | |||
54387a7ab8 | |||
e1fd6cbd31 | |||
a030f04ccb | |||
9d6e58b3d8 | |||
728b64ffc3 | |||
284904a02a | |||
2ec8a445c3 | |||
16b8edd082 | |||
91012b577b | |||
5a5a2a5d69 | |||
d7750d34de | |||
c3ea09a2dd | |||
9f6dca1170 | |||
919a190971 | |||
e5d93a66cc | |||
a57d06b401 | |||
c2909dda1d | |||
a53f25c92f | |||
696f8afcd5 | |||
e2a81df152 | |||
1752c5437c | |||
54b951d3cc | |||
fcf752905b | |||
a4a5b97265 | |||
4087d6892f | |||
2b7e06bb25 | |||
1b2180818e | |||
8c23b390c2 | |||
677e5f3c84 | |||
2b860f19cb | |||
9a583dcd44 | |||
8aae51ab26 | |||
e769f7ee02 | |||
852f6f2819 | |||
6cb6db0bc4 | |||
2e0907af1e | |||
35f29656bc | |||
bb1f18d973 | |||
261eebe127 | |||
8f0ea8dcd9 | |||
eb36c8dd0c | |||
7026760327 | |||
b2349ee99f | |||
d185b3ad70 | |||
8ecf1cadf8 | |||
9961142ab1 | |||
ae299c55c7 | |||
72f60249b8 | |||
d6fed2eaaf | |||
b48ed34ac4 | |||
561691ac22 | |||
0d4511e74b | |||
4e7153c45f | |||
d0a6a067cc | |||
742beda313 | |||
70462c4c72 | |||
7ea721c487 | |||
4d8ee983ef | |||
0bbf37f535 | |||
8e847d05bd | |||
55612a1365 | |||
a1d8b2045d | |||
f1f4cd7d2c | |||
4350bcbe24 | |||
a899495978 | |||
6302ff23d2 | |||
2196bde820 | |||
a861030ecb | |||
c96e195666 | |||
d7d32075e7 | |||
f527700426 | |||
d2bf027c23 | |||
ac1f3332dc | |||
2c64f72ebc | |||
51a8670a13 | |||
b9f6cd9226 | |||
7010682122 | |||
0e82facfb4 | |||
afdff95453 | |||
b11f12b1db | |||
4df906e32c | |||
fee7abed7c | |||
d1a5d0dd7e | |||
d1e06b1c7e | |||
458b2b5c55 | |||
c0b1cd7674 | |||
8305a52ae2 | |||
b77cdfe96b | |||
0dcb261b4c | |||
46bddbf067 | |||
b8b6c0cd98 | |||
64fbbcf3e8 | |||
a4c6b76686 | |||
c8c7f77813 | |||
dde4314127 | |||
0b620f54f3 | |||
dc10ab0e66 | |||
8d92e3d78d | |||
ae66df6d9a | |||
ed3108fbd4 | |||
f2199f1712 | |||
e5810b31c5 | |||
d8b6a06522 | |||
c8ab6c728d | |||
e854623967 | |||
0b4822c1e3 | |||
fcb82c243f | |||
4415bee62a | |||
42b00110e7 | |||
0cce67dd15 | |||
f7a300fb30 | |||
ca260b700f | |||
8e9fbff5bd | |||
f2a8b82249 | |||
11a3cf84fa | |||
d506e5d50c | |||
7f8b8a7eb5 | |||
06af8e3a35 | |||
bf8c3078db | |||
15c7be1979 | |||
285dc8cff0 | |||
d7e399dbf9 | |||
1e25d3e3e9 | |||
d5c0a6e252 | |||
8a5aa9bf6f | |||
6584074b9c | |||
1d773dfc76 | |||
193b9e1ae8 | |||
32f95818db | |||
bcb7c72907 | |||
51a33f330c | |||
da2eddfb5a | |||
75e9a02bd2 | |||
af239027d5 | |||
6ce83e5271 | |||
c804a7e77d | |||
9d9acab603 | |||
8e42eb0546 | |||
f39c0e6bd9 | |||
81ac09695a | |||
26c5653182 | |||
0f7a3875f7 | |||
0036ecf956 | |||
96554de17a | |||
fabd1e39ae | |||
f992bfa8ff | |||
f1a04674fb | |||
ec4c31e37d | |||
ac520cd872 | |||
50e493d692 | |||
d49d8bc559 | |||
3e94b58afb | |||
1b81973358 | |||
880ca9a57d | |||
4d8d12f917 | |||
e78e4165da | |||
e4c7c24ae4 | |||
3b8daf7cc9 | |||
104e70c383 | |||
82ac7d195d | |||
d19d075326 | |||
ae03e4679e | |||
05b0e2c164 | |||
ff5680fb0e | |||
88cf0efb81 | |||
7783b200a3 | |||
d13954970e | |||
743a781eba | |||
f53f3c77be | |||
61b61ce960 | |||
09e6b80fd6 | |||
4cad5f7b40 | |||
3f43ff22a8 | |||
cf6bbbae70 | |||
ac1ef5139c | |||
ce0775239d | |||
56f267146f | |||
d98af5a0b1 | |||
3b3c874175 | |||
1f19e5cb3e | |||
f2062e75a1 | |||
ff5df458af | |||
6a8c5ca650 | |||
01a3516478 | |||
868ce06f67 | |||
e5b6dc5508 | |||
ee86322ab4 | |||
52d19bf4a6 | |||
fdcc1dcb36 | |||
5805ac83f7 | |||
772048092b | |||
be1219a73f | |||
9ab057fafc | |||
f9b6c8cef9 | |||
f159973d8b | |||
4a2f97710e | |||
735a8e77e2 | |||
e50cc20f76 | |||
5c19c6ea7f | |||
4c0b6c71ac | |||
cfc065b41b | |||
d81381bda6 | |||
6613553c13 | |||
9a304cc198 | |||
ebaec17703 | |||
6fcc06bfe0 | |||
2ba66f4f91 | |||
f9fc32e89c | |||
ee275d36bf | |||
ed39123f4e | |||
68726b0921 | |||
74a91aafe8 | |||
a15853ed55 | |||
7c51657aa1 | |||
86e9639d0c | |||
1620131ed5 | |||
743ee53bd9 | |||
7a04d97bdf | |||
6c99194f42 | |||
df8321c282 | |||
9bfbf0ed07 | |||
8f5606edbd | |||
a0f921398f | |||
bf287ab8c4 | |||
cec11f3843 | |||
f66bad43db | |||
b36ed44ca2 | |||
f5aca42e95 | |||
6e9ae69593 | |||
3c0cb1dd12 | |||
de56d02230 | |||
c04e8869f7 | |||
9d60d0b4c5 | |||
a42b181b76 | |||
24657797ad | |||
3981b55b40 | |||
d98471dbea | |||
9cd94f639c | |||
afd950c671 | |||
9328db4c19 | |||
7b40e23840 | |||
8ca7bdcd36 | |||
d51491e1eb | |||
6e1807e51d | |||
785ff6b3df | |||
408016a34e | |||
fc77fa68d1 | |||
48b24e5c65 | |||
b2045fd034 | |||
782e9fadb5 | |||
e48ac56cc5 | |||
f110eda465 | |||
e830d5dc7a | |||
2b1f8ac050 | |||
e8d5d678bf | |||
6df5de861c | |||
c35ae4af3e | |||
ae123a3364 | |||
e155aa5f3e | |||
6fa2765f55 | |||
ecb84dda46 | |||
62e58f2fe9 | |||
0a4e34a142 | |||
1be50bcdb2 | |||
f0c33ef1bf | |||
b059754fe5 | |||
cd4d6483c5 | |||
8dcccb4444 | |||
693da3ee62 | |||
5a9e1a0c94 | |||
a539e4b362 | |||
d13fb1d53d | |||
d9cb82ca6c | |||
8f231e5678 | |||
604242a76c | |||
0d0d33f104 | |||
58907a2b3f | |||
83f6ec86d4 | |||
f832f702cb | |||
2eb55696ed | |||
2ef31322c4 | |||
0d088ae198 | |||
a184240855 | |||
fdd941c84d | |||
419e0adff9 | |||
60a16aafbd | |||
1b24168791 | |||
8909c1e338 | |||
ea7c822d37 | |||
8c183a348f | |||
835208d616 | |||
6ec8143beb | |||
0f57ddefff | |||
dd37e8bf49 | |||
1f733b04f7 | |||
99c03d3073 | |||
dd3b440f8d | |||
feef105acf | |||
ed4154e62d | |||
e6c204cdba | |||
0e83d485a3 | |||
0000f26fee | |||
a74ab9d2c1 | |||
0ca96adaaf | |||
4cea9bfa3f | |||
59b5c21cf6 | |||
05fb11b1f0 | |||
17f9a48252 | |||
4b55746f6c | |||
9836dfcfd4 | |||
6501626692 | |||
184d65cc62 | |||
c93bb4708b | |||
e9d6da6c28 | |||
d7ed1a5d30 | |||
d29c3abc7d | |||
448e0fe067 | |||
faa02afae0 | |||
342eb03731 | |||
58388935b7 | |||
2e451f40e5 | |||
868229a044 | |||
73590572b0 | |||
5b6b059b40 | |||
060cea219b | |||
af9d82c02d | |||
cc8fb66da2 | |||
f0edc7b931 | |||
b39632abb0 | |||
c59b859ec0 | |||
a46939b591 | |||
bfb4a25026 | |||
646276b37c | |||
58f9d86d0b | |||
cf0a268fb1 | |||
ec783ae587 | |||
f50d44792c | |||
b225b0200e | |||
507f9b7ae2 | |||
5991b82cde | |||
f38bc8d09e | |||
9824f283de | |||
341d866c00 | |||
965ddcb564 | |||
a0a1a101e8 | |||
277c922ec3 | |||
f372627d61 | |||
1be86325d5 | |||
6d71454aa0 | |||
75d6aab0bb | |||
496dce093a | |||
f740ba0ffe | |||
a82af054a4 | |||
c80e3da644 | |||
af9bb566f8 | |||
5ca929417b | |||
3c1c44bda1 | |||
c05977f144 | |||
55333ef1ac | |||
49ad6d2aa8 | |||
b7e4373d6e | |||
699c074816 | |||
c26855f953 | |||
1457b38e7e | |||
55d08c5be3 | |||
ffbfbd43cb | |||
cb24fe5c5d | |||
aa81d8f12d | |||
2ee1a0241b | |||
89bc7a037d | |||
a21683555a | |||
5a98235ee0 | |||
3ce836fd8b | |||
5a5f7814ab | |||
907d475897 | |||
41503fc0b2 | |||
cfc7646a5a | |||
7103336456 | |||
48db4af56d | |||
8285b5d9a7 | |||
43218bd027 | |||
042fae143d | |||
f6f997525f | |||
753fb5e1b2 | |||
06a42df732 | |||
66a2a62c7b | |||
41bbbde232 | |||
373c0ff7d0 | |||
30345d450c | |||
b9dc83466d | |||
f26175a99f | |||
c7881e6eb4 | |||
97b98a4192 | |||
fc65d3f43a | |||
aa87695f3c | |||
c3fb84397a | |||
8d78cd97d0 | |||
24d2c4089c | |||
38f47c65a1 | |||
896096374c | |||
0e2326ed06 | |||
a07db454be | |||
87a4a81798 | |||
f0ee743ea1 | |||
fbac1e9d95 | |||
d8536ed78e | |||
848dae52ab | |||
f62a470dfa | |||
16a8409014 | |||
dfa5b8aba5 | |||
54270e960f | |||
6541b7fcef | |||
19af49a49b | |||
99e189cae3 | |||
6f68563df2 | |||
df03b2a156 | |||
e1211ba01b | |||
24ea3f0ee8 | |||
79045ab283 | |||
e27189364e | |||
ba224e4eb9 | |||
336950628e | |||
6ede552292 | |||
07b6356b38 | |||
4c5730a222 | |||
8ab84c8d91 | |||
89ef82337d | |||
babaeb2d0c | |||
52b8f24b75 | |||
464addfc8d | |||
8df73c2f6f | |||
9ab3971e63 | |||
09888cb89f | |||
2abcc9ce8f | |||
5b0e92f034 | |||
a3bfb3d25c | |||
2c1df6702c | |||
b999e23d27 | |||
e0db9f3ea1 | |||
dcc3ca664a | |||
7d37e3f668 | |||
e48f6bbec4 | |||
d27caaabc3 | |||
0dee706a87 | |||
7d527beea8 | |||
4733778460 | |||
c048f4a356 | |||
65e245c003 | |||
600d59ff58 | |||
703628f354 | |||
693de081ef | |||
f367249bab | |||
2841db082c | |||
ce24f974aa | |||
1f93e6fd3f | |||
7dfde9029f | |||
f5d62b828b | |||
703eb682b7 | |||
5cae3192b1 | |||
83e143032d | |||
e0e7cc24da | |||
8bc746d577 | |||
a84f403e79 | |||
e4f4482d2a | |||
844b4e96cd | |||
f3b4e03243 | |||
4f5e2a438e | |||
32c980e29e | |||
bd29392825 | |||
9756432876 | |||
8b2d1a9b21 | |||
adbd97323c | |||
77a8b2d751 | |||
08c850938b | |||
7db598c04e | |||
1ef224f5fd | |||
b01c48698d | |||
1546fa276a | |||
f50bd74b46 | |||
414a5c36c8 | |||
c4455b6915 | |||
9013caeab4 | |||
40a1e5a9b2 | |||
4dadcc1dfd | |||
0b8678f7ee | |||
aa8dc94a97 | |||
20996e994e | |||
db17f04830 | |||
b99fca62d8 | |||
8818ce3306 | |||
25d3f2e06e | |||
1537682026 | |||
ebd05be2c4 | |||
c90792d876 | |||
b92630804f | |||
1afd5ef95a | |||
e5cc2c6d98 | |||
84fdd4d737 | |||
5fe2772567 | |||
d2f9b66424 | |||
c9b39f2eba | |||
2ecc2119fc | |||
49b7ebdc53 | |||
70f72c524d | |||
87e0ac743a | |||
b5b8b0e9cd | |||
d10b358767 | |||
0887fa8fde | |||
799dd48861 | |||
919d1f349f | |||
a36b6e8315 | |||
69f9dfc9f6 | |||
27efe68f1c | |||
e9223618ba | |||
e9672a5285 | |||
7d724d9931 | |||
edcc6b2031 | |||
a71948c9b7 | |||
a395e347df | |||
f4b336a974 | |||
087d5aa7e7 | |||
3dc42bf542 | |||
cef1d2d0f1 | |||
961d28331b | |||
2bd2114617 | |||
9327ae8997 | |||
11e55a1b91 | |||
1bc6477f58 | |||
f2d2d84832 | |||
709436aab4 | |||
a315d1d966 | |||
81a22aed38 | |||
de4e18bf47 | |||
a25c00039d | |||
99076a3460 | |||
cf7094d00c | |||
51c4e88258 | |||
2dd69398c7 | |||
4a98493970 | |||
fbf06069f3 | |||
87ed848e56 | |||
8949464294 | |||
0b83d1008e | |||
cb72efb4c8 | |||
f0e4b742f8 | |||
df907c3ddd | |||
ee2f9eaf05 | |||
8739562367 | |||
ce4f086fe3 | |||
aa7dafe910 | |||
e8399b5657 | |||
2500cb2621 | |||
d20a4dc2b7 | |||
85776b254b | |||
1c84a20c43 | |||
c332f6f0ec | |||
be6c29d907 | |||
07ed5e1cd9 | |||
0052e60643 | |||
25e72558eb | |||
0413afc2a8 | |||
c953f5993d | |||
8124de52b0 | |||
dbf306672d | |||
50e7336720 | |||
f79054fe8c | |||
1db322b42f | |||
dca8c837f5 | |||
544373efd7 | |||
0c4dee8f9f | |||
09873558ae | |||
7e30ab54d6 | |||
c053eaf14d | |||
741a88ffa6 | |||
0b8af252b2 | |||
07cfabb7e6 | |||
5056233f9d | |||
6e5e82f8d0 | |||
725641270f | |||
dfc104d8eb | |||
dc24db43ff | |||
d1da50a479 | |||
11ca358242 | |||
85a8768424 | |||
7af35b56c1 | |||
b95d333179 | |||
d7f82ba31e | |||
eecb12285c | |||
699b7ed5c3 | |||
9d757a7345 | |||
03f64b60ec | |||
5f19fa7c84 | |||
5f4fb5e4ce | |||
ad0f49e1d2 | |||
68c1997837 | |||
4eceebaad8 | |||
2900504f27 | |||
645f662e3e | |||
5f1ba45966 | |||
dcbfe73891 | |||
2d62bad5fb | |||
ce1b2f4dcc | |||
d7b0d16830 | |||
be2e302104 | |||
7eb2de5821 | |||
b43341cfef | |||
45a3310320 | |||
cd04cb01cc | |||
96345b1626 | |||
035795d468 | |||
faa7f3768e | |||
b5203baba8 | |||
41f58fce5b | |||
6c8349a306 | |||
63acc24047 | |||
96b2a1a9ba | |||
c0562bf860 | |||
9ed2b74661 | |||
4f4815251e | |||
4184f8a770 | |||
a351e5e824 | |||
a92ff3b658 | |||
f0b38ce00e | |||
e8e6aa6617 | |||
2d36978eab | |||
28e4874434 | |||
1a919155cb | |||
a62cca36ad | |||
ff5d15b5e5 | |||
6844da4628 | |||
bf701c4f7b | |||
c03b9fdfeb | |||
830689f1cb | |||
862aece9fc | |||
45f9e6758b | |||
80b8404578 | |||
09ef9515af | |||
6303698ba8 | |||
abc0c2d2a2 | |||
73ddaf48be | |||
f8d992dedb | |||
53cbc64f9d | |||
af322b6de4 | |||
65d1042e04 | |||
2d8be43085 | |||
b1c7c228c3 | |||
6dd2fc3c45 | |||
2646469478 | |||
b16a0f48bd | |||
727e23419e | |||
f3d7343f93 | |||
2a87039a18 | |||
4509163e54 | |||
97c421f34b | |||
2bc865b024 | |||
0467df190e | |||
f3c9f5b2a3 | |||
ffa5eae47f | |||
fc25356fdf | |||
baee2572ec | |||
a9685adda5 | |||
8c1890ff25 | |||
05b0d474e0 | |||
5be9d1fc6a | |||
02791e765f | |||
abf1f0e348 | |||
86bf75fe54 | |||
b23a6d5359 | |||
212904537b | |||
c46cd5e7e5 | |||
480a765f5f | |||
65b6ea416c | |||
2f56c3cecf | |||
5c1432c670 | |||
c49e83a926 | |||
0dc2c46d49 | |||
245d8b7b5c | |||
5884af8af1 | |||
08d52be20d | |||
941f05e7fa | |||
493cefaa6e | |||
b75233cbf1 | |||
7c964e7058 | |||
8fe89133ac | |||
3295803b5c | |||
be65746ddb | |||
6a762906dc | |||
7448c69213 | |||
8026fa70ac | |||
e99bfaf17f | |||
210f27f71e | |||
11a51e1983 | |||
eb086af5e2 | |||
6b8e826c75 | |||
d8fd09345e | |||
3a068e9813 | |||
fadfd65136 | |||
fb896260c0 | |||
e8278c6da7 | |||
7c8f94bfe1 | |||
eeb9716173 | |||
73fb0551b1 | |||
379436655e | |||
2b1bb67844 | |||
b081d309c2 | |||
9a43dc9ad0 | |||
7c0a8f6b74 | |||
fc97777192 | |||
1bd6b33598 | |||
73702c23f7 | |||
40f74fc62a | |||
85f1584844 | |||
7c116acf0f | |||
85ebbcfb82 | |||
e51f900cdb | |||
134cd2dc81 | |||
7f70a86289 | |||
f77c2e8254 | |||
be66ee52cd | |||
d9eb4c5248 | |||
24c87a47a6 | |||
f44529769c | |||
4f739758e5 | |||
ec9908b317 | |||
38e1ad5ade | |||
7ea3830e56 | |||
7d75943cc2 | |||
6afa494b57 | |||
b0e74d348a | |||
0064c1df74 | |||
8831e1d946 | |||
d10ed974cb | |||
27ea604467 | |||
9b9d08c3db | |||
a65fde6374 | |||
391d477e45 | |||
d4fdfb70c1 | |||
2ce74ff9f0 | |||
3715a222fd | |||
e0370947ca | |||
fdefe23d46 | |||
42552e74eb | |||
66001a3e88 | |||
9de452853e | |||
50d3264be5 | |||
c78bb979ec | |||
6649f7ab72 | |||
9ecabe4629 | |||
6e83b890bd | |||
db95b967fe | |||
9ea41595b7 | |||
74b4fe5cee | |||
1ef654f19b | |||
f030128686 | |||
0fb7d1237f | |||
2497101c32 | |||
9a0feb1c35 | |||
ab411a6a9b |
.bumpversion.cfg
@@ -1,18 +1,30 @@
 [bumpversion]
-current_version = 2023.10.5
+current_version = 2024.6.1
 tag = True
 commit = True
-parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
-serialize = {major}.{minor}.{patch}
+parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
+serialize =
+    {major}.{minor}.{patch}-{rc_t}{rc_n}
+    {major}.{minor}.{patch}
 message = release: {new_version}
 tag_name = version/{new_version}

+[bumpversion:part:rc_t]
+values =
+    rc
+    final
+optional_value = final
+
 [bumpversion:file:pyproject.toml]

+[bumpversion:file:package.json]
+
 [bumpversion:file:docker-compose.yml]

 [bumpversion:file:schema.yml]

+[bumpversion:file:blueprints/schema.json]
+
 [bumpversion:file:authentik/__init__.py]

 [bumpversion:file:internal/constants/constants.go]
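The widened parse pattern and the two-line serialize list are what let bumpversion handle release-candidate versions such as 2024.6.1-rc1 alongside final releases. A minimal sketch (not part of the diff) that checks the new pattern against both forms; note the config escapes the digit class as `\\d`, while a plain `\d` is used here for a direct `re` test:

```python
# Sketch only: verify the new bumpversion parse pattern against both
# a final release and a release candidate. Group names match the cfg.
import re

PARSE = (
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

for candidate in ("2024.6.1", "2024.6.1-rc1"):
    match = re.fullmatch(PARSE, candidate)
    print(candidate, "->", match.groupdict() if match else "no match")
```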
.github/FUNDING.yml (2 changes)
@@ -1 +1 @@
-github: [BeryJu]
+custom: https://goauthentik.io/pricing/
.github/ISSUE_TEMPLATE/question.md (2 changes)
@@ -9,7 +9,7 @@ assignees: ""
 **Describe your question/**
 A clear and concise description of what you're trying to do.

-**Relevant infos**
+**Relevant info**
 i.e. Version of other software you're using, specifics of your setup

 **Screenshots**
@@ -9,9 +9,6 @@ inputs:
 runs:
   using: "composite"
   steps:
-    - name: Generate config
-      id: ev
-      uses: ./.github/actions/docker-push-variables
     - name: Find Comment
       uses: peter-evans/find-comment@v2
       id: fc
@@ -57,9 +54,10 @@ runs:
       authentik:
         outposts:
           container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
-        image:
-          repository: ghcr.io/goauthentik/dev-server
-          tag: ${{ inputs.tag }}
+        global:
+          image:
+            repository: ghcr.io/goauthentik/dev-server
+            tag: ${{ inputs.tag }}
       ```

       For arm64, use these values:
@@ -68,9 +66,10 @@ runs:
       authentik:
         outposts:
           container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
-        image:
-          repository: ghcr.io/goauthentik/dev-server
-          tag: ${{ inputs.tag }}-arm64
+        global:
+          image:
+            repository: ghcr.io/goauthentik/dev-server
+            tag: ${{ inputs.tag }}-arm64
       ```

       Afterwards, run the upgrade commands from the latest release notes.
.github/actions/docker-push-variables/action.yml (73 changes)
@@ -1,64 +1,47 @@
+---
 name: "Prepare docker environment variables"
 description: "Prepare docker environment variables"

+inputs:
+  image-name:
+    required: true
+    description: "Docker image prefix"
+  image-arch:
+    required: false
+    description: "Docker image arch"
+
 outputs:
   shouldBuild:
     description: "Whether to build image or not"
     value: ${{ steps.ev.outputs.shouldBuild }}
-  branchName:
-    description: "Branch name"
-    value: ${{ steps.ev.outputs.branchName }}
-  branchNameContainer:
-    description: "Branch name (for containers)"
-    value: ${{ steps.ev.outputs.branchNameContainer }}
-  timestamp:
-    description: "Timestamp"
-    value: ${{ steps.ev.outputs.timestamp }}
   sha:
     description: "sha"
     value: ${{ steps.ev.outputs.sha }}
-  shortHash:
-    description: "shortHash"
-    value: ${{ steps.ev.outputs.shortHash }}
   version:
-    description: "version"
+    description: "Version"
     value: ${{ steps.ev.outputs.version }}
-  versionFamily:
-    description: "versionFamily"
-    value: ${{ steps.ev.outputs.versionFamily }}
+  prerelease:
+    description: "Prerelease"
+    value: ${{ steps.ev.outputs.prerelease }}
+  imageTags:
+    description: "Docker image tags"
+    value: ${{ steps.ev.outputs.imageTags }}
+  imageMainTag:
+    description: "Docker image main tag"
+    value: ${{ steps.ev.outputs.imageMainTag }}

 runs:
   using: "composite"
   steps:
     - name: Generate config
       id: ev
-      shell: python
+      shell: bash
+      env:
+        IMAGE_NAME: ${{ inputs.image-name }}
+        IMAGE_ARCH: ${{ inputs.image-arch }}
+        PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
       run: |
-        """Helper script to get the actual branch name, docker safe"""
-        import configparser
-        import os
-        from time import time
-
-        parser = configparser.ConfigParser()
-        parser.read(".bumpversion.cfg")
-
-        branch_name = os.environ["GITHUB_REF"]
-        if os.environ.get("GITHUB_HEAD_REF", "") != "":
-            branch_name = os.environ["GITHUB_HEAD_REF"]
-
-        should_build = str(os.environ.get("DOCKER_USERNAME", "") != "").lower()
-        version = parser.get("bumpversion", "current_version")
-        version_family = ".".join(version.split(".")[:-1])
-        safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")
-
-        sha = os.environ["GITHUB_SHA"] if not "${{ github.event.pull_request.head.sha }}" else "${{ github.event.pull_request.head.sha }}"
-
-        with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
-            print("branchName=%s" % branch_name, file=_output)
-            print("branchNameContainer=%s" % safe_branch_name, file=_output)
-            print("timestamp=%s" % int(time()), file=_output)
-            print("sha=%s" % sha, file=_output)
-            print("shortHash=%s" % sha[:7], file=_output)
-            print("shouldBuild=%s" % should_build, file=_output)
-            print("version=%s" % version, file=_output)
-            print("versionFamily=%s" % version_family, file=_output)
+        python3 ${{ github.action_path }}/push_vars.py
.github/actions/docker-push-variables/push_vars.py (new file, 62 lines)
@@ -0,0 +1,62 @@
+"""Helper script to get the actual branch name, docker safe"""
+
+import configparser
+import os
+from time import time
+
+parser = configparser.ConfigParser()
+parser.read(".bumpversion.cfg")
+
+should_build = str(os.environ.get("DOCKER_USERNAME", None) is not None).lower()
+
+branch_name = os.environ["GITHUB_REF"]
+if os.environ.get("GITHUB_HEAD_REF", "") != "":
+    branch_name = os.environ["GITHUB_HEAD_REF"]
+safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-").replace("'", "-")
+
+image_names = os.getenv("IMAGE_NAME").split(",")
+image_arch = os.getenv("IMAGE_ARCH") or None
+
+is_pull_request = bool(os.getenv("PR_HEAD_SHA"))
+is_release = "dev" not in image_names[0]
+
+sha = os.environ["GITHUB_SHA"] if not is_pull_request else os.getenv("PR_HEAD_SHA")
+
+# 2042.1.0 or 2042.1.0-rc1
+version = parser.get("bumpversion", "current_version")
+# 2042.1
+version_family = ".".join(version.split("-", 1)[0].split(".")[:-1])
+prerelease = "-" in version
+
+image_tags = []
+if is_release:
+    for name in image_names:
+        image_tags += [
+            f"{name}:{version}",
+        ]
+        if not prerelease:
+            image_tags += [
+                f"{name}:latest",
+                f"{name}:{version_family}",
+            ]
+else:
+    suffix = ""
+    if image_arch and image_arch != "amd64":
+        suffix = f"-{image_arch}"
+    for name in image_names:
+        image_tags += [
+            f"{name}:gh-{sha}{suffix}",  # Used for ArgoCD and PR comments
+            f"{name}:gh-{safe_branch_name}{suffix}",  # For convenience
+            f"{name}:gh-{safe_branch_name}-{int(time())}-{sha[:7]}{suffix}",  # Use by FluxCD
+        ]
+
+image_main_tag = image_tags[0]
+image_tags_rendered = ",".join(image_tags)
+
+with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
+    print(f"shouldBuild={should_build}", file=_output)
+    print(f"sha={sha}", file=_output)
+    print(f"version={version}", file=_output)
+    print(f"prerelease={prerelease}", file=_output)
+    print(f"imageTags={image_tags_rendered}", file=_output)
+    print(f"imageMainTag={image_main_tag}", file=_output)
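The comments in push_vars.py show the intended shapes (2042.1.0-rc1 maps to family 2042.1). A standalone sketch of that derivation, assuming a hypothetical rc version, for checking the logic outside CI:

```python
# Sketch only: reproduces the version_family / prerelease derivation
# from push_vars.py for a hypothetical current_version value.
version = "2024.6.1-rc1"  # hypothetical, normally read from .bumpversion.cfg
version_family = ".".join(version.split("-", 1)[0].split(".")[:-1])
prerelease = "-" in version
print(version_family, prerelease)  # -> 2024.6 True
```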
.github/actions/docker-push-variables/test.sh (new executable file, 7 lines)
@@ -0,0 +1,7 @@
+#!/bin/bash -x
+SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+GITHUB_OUTPUT=/dev/stdout \
+GITHUB_REF=ref \
+GITHUB_SHA=sha \
+IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
+python $SCRIPT_DIR/push_vars.py
.github/actions/setup/action.yml (12 changes)
@@ -4,7 +4,7 @@ description: "Setup authentik testing environment"
 inputs:
   postgresql_version:
     description: "Optional postgresql image tag"
-    default: "12"
+    default: "16"

 runs:
   using: "composite"
@@ -16,25 +16,25 @@ runs:
         sudo apt-get update
         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
     - name: Setup python and restore poetry
-      uses: actions/setup-python@v4
+      uses: actions/setup-python@v5
       with:
-        python-version-file: 'pyproject.toml'
+        python-version-file: "pyproject.toml"
         cache: "poetry"
     - name: Setup node
-      uses: actions/setup-node@v3
+      uses: actions/setup-node@v4
       with:
         node-version-file: web/package.json
         cache: "npm"
         cache-dependency-path: web/package-lock.json
     - name: Setup go
-      uses: actions/setup-go@v4
+      uses: actions/setup-go@v5
       with:
         go-version-file: "go.mod"
     - name: Setup dependencies
       shell: bash
       run: |
         export PSQL_TAG=${{ inputs.postgresql_version }}
-        docker-compose -f .github/actions/setup/docker-compose.yml up -d
+        docker compose -f .github/actions/setup/docker-compose.yml up -d
         poetry install
         cd web && npm ci
     - name: Generate config
.github/actions/setup/docker-compose.yml (4 changes)
@@ -1,8 +1,6 @@
-version: "3.7"
-
 services:
   postgresql:
-    image: docker.io/library/postgres:${PSQL_TAG:-12}
+    image: docker.io/library/postgres:${PSQL_TAG:-16}
     volumes:
       - db-data:/var/lib/postgresql/data
     environment:
.github/codespell-words.txt (2 changes)
@@ -3,3 +3,5 @@ keypairs
 hass
 warmup
 ontext
+singed
+assertIn
.github/dependabot.yml (40 changes)
@@ -21,7 +21,10 @@ updates:
     labels:
       - dependencies
   - package-ecosystem: npm
-    directory: "/web"
+    directories:
+      - "/web"
+      - "/tests/wdio"
+      - "/web/sfe"
     schedule:
       interval: daily
       time: "04:00"
@@ -30,11 +33,11 @@ updates:
     open-pull-requests-limit: 10
     commit-message:
       prefix: "web:"
-    # TODO: deduplicate these groups
     groups:
       sentry:
         patterns:
           - "@sentry/*"
+          - "@spotlightjs/*"
       babel:
         patterns:
           - "@babel/*"
@@ -51,37 +54,10 @@ updates:
       esbuild:
         patterns:
           - "@esbuild/*"
-  - package-ecosystem: npm
-    directory: "/tests/wdio"
-    schedule:
-      interval: daily
-      time: "04:00"
-    labels:
-      - dependencies
-    open-pull-requests-limit: 10
-    commit-message:
-      prefix: "web:"
-    # TODO: deduplicate these groups
-    groups:
-      sentry:
-        patterns:
-          - "@sentry/*"
-      babel:
-        patterns:
-          - "@babel/*"
-          - "babel-*"
-      eslint:
-        patterns:
-          - "@typescript-eslint/*"
-          - "eslint"
-          - "eslint-*"
-      storybook:
-        patterns:
-          - "@storybook/*"
-          - "*storybook*"
-      esbuild:
-        patterns:
-          - "@esbuild/*"
+      rollup:
+        patterns:
+          - "@rollup/*"
+          - "rollup-*"
       wdio:
         patterns:
           - "@wdio/*"
.github/pull_request_template.md (1 change)
@@ -27,7 +27,6 @@ If an API change has been made
 If changes to the frontend have been made

 - [ ] The code has been formatted (`make web`)
-- [ ] The translation files have been updated (`make i18n-extract`)

 If applicable

.github/workflows/api-py-publish.yml (new file, 65 lines)
@@ -0,0 +1,65 @@
+name: authentik-api-py-publish
+on:
+  push:
+    branches: [main]
+    paths:
+      - "schema.yml"
+  workflow_dispatch:
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write
+    steps:
+      - id: generate_token
+        uses: tibdex/github-app-token@v2
+        with:
+          app_id: ${{ secrets.GH_APP_ID }}
+          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+      - uses: actions/checkout@v4
+        with:
+          token: ${{ steps.generate_token.outputs.token }}
+      - name: Install poetry & deps
+        shell: bash
+        run: |
+          pipx install poetry || true
+          sudo apt-get update
+          sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
+      - name: Setup python and restore poetry
+        uses: actions/setup-python@v5
+        with:
+          python-version-file: "pyproject.toml"
+          cache: "poetry"
+      - name: Generate API Client
+        run: make gen-client-py
+      - name: Publish package
+        working-directory: gen-py-api/
+        run: |
+          poetry build
+      - name: Publish package to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          packages-dir: gen-py-api/dist/
+      # We can't easily upgrade the API client being used due to poetry being poetry
+      # so we'll have to rely on dependabot
+      # - name: Upgrade /
+      #   run: |
+      #     export VERSION=$(cd gen-py-api && poetry version -s)
+      #     poetry add "authentik_client=$VERSION" --allow-prereleases --lock
+      # - uses: peter-evans/create-pull-request@v6
+      #   id: cpr
+      #   with:
+      #     token: ${{ steps.generate_token.outputs.token }}
+      #     branch: update-root-api-client
+      #     commit-message: "root: bump API Client version"
+      #     title: "root: bump API Client version"
+      #     body: "root: bump API Client version"
+      #     delete-branch: true
+      #     signoff: true
+      #     # ID from https://api.github.com/users/authentik-automation[bot]
+      #     author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
+      # - uses: peter-evans/enable-pull-request-automerge@v3
+      #   with:
+      #     token: ${{ steps.generate_token.outputs.token }}
+      #     pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
+      #     merge-method: squash
@@ -1,4 +1,4 @@
-name: authentik-web-api-publish
+name: authentik-api-ts-publish
 on:
   push:
     branches: [main]
@@ -31,11 +31,16 @@ jobs:
         env:
           NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}
       - name: Upgrade /web
-        working-directory: web/
+        working-directory: web
        run: |
           export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
           npm i @goauthentik/api@$VERSION
-      - uses: peter-evans/create-pull-request@v5
+      - name: Upgrade /web/sfe
+        working-directory: web/sfe
+        run: |
+          export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
+          npm i @goauthentik/api@$VERSION
+      - uses: peter-evans/create-pull-request@v6
         id: cpr
         with:
           token: ${{ steps.generate_token.outputs.token }}
107
.github/workflows/ci-main.yml
vendored
107
.github/workflows/ci-main.yml
vendored
@ -1,3 +1,4 @@
|
|||||||
|
---
|
||||||
name: authentik-ci-main
|
name: authentik-ci-main
|
||||||
|
|
||||||
on:
|
on:
|
||||||
@ -6,8 +7,6 @@ on:
|
|||||||
- main
|
- main
|
||||||
- next
|
- next
|
||||||
- version-*
|
- version-*
|
||||||
paths-ignore:
|
|
||||||
- website
|
|
||||||
pull_request:
|
pull_request:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
@ -27,10 +26,7 @@ jobs:
|
|||||||
- bandit
|
- bandit
|
||||||
- black
|
- black
|
||||||
- codespell
|
- codespell
|
||||||
- isort
|
|
||||||
- pending-migrations
|
- pending-migrations
|
||||||
- pylint
|
|
||||||
- pyright
|
|
||||||
- ruff
|
- ruff
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
@ -54,7 +50,6 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
psql:
|
psql:
|
||||||
- 12-alpine
|
|
||||||
- 15-alpine
|
- 15-alpine
|
||||||
- 16-alpine
|
- 16-alpine
|
||||||
steps:
|
steps:
|
||||||
@ -69,7 +64,7 @@ jobs:
|
|||||||
cp authentik/lib/default.yml local.env.yml
|
cp authentik/lib/default.yml local.env.yml
|
||||||
cp -R .github ..
|
cp -R .github ..
|
||||||
cp -R scripts ..
|
cp -R scripts ..
|
||||||
git checkout version/$(python -c "from authentik import __version__; print(__version__)")
|
git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1)
|
||||||
rm -rf .github/ scripts/
|
rm -rf .github/ scripts/
|
||||||
mv ../.github ../scripts .
|
mv ../.github ../scripts .
|
||||||
- name: Setup authentik env (stable)
|
- name: Setup authentik env (stable)
|
||||||
@ -108,7 +103,6 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
psql:
|
psql:
|
||||||
- 12-alpine
|
|
||||||
- 15-alpine
|
- 15-alpine
|
||||||
- 16-alpine
|
- 16-alpine
|
||||||
steps:
|
steps:
|
||||||
@ -122,9 +116,10 @@ jobs:
|
|||||||
poetry run make test
|
poetry run make test
|
||||||
poetry run coverage xml
|
poetry run coverage xml
|
||||||
- if: ${{ always() }}
|
- if: ${{ always() }}
|
||||||
uses: codecov/codecov-action@v3
|
uses: codecov/codecov-action@v4
|
||||||
with:
|
with:
|
||||||
flags: unit
|
flags: unit
|
||||||
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
test-integration:
|
test-integration:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
@ -133,15 +128,16 @@ jobs:
|
|||||||
- name: Setup authentik env
|
- name: Setup authentik env
|
||||||
uses: ./.github/actions/setup
|
uses: ./.github/actions/setup
|
||||||
- name: Create k8s Kind Cluster
|
- name: Create k8s Kind Cluster
|
||||||
uses: helm/kind-action@v1.8.0
|
uses: helm/kind-action@v1.10.0
|
||||||
- name: run integration
|
- name: run integration
|
||||||
run: |
|
run: |
|
||||||
poetry run coverage run manage.py test tests/integration
|
poetry run coverage run manage.py test tests/integration
|
||||||
poetry run coverage xml
|
poetry run coverage xml
|
||||||
- if: ${{ always() }}
|
- if: ${{ always() }}
|
||||||
uses: codecov/codecov-action@v3
|
uses: codecov/codecov-action@v4
|
||||||
with:
|
with:
|
||||||
flags: integration
|
flags: integration
|
||||||
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
test-e2e:
|
test-e2e:
|
||||||
name: test-e2e (${{ matrix.job.name }})
|
name: test-e2e (${{ matrix.job.name }})
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@ -162,6 +158,8 @@ jobs:
|
|||||||
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
|
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
|
||||||
- name: radius
|
- name: radius
|
||||||
glob: tests/e2e/test_provider_radius*
|
glob: tests/e2e/test_provider_radius*
|
||||||
|
- name: scim
|
||||||
|
glob: tests/e2e/test_source_scim*
|
||||||
- name: flows
|
- name: flows
|
||||||
glob: tests/e2e/test_flows*
|
glob: tests/e2e/test_flows*
|
||||||
steps:
|
steps:
|
||||||
@ -170,9 +168,9 @@ jobs:
|
|||||||
uses: ./.github/actions/setup
|
uses: ./.github/actions/setup
|
||||||
- name: Setup e2e env (chrome, etc)
|
- name: Setup e2e env (chrome, etc)
|
||||||
run: |
|
run: |
|
||||||
docker-compose -f tests/e2e/docker-compose.yml up -d
|
docker compose -f tests/e2e/docker-compose.yml up -d
|
||||||
- id: cache-web
|
- id: cache-web
|
||||||
uses: actions/cache@v3
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
path: web/dist
|
path: web/dist
|
||||||
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
|
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
|
||||||
@ -188,9 +186,10 @@ jobs:
|
|||||||
poetry run coverage run manage.py test ${{ matrix.job.glob }}
|
poetry run coverage run manage.py test ${{ matrix.job.glob }}
|
||||||
poetry run coverage xml
|
poetry run coverage xml
|
||||||
- if: ${{ always() }}
|
- if: ${{ always() }}
|
||||||
uses: codecov/codecov-action@v3
|
uses: codecov/codecov-action@v4
|
||||||
with:
|
with:
|
||||||
flags: e2e
|
flags: e2e
|
||||||
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
ci-core-mark:
|
ci-core-mark:
|
||||||
needs:
|
needs:
|
||||||
- lint
|
- lint
|
||||||
@ -203,6 +202,12 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- run: echo mark
|
- run: echo mark
|
||||||
build:
|
build:
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
arch:
|
||||||
|
- amd64
|
||||||
|
- arm64
|
||||||
needs: ci-core-mark
|
needs: ci-core-mark
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
@ -214,7 +219,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v3.1.0
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: prepare variables
@@ -222,9 +227,12 @@ jobs:
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+        with:
+          image-name: ghcr.io/goauthentik/dev-server
+          image-arch: ${{ matrix.arch }}
      - name: Login to Container Registry
-        uses: docker/login-action@v3
        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -232,75 +240,22 @@ jobs:
      - name: generate ts client
        run: make gen-client-ts
      - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
        with:
          context: .
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
+          tags: ${{ steps.ev.outputs.imageTags }}
          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
-          tags: |
-            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}
-            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}
-            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
          build-args: |
            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
-            VERSION=${{ steps.ev.outputs.version }}
-            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
+          cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache
+          cache-to: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max
+          platforms: linux/${{ matrix.arch }}
-  build-arm64:
-    needs: ci-core-mark
-    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
-    timeout-minutes: 120
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          ref: ${{ github.event.pull_request.head.sha }}
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-      - name: prepare variables
-        uses: ./.github/actions/docker-push-variables
-        id: ev
-        env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
-      - name: Login to Container Registry
-        uses: docker/login-action@v3
-        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: generate ts client
-        run: make gen-client-ts
-      - name: Build Docker Image
-        uses: docker/build-push-action@v5
-        with:
-          context: .
-          secrets: |
-            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
-            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
-          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
-          tags: |
-            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-arm64
-            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}-arm64
-            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64
-          build-args: |
-            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
-            VERSION=${{ steps.ev.outputs.version }}
-            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
-          platforms: linux/arm64
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
  pr-comment:
    needs:
      - build
-      - build-arm64
    runs-on: ubuntu-latest
    if: ${{ github.event_name == 'pull_request' }}
    permissions:
@@ -316,7 +271,9 @@ jobs:
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+        with:
+          image-name: ghcr.io/goauthentik/dev-server
      - name: Comment on PR
        uses: ./.github/actions/comment-pr-instructions
        with:
-          tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
+          tag: gh-${{ steps.ev.outputs.imageMainTag }}

.github/workflows/ci-outpost.yml (23 changes, vendored)
@@ -1,3 +1,4 @@
+---
name: authentik-ci-outpost

on:
@@ -28,7 +29,7 @@ jobs:
      - name: Generate API
        run: make gen-client-go
      - name: golangci-lint
-        uses: golangci/golangci-lint-action@v3
+        uses: golangci/golangci-lint-action@v6
        with:
          version: v1.54.2
          args: --timeout 5000s --verbose
@@ -75,7 +76,7 @@ jobs:
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v3.1.0
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: prepare variables
@@ -83,9 +84,11 @@ jobs:
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+        with:
+          image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
      - name: Login to Container Registry
-        uses: docker/login-action@v3
        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -93,21 +96,17 @@ jobs:
      - name: Generate API
        run: make gen-client-go
      - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
        with:
-          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
-          tags: |
-            ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}
-            ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
+          tags: ${{ steps.ev.outputs.imageTags }}
          file: ${{ matrix.type }}.Dockerfile
+          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
          build-args: |
            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
-            VERSION=${{ steps.ev.outputs.version }}
-            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
          platforms: linux/amd64,linux/arm64
          context: .
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
+          cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
+          cache-to: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache,mode=max
  build-binary:
    timeout-minutes: 120
    needs:

.github/workflows/ci-web.yml (112 changes, vendored)
@@ -12,14 +12,36 @@ on:
      - version-*

jobs:
-  lint-eslint:
+  lint:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
+        command:
+          - lint
+          - lint:lockfile
+          - tsc
+          - prettier-check
        project:
          - web
          - tests/wdio
+        include:
+          - command: tsc
+            project: web
+            extra_setup: |
+              cd sfe/ && npm ci
+          - command: lit-analyse
+            project: web
+            extra_setup: |
+              # lit-analyse doesn't understand path rewrites, so make it
+              # belive it's an actual module
+              cd node_modules/@goauthentik
+              ln -s ../../src/ web
+        exclude:
+          - command: lint:lockfile
+            project: tests/wdio
+          - command: tsc
+            project: tests/wdio
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
@@ -28,77 +50,17 @@ jobs:
          cache: "npm"
          cache-dependency-path: ${{ matrix.project }}/package-lock.json
      - working-directory: ${{ matrix.project }}/
-        run: npm ci
-      - name: Generate API
-        run: make gen-client-ts
-      - name: Eslint
-        working-directory: ${{ matrix.project }}/
-        run: npm run lint
-  lint-build:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
-        with:
-          node-version-file: web/package.json
-          cache: "npm"
-          cache-dependency-path: web/package-lock.json
-      - working-directory: web/
-        run: npm ci
-      - name: Generate API
-        run: make gen-client-ts
-      - name: TSC
-        working-directory: web/
-        run: npm run tsc
-  lint-prettier:
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        project:
-          - web
-          - tests/wdio
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
-        with:
-          node-version-file: ${{ matrix.project }}/package.json
-          cache: "npm"
-          cache-dependency-path: ${{ matrix.project }}/package-lock.json
-      - working-directory: ${{ matrix.project }}/
-        run: npm ci
-      - name: Generate API
-        run: make gen-client-ts
-      - name: prettier
-        working-directory: ${{ matrix.project }}/
-        run: npm run prettier-check
-  lint-lit-analyse:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
-        with:
-          node-version-file: web/package.json
-          cache: "npm"
-          cache-dependency-path: web/package-lock.json
-      - working-directory: web/
        run: |
          npm ci
-          # lit-analyse doesn't understand path rewrites, so make it
-          # belive it's an actual module
-          cd node_modules/@goauthentik
-          ln -s ../../src/ web
+          ${{ matrix.extra_setup }}
      - name: Generate API
        run: make gen-client-ts
-      - name: lit-analyse
-        working-directory: web/
-        run: npm run lit-analyse
+      - name: Lint
+        working-directory: ${{ matrix.project }}/
+        run: npm run ${{ matrix.command }}
  ci-web-mark:
    needs:
-      - lint-eslint
-      - lint-prettier
-      - lint-lit-analyse
-      - lint-build
+      - lint
    runs-on: ubuntu-latest
    steps:
      - run: echo mark
@@ -120,3 +82,21 @@ jobs:
      - name: build
        working-directory: web/
        run: npm run build
+  test:
+    needs:
+      - ci-web-mark
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
+        with:
+          node-version-file: web/package.json
+          cache: "npm"
+          cache-dependency-path: web/package-lock.json
+      - working-directory: web/
+        run: npm ci
+      - name: Generate API
+        run: make gen-client-ts
+      - name: test
+        working-directory: web/
+        run: npm run test
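
Note: the four former web lint jobs (eslint, tsc, prettier, lit-analyse) are folded into a single `lint` job that fans out over a `command` matrix. As a rough local equivalent of what each matrix entry runs (a sketch, assuming a repository checkout with the Node toolchain already installed; the commands themselves are the ones named in the workflow above):

```bash
# Reproduce the CI "lint" matrix locally (run from the repository root).
make gen-client-ts                  # generate the TypeScript API client first
cd web/ && npm ci                   # or tests/wdio/, per the matrix "project" value
for cmd in lint lint:lockfile tsc prettier-check; do
    npm run "$cmd"                  # each matrix entry runs exactly one of these
done
```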

.github/workflows/ci-website.yml (20 changes, vendored)
@@ -12,20 +12,21 @@ on:
      - version-*

jobs:
-  lint-prettier:
+  lint:
    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        command:
+          - lint:lockfile
+          - prettier-check
    steps:
      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
-        with:
-          node-version-file: website/package.json
-          cache: "npm"
-          cache-dependency-path: website/package-lock.json
      - working-directory: website/
        run: npm ci
-      - name: prettier
+      - name: Lint
        working-directory: website/
-        run: npm run prettier-check
+        run: npm run ${{ matrix.command }}
  test:
    runs-on: ubuntu-latest
    steps:
@@ -48,7 +49,6 @@ jobs:
      matrix:
        job:
          - build
-          - build-docs-only
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
@@ -63,7 +63,7 @@ jobs:
        run: npm run ${{ matrix.job }}
  ci-website-mark:
    needs:
-      - lint-prettier
+      - lint
      - test
      - build
    runs-on: ubuntu-latest

.github/workflows/gen-update-webauthn-mds.yml (new file, 43 lines, vendored)
@@ -0,0 +1,43 @@
+name: authentik-gen-update-webauthn-mds
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '30 1 1,15 * *'
+
+env:
+  POSTGRES_DB: authentik
+  POSTGRES_USER: authentik
+  POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - id: generate_token
+        uses: tibdex/github-app-token@v2
+        with:
+          app_id: ${{ secrets.GH_APP_ID }}
+          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+      - uses: actions/checkout@v4
+        with:
+          token: ${{ steps.generate_token.outputs.token }}
+      - name: Setup authentik env
+        uses: ./.github/actions/setup
+      - run: poetry run ak update_webauthn_mds
+      - uses: peter-evans/create-pull-request@v6
+        id: cpr
+        with:
+          token: ${{ steps.generate_token.outputs.token }}
+          branch: update-fido-mds-client
+          commit-message: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
+          title: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
+          body: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
+          delete-branch: true
+          signoff: true
+          # ID from https://api.github.com/users/authentik-automation[bot]
+          author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
+      - uses: peter-evans/enable-pull-request-automerge@v3
+        with:
+          token: ${{ steps.generate_token.outputs.token }}
+          pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
+          merge-method: squash

.github/workflows/image-compress.yml (2 changes, vendored)
@@ -42,7 +42,7 @@ jobs:
        with:
          githubToken: ${{ steps.generate_token.outputs.token }}
          compressOnly: ${{ github.event_name != 'pull_request' }}
-      - uses: peter-evans/create-pull-request@v5
+      - uses: peter-evans/create-pull-request@v6
        if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
        id: cpr
        with:

.github/workflows/release-publish.yml (64 changes, vendored)
@@ -1,3 +1,4 @@
+---
name: authentik-on-release

on:
@@ -13,12 +14,16 @@ jobs:
    steps:
      - uses: actions/checkout@v4
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v3.1.0
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
+        env:
+          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+        with:
+          image-name: ghcr.io/goauthentik/server,beryju/authentik
      - name: Docker Login Registry
        uses: docker/login-action@v3
        with:
@@ -35,24 +40,15 @@ jobs:
          mkdir -p ./gen-ts-api
          mkdir -p ./gen-go-api
      - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
        with:
          context: .
-          push: ${{ github.event_name == 'release' }}
+          push: true
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
-          tags: |
-            beryju/authentik:${{ steps.ev.outputs.version }},
-            beryju/authentik:${{ steps.ev.outputs.versionFamily }},
-            beryju/authentik:latest,
-            ghcr.io/goauthentik/server:${{ steps.ev.outputs.version }},
-            ghcr.io/goauthentik/server:${{ steps.ev.outputs.versionFamily }},
-            ghcr.io/goauthentik/server:latest
+          tags: ${{ steps.ev.outputs.imageTags }}
          platforms: linux/amd64,linux/arm64
-          build-args: |
-            VERSION=${{ steps.ev.outputs.version }}
-            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
  build-outpost:
    runs-on: ubuntu-latest
    permissions:
@@ -72,12 +68,16 @@ jobs:
        with:
          go-version-file: "go.mod"
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v3.1.0
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
+        env:
+          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+        with:
+          image-name: ghcr.io/goauthentik/${{ matrix.type }},beryju/authentik-${{ matrix.type }}
      - name: make empty clients
        run: |
          mkdir -p ./gen-ts-api
@@ -94,22 +94,13 @@ jobs:
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
        with:
-          push: ${{ github.event_name == 'release' }}
+          push: true
-          tags: |
-            beryju/authentik-${{ matrix.type }}:${{ steps.ev.outputs.version }},
-            beryju/authentik-${{ matrix.type }}:${{ steps.ev.outputs.versionFamily }},
-            beryju/authentik-${{ matrix.type }}:latest,
-            ghcr.io/goauthentik/${{ matrix.type }}:${{ steps.ev.outputs.version }},
-            ghcr.io/goauthentik/${{ matrix.type }}:${{ steps.ev.outputs.versionFamily }},
-            ghcr.io/goauthentik/${{ matrix.type }}:latest
+          tags: ${{ steps.ev.outputs.imageTags }}
          file: ${{ matrix.type }}.Dockerfile
          platforms: linux/amd64,linux/arm64
          context: .
-          build-args: |
-            VERSION=${{ steps.ev.outputs.version }}
-            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
  build-outpost-binary:
    timeout-minutes: 120
    runs-on: ubuntu-latest
@@ -164,12 +155,12 @@ jobs:
      - uses: actions/checkout@v4
      - name: Run test suite in final docker images
        run: |
-          echo "PG_PASS=$(openssl rand -base64 32)" >> .env
-          echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
+          echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
+          echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
-          docker-compose pull -q
-          docker-compose up --no-start
-          docker-compose start postgresql redis
-          docker-compose run -u root server test-all
+          docker compose pull -q
+          docker compose up --no-start
+          docker compose start postgresql redis
+          docker compose run -u root server test-all
  sentry-release:
    needs:
      - build-server
@@ -181,15 +172,18 @@ jobs:
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
+        env:
+          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+        with:
+          image-name: ghcr.io/goauthentik/server
      - name: Get static files from docker image
        run: |
-          docker pull ghcr.io/goauthentik/server:latest
-          container=$(docker container create ghcr.io/goauthentik/server:latest)
+          docker pull ${{ steps.ev.outputs.imageMainTag }}
+          container=$(docker container create ${{ steps.ev.outputs.imageMainTag }})
          docker cp ${container}:web/ .
      - name: Create a Sentry.io release
        uses: getsentry/action-release@v1
        continue-on-error: true
-        if: ${{ github.event_name == 'release' }}
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          SENTRY_ORG: authentik-security-inc
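
Note: the test steps above also move from the standalone `docker-compose` v1 binary to the `docker compose` plugin syntax (Compose v2). A quick way to confirm the plugin is available before running the same sequence locally (a sketch; only the `version` check is added here, the rest is taken verbatim from the workflow):

```bash
docker compose version              # confirms the Compose v2 plugin is installed
docker compose pull -q
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
```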

.github/workflows/release-tag.yml (27 changes, vendored)
@@ -1,3 +1,4 @@
+---
name: authentik-on-tag

on:
@@ -13,28 +14,28 @@ jobs:
      - uses: actions/checkout@v4
      - name: Pre-release test
        run: |
-          echo "PG_PASS=$(openssl rand -base64 32)" >> .env
-          echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
+          echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
+          echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
          docker buildx install
          mkdir -p ./gen-ts-api
          docker build -t testing:latest .
          echo "AUTHENTIK_IMAGE=testing" >> .env
          echo "AUTHENTIK_TAG=latest" >> .env
-          docker-compose up --no-start
-          docker-compose start postgresql redis
-          docker-compose run -u root server test-all
+          docker compose up --no-start
+          docker compose start postgresql redis
+          docker compose run -u root server test-all
      - id: generate_token
        uses: tibdex/github-app-token@v2
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - name: Extract version number
-        id: get_version
-        uses: actions/github-script@v7
+      - name: prepare variables
+        uses: ./.github/actions/docker-push-variables
+        id: ev
+        env:
+          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
        with:
-          github-token: ${{ steps.generate_token.outputs.token }}
-          script: |
-            return context.payload.ref.replace(/\/refs\/tags\/version\//, '');
+          image-name: ghcr.io/goauthentik/server
      - name: Create Release
        id: create_release
        uses: actions/create-release@v1.1.4
@@ -42,6 +43,6 @@ jobs:
          GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
        with:
          tag_name: ${{ github.ref }}
-          release_name: Release ${{ steps.get_version.outputs.result }}
+          release_name: Release ${{ steps.ev.outputs.version }}
          draft: true
-          prerelease: false
+          prerelease: ${{ steps.ev.outputs.prerelease == 'true' }}

.github/workflows/repo-stale.yml (2 changes, vendored)
@@ -23,7 +23,7 @@ jobs:
          repo-token: ${{ steps.generate_token.outputs.token }}
          days-before-stale: 60
          days-before-close: 7
-          exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question
+          exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
          stale-issue-label: wontfix
          stale-issue-message: >
            This issue has been automatically marked as stale because it has not had

.github/workflows/translation-advice.yml (4 changes, vendored)
@@ -19,14 +19,14 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Find Comment
-        uses: peter-evans/find-comment@v2
+        uses: peter-evans/find-comment@v3
        id: fc
        with:
          issue-number: ${{ github.event.pull_request.number }}
          comment-author: "github-actions[bot]"
          body-includes: authentik translations instructions
      - name: Create or update comment
-        uses: peter-evans/create-or-update-comment@v3
+        uses: peter-evans/create-or-update-comment@v4
        with:
          comment-id: ${{ steps.fc.outputs.comment-id }}
          issue-number: ${{ github.event.pull_request.number }}

@@ -1,9 +1,8 @@
-name: authentik-backend-translate-compile
+---
+name: authentik-backend-translate-extract-compile
on:
-  push:
-    branches: [main]
-    paths:
-      - "locale/**"
+  schedule:
+    - cron: "0 0 * * *" # every day at midnight
  workflow_dispatch:

env:
@@ -25,16 +24,20 @@ jobs:
          token: ${{ steps.generate_token.outputs.token }}
      - name: Setup authentik env
        uses: ./.github/actions/setup
+      - name: run extract
+        run: |
+          poetry run make i18n-extract
      - name: run compile
-        run: poetry run ak compilemessages
+        run: |
+          poetry run ak compilemessages
+          make web-check-compile
      - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v5
-        id: cpr
+        uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ steps.generate_token.outputs.token }}
-          branch: compile-backend-translation
-          commit-message: "core: compile backend translations"
-          title: "core: compile backend translations"
-          body: "core: compile backend translations"
+          branch: extract-compile-backend-translation
+          commit-message: "core, web: update translations"
+          title: "core, web: update translations"
+          body: "core, web: update translations"
          delete-branch: true
          signoff: true

.vscode/extensions.json (3 changes, vendored)
@@ -10,8 +10,7 @@
        "Gruntfuggly.todo-tree",
        "mechatroner.rainbow-csv",
        "ms-python.black-formatter",
-        "ms-python.isort",
-        "ms-python.pylint",
+        "charliermarsh.ruff",
        "ms-python.python",
        "ms-python.vscode-pylance",
        "ms-python.black-formatter",

.vscode/settings.json (15 changes, vendored)
@@ -4,20 +4,21 @@
        "asgi",
        "authentik",
        "authn",
+        "entra",
        "goauthentik",
        "jwks",
+        "kubernetes",
        "oidc",
        "openid",
+        "passwordless",
        "plex",
        "saml",
-        "totp",
-        "webauthn",
-        "traefik",
-        "passwordless",
-        "kubernetes",
-        "sso",
-        "slo",
        "scim",
+        "slo",
+        "sso",
+        "totp",
+        "traefik",
+        "webauthn",
    ],
    "todo-tree.tree.showCountsInTree": true,
    "todo-tree.tree.showBadges": true,
@ -11,6 +11,8 @@ scripts/ @goauthentik/backend
|
|||||||
tests/ @goauthentik/backend
|
tests/ @goauthentik/backend
|
||||||
pyproject.toml @goauthentik/backend
|
pyproject.toml @goauthentik/backend
|
||||||
poetry.lock @goauthentik/backend
|
poetry.lock @goauthentik/backend
|
||||||
|
go.mod @goauthentik/backend
|
||||||
|
go.sum @goauthentik/backend
|
||||||
# Infrastructure
|
# Infrastructure
|
||||||
.github/ @goauthentik/infrastructure
|
.github/ @goauthentik/infrastructure
|
||||||
Dockerfile @goauthentik/infrastructure
|
Dockerfile @goauthentik/infrastructure
|
||||||
|

Dockerfile (52 changes)
@@ -1,7 +1,7 @@
# syntax=docker/dockerfile:1

# Stage 1: Build website
-FROM --platform=${BUILDPLATFORM} docker.io/node:21 as website-builder
+FROM --platform=${BUILDPLATFORM} docker.io/node:22 as website-builder

ENV NODE_ENV=production

@@ -14,30 +14,41 @@ RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.js

COPY ./website /work/website/
COPY ./blueprints /work/blueprints/
+COPY ./schema.yml /work/
COPY ./SECURITY.md /work/

-RUN npm run build-docs-only
+RUN npm run build-bundled

# Stage 2: Build webui
-FROM --platform=${BUILDPLATFORM} docker.io/node:21 as web-builder
+FROM --platform=${BUILDPLATFORM} docker.io/node:22 as web-builder

+ARG GIT_BUILD_HASH
+ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
ENV NODE_ENV=production

WORKDIR /work/web

RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
    --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
+    --mount=type=bind,target=/work/web/sfe/package.json,src=./web/sfe/package.json \
+    --mount=type=bind,target=/work/web/sfe/package-lock.json,src=./web/sfe/package-lock.json \
+    --mount=type=bind,target=/work/web/scripts,src=./web/scripts \
    --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \
+    npm ci --include=dev && \
+    cd sfe && \
    npm ci --include=dev

+COPY ./package.json /work
COPY ./web /work/web/
COPY ./website /work/website/
COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api

-RUN npm run build
+RUN npm run build && \
+    cd sfe && \
+    npm run build

# Stage 3: Build go proxy
-FROM --platform=${BUILDPLATFORM} docker.io/golang:1.21.5-bookworm AS go-builder
+FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.22-fips-bookworm AS go-builder

ARG TARGETOS
ARG TARGETARCH
@@ -48,6 +59,11 @@ ARG GOARCH=$TARGETARCH

WORKDIR /go/src/goauthentik.io

+RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
+    dpkg --add-architecture arm64 && \
+    apt-get update && \
+    apt-get install -y --no-install-recommends crossbuild-essential-arm64 gcc-aarch64-linux-gnu
+
RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
    --mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \
    --mount=type=cache,target=/go/pkg/mod \
@@ -62,17 +78,17 @@ COPY ./internal /go/src/goauthentik.io/internal
COPY ./go.mod /go/src/goauthentik.io/go.mod
COPY ./go.sum /go/src/goauthentik.io/go.sum

-ENV CGO_ENABLED=0
-
RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
    --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
-    GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server
+    if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \
+    CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \
+    go build -o /go/authentik ./cmd/server

# Stage 4: MaxMind GeoIP
-FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v6.0 as geoip
+FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 as geoip

ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
-ENV GEOIPUPDATE_VERBOSE="true"
+ENV GEOIPUPDATE_VERBOSE="1"
ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"

@@ -83,7 +99,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
    /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

# Stage 5: Python dependencies
-FROM docker.io/python:3.12.1-slim-bookworm AS python-deps
+FROM ghcr.io/goauthentik/fips-python:3.12.3-slim-bookworm-fips-full AS python-deps

WORKDIR /ak-root/poetry

@@ -96,19 +112,21 @@ RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloa
RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
    apt-get update && \
    # Required for installing pip packages
-    apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev
+    apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev

RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
    --mount=type=bind,target=./poetry.lock,src=./poetry.lock \
    --mount=type=cache,target=/root/.cache/pip \
    --mount=type=cache,target=/root/.cache/pypoetry \
    python -m venv /ak-root/venv/ && \
+    bash -c "source ${VENV_PATH}/bin/activate && \
    pip3 install --upgrade pip && \
    pip3 install poetry && \
-    poetry install --only=main --no-ansi --no-interaction
+    poetry install --only=main --no-ansi --no-interaction --no-root && \
+    pip install --force-reinstall /wheels/*"

# Stage 6: Run
-FROM docker.io/python:3.12.1-slim-bookworm AS final-image
+FROM ghcr.io/goauthentik/fips-python:3.12.3-slim-bookworm-fips-full AS final-image

ARG GIT_BUILD_HASH
ARG VERSION
@@ -125,7 +143,7 @@ WORKDIR /
# We cannot cache this layer otherwise we'll end up with a bigger image
RUN apt-get update && \
    # Required for runtime
-    apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 ca-certificates && \
+    apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates && \
    # Required for bootstrap & healtcheck
    apt-get install -y --no-install-recommends runit && \
    apt-get clean && \
@@ -149,7 +167,7 @@ COPY --from=go-builder /go/authentik /bin/authentik
COPY --from=python-deps /ak-root/venv /ak-root/venv
COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/
-COPY --from=website-builder /work/website/help/ /website/help/
+COPY --from=website-builder /work/website/build/ /website/help/
COPY --from=geoip /usr/share/GeoIP /geoip

USER 1000
@@ -161,6 +179,8 @@ ENV TMPDIR=/dev/shm/ \
    VENV_PATH="/ak-root/venv" \
    POETRY_VIRTUALENVS_CREATE=false

+ENV GOFIPS=1
+
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]

ENTRYPOINT [ "dumb-init", "--", "ak" ]
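
Note: the Go build stage now uses a FIPS-capable toolchain; `CGO_ENABLED=1`, `GOEXPERIMENT=systemcrypto` and the `requirefips` build tag all come from the hunk above. A condensed sketch of that cross-compile logic for an arm64 target (flags and paths exactly as in the Dockerfile; nothing new is introduced):

```bash
# As in the go-builder stage: pick a cross-compiler when targeting arm64,
# then build with the FIPS-oriented flags.
if [ "$TARGETARCH" = "arm64" ]; then
    export CC=aarch64-linux-gnu-gcc
    export CC_FOR_TARGET=gcc-aarch64-linux-gnu
fi
CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" \
    GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server
```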

Makefile (133 changes)
@@ -5,9 +5,13 @@ PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.npm_version)
-PY_SOURCES = authentik tests scripts lifecycle
+PY_SOURCES = authentik tests scripts lifecycle .github
DOCKER_IMAGE ?= "authentik:test"

+GEN_API_TS = "gen-ts-api"
+GEN_API_PY = "gen-py-api"
+GEN_API_GO = "gen-go-api"
+
pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
@@ -15,6 +19,7 @@ pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
    -I .github/codespell-words.txt \
    -S 'web/src/locales/**' \
+    -S 'website/developer-docs/api/reference/**' \
    authentik \
    internal \
    cmd \
@@ -42,12 +47,12 @@ test-go:
    go test -timeout 0 -v -race -cover ./...

test-docker: ## Run all tests in a docker-compose
-    echo "PG_PASS=$(openssl rand -base64 32)" >> .env
-    echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
+    echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env
+    echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env
-    docker-compose pull -q
-    docker-compose up --no-start
-    docker-compose start postgresql redis
-    docker-compose run -u root server test-all
+    docker compose pull -q
+    docker compose up --no-start
+    docker compose start postgresql redis
+    docker compose run -u root server test-all
    rm -f .env

test: ## Run the server tests and produce a coverage report (locally)
@@ -55,28 +60,37 @@ test: ## Run the server tests and produce a coverage report (locally)
    coverage html
    coverage report

-lint-fix: ## Lint and automatically fix errors in the python source code. Reports spelling errors.
-    isort $(PY_SOURCES)
+lint-fix: lint-codespell ## Lint and automatically fix errors in the python source code. Reports spelling errors.
    black $(PY_SOURCES)
-    ruff --fix $(PY_SOURCES)
+    ruff check --fix $(PY_SOURCES)

+lint-codespell: ## Reports spelling errors.
    codespell -w $(CODESPELL_ARGS)

lint: ## Lint the python and golang sources
-    bandit -r $(PY_SOURCES) -x node_modules
-    ./web/node_modules/.bin/pyright $(PY_SOURCES)
-    pylint $(PY_SOURCES)
+    bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules
    golangci-lint run -v

+core-install:
+    poetry install
+
migrate: ## Run the Authentik Django server's migrations
    python -m lifecycle.migrate

-i18n-extract: i18n-extract-core web-i18n-extract ## Extract strings that require translation into files to send to a translation service
+i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service

-i18n-extract-core:
-    ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en
+core-i18n-extract:
+    ak makemessages \
+        --add-location file \
+        --no-obsolete \
+        --ignore web \
+        --ignore internal \
+        --ignore ${GEN_API_TS} \
+        --ignore ${GEN_API_GO} \
+        --ignore website \
+        -l en

-install: web-install website-install ## Install all requires dependencies for `web`, `website` and `core`
-    poetry install
+install: web-install website-install core-install ## Install all requires dependencies for `web`, `website` and `core`

dev-drop-db:
    dropdb -U ${pg_user} -h ${pg_host} ${pg_name}
@@ -94,8 +108,14 @@ dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik
#########################

gen-build: ## Extract the schema from the database
-    AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
-    AUTHENTIK_DEBUG=true ak spectacular --file schema.yml
+    AUTHENTIK_DEBUG=true \
+    AUTHENTIK_TENANTS__ENABLED=true \
+    AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
+    ak make_blueprint_schema > blueprints/schema.json
+    AUTHENTIK_DEBUG=true \
+    AUTHENTIK_TENANTS__ENABLED=true \
+    AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
+    ak spectacular --file schema.yml

gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
    git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
@@ -106,7 +126,7 @@ gen-diff: ## (Release) generate the changelog diff between the current schema a
    docker run \
        --rm -v ${PWD}:/local \
        --user ${UID}:${GID} \
-        docker.io/openapitools/openapi-diff:2.1.0-beta.6 \
+        docker.io/openapitools/openapi-diff:2.1.0-beta.8 \
        --markdown /local/diff.md \
        /local/old_schema.yml /local/schema.yml
    rm old_schema.yml
@@ -114,48 +134,69 @@ gen-diff: ## (Release) generate the changelog diff between the current schema a
    sed -i 's/}/}/g' diff.md
    npx prettier --write diff.md

-gen-clean:
-    rm -rf gen-go-api/
-    rm -rf gen-ts-api/
-    rm -rf web/node_modules/@goauthentik/api/
+gen-clean-ts: ## Remove generated API client for Typescript
+    rm -rf ./${GEN_API_TS}/
+    rm -rf ./web/node_modules/@goauthentik/api/

-gen-client-ts: ## Build and install the authentik API for Typescript into the authentik UI Application
+gen-clean-go: ## Remove generated API client for Go
+    rm -rf ./${GEN_API_GO}/
+
+gen-clean-py: ## Remove generated API client for Python
+    rm -rf ./${GEN_API_PY}/
+
+gen-clean: gen-clean-ts gen-clean-go gen-clean-py ## Remove generated API clients
+
+gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescript into the authentik UI Application
    docker run \
        --rm -v ${PWD}:/local \
        --user ${UID}:${GID} \
        docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
        -i /local/schema.yml \
        -g typescript-fetch \
-        -o /local/gen-ts-api \
+        -o /local/${GEN_API_TS} \
        -c /local/scripts/api-ts-config.yaml \
        --additional-properties=npmVersion=${NPM_VERSION} \
        --git-repo-id authentik \
        --git-user-id goauthentik
    mkdir -p web/node_modules/@goauthentik/api
-    cd gen-ts-api && npm i
-    \cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api
+    cd ./${GEN_API_TS} && npm i
+    \cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api

-gen-client-go: ## Build and install the authentik API for Golang
-    mkdir -p ./gen-go-api ./gen-go-api/templates
-    wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./gen-go-api/config.yaml
-    wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./gen-go-api/templates/README.mustache
-    wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./gen-go-api/templates/go.mod.mustache
-    cp schema.yml ./gen-go-api/
+gen-client-py: gen-clean-py ## Build and install the authentik API for Python
    docker run \
-        --rm -v ${PWD}/gen-go-api:/local \
+        --rm -v ${PWD}:/local \
+        --user ${UID}:${GID} \
+        docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \
+        -i /local/schema.yml \
+        -g python \
+        -o /local/${GEN_API_PY} \
+        -c /local/scripts/api-py-config.yaml \
+        --additional-properties=packageVersion=${NPM_VERSION} \
+        --git-repo-id authentik \
+        --git-user-id goauthentik
+    pip install ./${GEN_API_PY}
+
+gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
+    mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates
+    wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml
+    wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache
+    wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache
+    cp schema.yml ./${GEN_API_GO}/
+    docker run \
+        --rm -v ${PWD}/${GEN_API_GO}:/local \
        --user ${UID}:${GID} \
        docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
        -i /local/schema.yml \
        -g go \
        -o /local/ \
        -c /local/config.yaml
-    go mod edit -replace goauthentik.io/api/v3=./gen-go-api
-    rm -rf ./gen-go-api/config.yaml ./gen-go-api/templates/
+    go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO}
+    rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/

gen-dev-config: ## Generate a local development config file
    python -m scripts.generate_config

-gen: gen-build gen-clean gen-client-ts
+gen: gen-build gen-client-ts

#########################
## Web
@@ -164,7 +205,7 @@ gen: gen-build gen-client-ts
web-build: web-install ## Build the Authentik UI
    cd web && npm run build

-web: web-lint-fix web-lint web-check-compile web-i18n-extract ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
+web: web-lint-fix web-lint web-check-compile ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it

web-install: ## Install the necessary libraries to build the Authentik UI
    cd web && npm ci
@@ -200,7 +241,7 @@ website: website-lint-fix website-build ## Automatically fix formatting issues
website-install:
    cd website && npm ci

-website-lint-fix:
+website-lint-fix: lint-codespell
    cd website && npm run prettier

website-build:
@@ -214,6 +255,7 @@ website-watch: ## Build and watch the documentation website, updating automatic
#########################

docker: ## Build a docker image of the current source tree
+    mkdir -p ${GEN_API_TS}
    DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}

#########################
@@ -226,9 +268,6 @@ ci--meta-debug:
    python -V
    node --version

-ci-pylint: ci--meta-debug
-    pylint $(PY_SOURCES)
-
ci-black: ci--meta-debug
    black --check $(PY_SOURCES)

@@ -238,14 +277,8 @@ ci-ruff: ci--meta-debug
ci-codespell: ci--meta-debug
    codespell $(CODESPELL_ARGS) -s

-ci-isort: ci--meta-debug
-    isort --check $(PY_SOURCES)
-
ci-bandit: ci--meta-debug
    bandit -r $(PY_SOURCES)

-ci-pyright: ci--meta-debug
-    ./web/node_modules/.bin/pyright $(PY_SOURCES)
-
ci-pending-migrations: ci--meta-debug
    ak makemigrations --check
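
Note: in the Makefile recipes the secret generation is now wrapped in `$(shell ...)` because a bare `$(openssl rand ...)` inside a recipe is expanded by make as an (empty) variable reference rather than passed to the shell. At the shell level the new form boils down to the following, which appends a single-line base64 value to `.env` (a sketch of what `test-docker` effectively runs; the commands are the ones shown in the hunk above):

```bash
# 32 random bytes, base64-encoded without line wrapping, appended to .env
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
```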

@@ -25,10 +25,10 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h

## Screenshots

| Light | Dark |
-| ------------------------------------------------------ | ----------------------------------------------------- |
-|  |  |
-|  |  |
+| ----------------------------------------------------------- | ---------------------------------------------------------- |
+|  |  |
+|  |  |

## Development

SECURITY.md (20 changes)
@@ -18,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni

(.x being the latest patch release for each version)

| Version | Supported |
-| --- | --- |
-| 2023.6.x | ✅ |
-| 2023.8.x | ✅ |
+| -------- | --------- |
+| 2024.4.x | ✅ |
+| 2024.6.x | ✅ |

## Reporting a Vulnerability

@@ -31,12 +31,12 @@ To report a vulnerability, send an email to [security@goauthentik.io](mailto:se

authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories:

| Score | Severity |
-| --- | --- |
+| ---------- | -------- |
| 0.0 | None |
| 0.1 – 3.9 | Low |
| 4.0 – 6.9 | Medium |
| 7.0 – 8.9 | High |
| 9.0 – 10.0 | Critical |

## Disclosure process
|
@@ -1,12 +1,12 @@
 """authentik root module"""
-from os import environ
-from typing import Optional

-__version__ = "2023.10.5"
+from os import environ
+
+__version__ = "2024.6.1"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"


-def get_build_hash(fallback: Optional[str] = None) -> str:
+def get_build_hash(fallback: str | None = None) -> str:
     """Get build hash"""
     build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "")
     return fallback if build_hash == "" and fallback else build_hash
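A minimal usage sketch of the get_build_hash() helper shown above, assuming only the constant and function from this hunk; the hash value is a placeholder:

    from os import environ

    from authentik import ENV_GIT_HASH_KEY, get_build_hash

    # No GIT_BUILD_HASH in the environment: the fallback (or "") is returned
    print(get_build_hash("dev-fallback"))  # -> "dev-fallback"

    # With the environment variable set, its value takes precedence
    environ[ENV_GIT_HASH_KEY] = "deadbeef"
    print(get_build_hash("dev-fallback"))  # -> "deadbeef"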
@@ -1,4 +1,5 @@
 """Meta API"""
+
 from drf_spectacular.utils import extend_schema
 from rest_framework.fields import CharField
 from rest_framework.permissions import IsAuthenticated
@@ -1,4 +1,5 @@
 """authentik administration metrics"""
+
 from datetime import timedelta

 from django.db.models.functions import ExtractHour
@@ -1,18 +1,23 @@
 """authentik administration overview"""
+
 import platform
 from datetime import datetime
+from ssl import OPENSSL_VERSION
 from sys import version as python_version
 from typing import TypedDict

+from cryptography.hazmat.backends.openssl.backend import backend
 from django.utils.timezone import now
 from drf_spectacular.utils import extend_schema
-from gunicorn import version_info as gunicorn_version
 from rest_framework.fields import SerializerMethodField
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.views import APIView

+from authentik import get_full_version
 from authentik.core.api.utils import PassiveSerializer
+from authentik.enterprise.license import LicenseKey
+from authentik.lib.config import CONFIG
 from authentik.lib.utils.reflection import get_env
 from authentik.outposts.apps import MANAGED_OUTPOST
 from authentik.outposts.models import Outpost
@@ -23,11 +28,13 @@ class RuntimeDict(TypedDict):
     """Runtime information"""

     python_version: str
-    gunicorn_version: str
     environment: str
     architecture: str
     platform: str
     uname: str
+    openssl_version: str
+    openssl_fips_enabled: bool | None
+    authentik_version: str


 class SystemInfoSerializer(PassiveSerializer):
@@ -37,8 +44,9 @@ class SystemInfoSerializer(PassiveSerializer):
     http_host = SerializerMethodField()
     http_is_secure = SerializerMethodField()
     runtime = SerializerMethodField()
-    tenant = SerializerMethodField()
+    brand = SerializerMethodField()
     server_time = SerializerMethodField()
+    embedded_outpost_disabled = SerializerMethodField()
     embedded_outpost_host = SerializerMethodField()

     def get_http_headers(self, request: Request) -> dict[str, str]:
@@ -61,22 +69,30 @@ class SystemInfoSerializer(PassiveSerializer):
     def get_runtime(self, request: Request) -> RuntimeDict:
         """Get versions"""
         return {
-            "python_version": python_version,
-            "gunicorn_version": ".".join(str(x) for x in gunicorn_version),
-            "environment": get_env(),
             "architecture": platform.machine(),
+            "authentik_version": get_full_version(),
+            "environment": get_env(),
+            "openssl_fips_enabled": (
+                backend._fips_enabled if LicenseKey.get_total().is_valid() else None
+            ),
+            "openssl_version": OPENSSL_VERSION,
             "platform": platform.platform(),
+            "python_version": python_version,
             "uname": " ".join(platform.uname()),
         }

-    def get_tenant(self, request: Request) -> str:
-        """Currently active tenant"""
-        return str(request._request.tenant)
+    def get_brand(self, request: Request) -> str:
+        """Currently active brand"""
+        return str(request._request.brand)

     def get_server_time(self, request: Request) -> datetime:
         """Current server time"""
         return now()

+    def get_embedded_outpost_disabled(self, request: Request) -> bool:
+        """Whether the embedded outpost is disabled"""
+        return CONFIG.get_bool("outposts.disable_embedded_outpost", False)
+
     def get_embedded_outpost_host(self, request: Request) -> str:
         """Get the FQDN configured on the embedded outpost"""
         outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
@@ -1,134 +0,0 @@
-"""Tasks API"""
-from importlib import import_module
-
-from django.contrib import messages
-from django.http.response import Http404
-from django.utils.translation import gettext_lazy as _
-from drf_spectacular.types import OpenApiTypes
-from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
-from rest_framework.decorators import action
-from rest_framework.fields import (
-    CharField,
-    ChoiceField,
-    DateTimeField,
-    ListField,
-    SerializerMethodField,
-)
-from rest_framework.request import Request
-from rest_framework.response import Response
-from rest_framework.viewsets import ViewSet
-from structlog.stdlib import get_logger
-
-from authentik.api.decorators import permission_required
-from authentik.core.api.utils import PassiveSerializer
-from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
-from authentik.rbac.permissions import HasPermission
-
-LOGGER = get_logger()
-
-
-class TaskSerializer(PassiveSerializer):
-    """Serialize TaskInfo and TaskResult"""
-
-    task_name = CharField()
-    task_description = CharField()
-    task_finish_timestamp = DateTimeField(source="finish_time")
-    task_duration = SerializerMethodField()
-
-    status = ChoiceField(
-        source="result.status.name",
-        choices=[(x.name, x.name) for x in TaskResultStatus],
-    )
-    messages = ListField(source="result.messages")
-
-    def get_task_duration(self, instance: TaskInfo) -> int:
-        """Get the duration a task took to run"""
-        return max(instance.finish_timestamp - instance.start_timestamp, 0)
-
-    def to_representation(self, instance: TaskInfo):
-        """When a new version of authentik adds fields to TaskInfo,
-        the API will fail with an AttributeError, as the classes
-        are pickled in cache. In that case, just delete the info"""
-        try:
-            return super().to_representation(instance)
-        # pylint: disable=broad-except
-        except Exception:  # pragma: no cover
-            if isinstance(self.instance, list):
-                for inst in self.instance:
-                    inst.delete()
-            else:
-                self.instance.delete()
-            return {}
-
-
-class TaskViewSet(ViewSet):
-    """Read-only view set that returns all background tasks"""
-
-    permission_classes = [HasPermission("authentik_rbac.view_system_tasks")]
-    serializer_class = TaskSerializer
-
-    @extend_schema(
-        responses={
-            200: TaskSerializer(many=False),
-            404: OpenApiResponse(description="Task not found"),
-        },
-        parameters=[
-            OpenApiParameter(
-                "id",
-                type=OpenApiTypes.STR,
-                location=OpenApiParameter.PATH,
-                required=True,
-            ),
-        ],
-    )
-    def retrieve(self, request: Request, pk=None) -> Response:
-        """Get a single system task"""
-        task = TaskInfo.by_name(pk)
-        if not task:
-            raise Http404
-        return Response(TaskSerializer(task, many=False).data)
-
-    @extend_schema(responses={200: TaskSerializer(many=True)})
-    def list(self, request: Request) -> Response:
-        """List system tasks"""
-        tasks = sorted(TaskInfo.all().values(), key=lambda task: task.task_name)
-        return Response(TaskSerializer(tasks, many=True).data)
-
-    @permission_required(None, ["authentik_rbac.run_system_tasks"])
-    @extend_schema(
-        request=OpenApiTypes.NONE,
-        responses={
-            204: OpenApiResponse(description="Task retried successfully"),
-            404: OpenApiResponse(description="Task not found"),
-            500: OpenApiResponse(description="Failed to retry task"),
-        },
-        parameters=[
-            OpenApiParameter(
-                "id",
-                type=OpenApiTypes.STR,
-                location=OpenApiParameter.PATH,
-                required=True,
-            ),
-        ],
-    )
-    @action(detail=True, methods=["post"])
-    def retry(self, request: Request, pk=None) -> Response:
-        """Retry task"""
-        task = TaskInfo.by_name(pk)
-        if not task:
-            raise Http404
-        try:
-            task_module = import_module(task.task_call_module)
-            task_func = getattr(task_module, task.task_call_func)
-            LOGGER.debug("Running task", task=task_func)
-            task_func.delay(*task.task_call_args, **task.task_call_kwargs)
-            messages.success(
-                self.request,
-                _("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}),
-            )
-            return Response(status=204)
-        except (ImportError, AttributeError):  # pragma: no cover
-            LOGGER.warning("Failed to run task, remove state", task=task)
-            # if we get an import error, the module path has probably changed
-            task.delete()
-            return Response(status=500)
@@ -1,4 +1,5 @@
 """authentik administration overview"""
+
 from django.core.cache import cache
 from drf_spectacular.utils import extend_schema
 from packaging.version import parse
@@ -9,7 +10,7 @@ from rest_framework.response import Response
 from rest_framework.views import APIView

 from authentik import __version__, get_build_hash
-from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version
+from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version
 from authentik.core.api.utils import PassiveSerializer


@@ -18,6 +19,7 @@ class VersionSerializer(PassiveSerializer):

     version_current = SerializerMethodField()
     version_latest = SerializerMethodField()
+    version_latest_valid = SerializerMethodField()
     build_hash = SerializerMethodField()
     outdated = SerializerMethodField()

@@ -37,6 +39,10 @@ class VersionSerializer(PassiveSerializer):
             return __version__
         return version_in_cache

+    def get_version_latest_valid(self, _) -> bool:
+        """Check if latest version is valid"""
+        return cache.get(VERSION_CACHE_KEY) != VERSION_NULL
+
     def get_outdated(self, instance) -> bool:
         """Check if we're running the latest version"""
         return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance))
@@ -1,4 +1,5 @@
 """authentik administration overview"""
+
 from django.conf import settings
 from drf_spectacular.utils import extend_schema, inline_serializer
 from rest_framework.fields import IntegerField
@@ -1,4 +1,5 @@
 """authentik admin app config"""
+
 from prometheus_client import Gauge, Info

 from authentik.blueprints.apps import ManagedAppConfig
@@ -14,7 +15,3 @@ class AuthentikAdminConfig(ManagedAppConfig):
     label = "authentik_admin"
     verbose_name = "authentik Admin"
     default = True
-
-    def reconcile_load_admin_signals(self):
-        """Load admin signals"""
-        self.import_module("authentik.admin.signals")
@@ -1,4 +1,5 @@
 """authentik admin settings"""
+
 from celery.schedules import crontab

 from authentik.lib.utils.time import fqdn_rand
@@ -1,7 +1,7 @@
 """admin signals"""
+
 from django.dispatch import receiver

-from authentik.admin.api.tasks import TaskInfo
 from authentik.admin.apps import GAUGE_WORKERS
 from authentik.root.celery import CELERY_APP
 from authentik.root.monitoring import monitoring_set
@@ -12,10 +12,3 @@ def monitoring_set_workers(sender, **kwargs):
     """Set worker gauge"""
     count = len(CELERY_APP.control.ping(timeout=0.5))
     GAUGE_WORKERS.set(count)
-
-
-@receiver(monitoring_set)
-def monitoring_set_tasks(sender, **kwargs):
-    """Set task gauges"""
-    for task in TaskInfo.all().values():
-        task.update_metrics()
@@ -1,4 +1,5 @@
 """authentik admin tasks"""
+
 import re

 from django.core.cache import cache
@@ -11,17 +12,13 @@ from structlog.stdlib import get_logger
 from authentik import __version__, get_build_hash
 from authentik.admin.apps import PROM_INFO
 from authentik.events.models import Event, EventAction, Notification
-from authentik.events.monitored_tasks import (
-    MonitoredTask,
-    TaskResult,
-    TaskResultStatus,
-    prefill_task,
-)
+from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
 from authentik.lib.config import CONFIG
 from authentik.lib.utils.http import get_http_session
 from authentik.root.celery import CELERY_APP

 LOGGER = get_logger()
+VERSION_NULL = "0.0.0"
 VERSION_CACHE_KEY = "authentik_latest_version"
 VERSION_CACHE_TIMEOUT = 8 * 60 * 60  # 8 hours
 # Chop of the first ^ because we want to search the entire string
@@ -54,13 +51,13 @@ def clear_update_notifications():
             notification.delete()


-@CELERY_APP.task(bind=True, base=MonitoredTask)
+@CELERY_APP.task(bind=True, base=SystemTask)
 @prefill_task
-def update_latest_version(self: MonitoredTask):
+def update_latest_version(self: SystemTask):
     """Update latest version info"""
     if CONFIG.get_bool("disable_update_check"):
-        cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
-        self.set_status(TaskResult(TaskResultStatus.WARNING, messages=["Version check disabled."]))
+        cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
+        self.set_status(TaskStatus.WARNING, "Version check disabled.")
         return
     try:
         response = get_http_session().get(
@@ -70,9 +67,7 @@ def update_latest_version(self: MonitoredTask):
         data = response.json()
         upstream_version = data.get("stable", {}).get("version")
         cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT)
-        self.set_status(
-            TaskResult(TaskResultStatus.SUCCESSFUL, ["Successfully updated latest Version"])
-        )
+        self.set_status(TaskStatus.SUCCESSFUL, "Successfully updated latest Version")
         _set_prom_info()
         # Check if upstream version is newer than what we're running,
         # and if no event exists yet, create one.
@@ -88,8 +83,8 @@ def update_latest_version(self: MonitoredTask):
             event_dict["message"] = f"Changelog: {match.group()}"
         Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
     except (RequestException, IndexError) as exc:
-        cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
-        self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
+        cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
+        self.set_error(exc)


 _set_prom_info()
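The hunks above migrate update_latest_version from the MonitoredTask/TaskResult API to SystemTask/TaskStatus. A minimal sketch of a task written against the new interface, assuming only the names used in this diff (SystemTask, TaskStatus, prefill_task, set_status, set_error); the task itself is hypothetical:

    from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
    from authentik.root.celery import CELERY_APP


    @CELERY_APP.task(bind=True, base=SystemTask)
    @prefill_task
    def example_housekeeping(self: SystemTask):
        """Hypothetical task demonstrating the new status reporting"""
        try:
            # ... actual work would happen here ...
            self.set_status(TaskStatus.SUCCESSFUL, "Housekeeping finished")
        except RuntimeError as exc:
            # replaces the old TaskResult(TaskResultStatus.ERROR).with_error(exc) pattern
            self.set_error(exc)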
@@ -1,4 +1,5 @@
 """test admin api"""
+
 from json import loads

 from django.test import TestCase
@@ -7,8 +8,6 @@ from django.urls import reverse
 from authentik import __version__
 from authentik.blueprints.tests import reconcile_app
 from authentik.core.models import Group, User
-from authentik.core.tasks import clean_expired_models
-from authentik.events.monitored_tasks import TaskResultStatus
 from authentik.lib.generators import generate_id


@@ -23,53 +22,6 @@ class TestAdminAPI(TestCase):
         self.group.save()
         self.client.force_login(self.user)

-    def test_tasks(self):
-        """Test Task API"""
-        clean_expired_models.delay()
-        response = self.client.get(reverse("authentik_api:admin_system_tasks-list"))
-        self.assertEqual(response.status_code, 200)
-        body = loads(response.content)
-        self.assertTrue(any(task["task_name"] == "clean_expired_models" for task in body))
-
-    def test_tasks_single(self):
-        """Test Task API (read single)"""
-        clean_expired_models.delay()
-        response = self.client.get(
-            reverse(
-                "authentik_api:admin_system_tasks-detail",
-                kwargs={"pk": "clean_expired_models"},
-            )
-        )
-        self.assertEqual(response.status_code, 200)
-        body = loads(response.content)
-        self.assertEqual(body["status"], TaskResultStatus.SUCCESSFUL.name)
-        self.assertEqual(body["task_name"], "clean_expired_models")
-        response = self.client.get(
-            reverse("authentik_api:admin_system_tasks-detail", kwargs={"pk": "qwerqwer"})
-        )
-        self.assertEqual(response.status_code, 404)
-
-    def test_tasks_retry(self):
-        """Test Task API (retry)"""
-        clean_expired_models.delay()
-        response = self.client.post(
-            reverse(
-                "authentik_api:admin_system_tasks-retry",
-                kwargs={"pk": "clean_expired_models"},
-            )
-        )
-        self.assertEqual(response.status_code, 204)
-
-    def test_tasks_retry_404(self):
-        """Test Task API (retry, 404)"""
-        response = self.client.post(
-            reverse(
-                "authentik_api:admin_system_tasks-retry",
-                kwargs={"pk": "qwerqewrqrqewrqewr"},
-            )
-        )
-        self.assertEqual(response.status_code, 404)
-
     def test_version(self):
         """Test Version API"""
         response = self.client.get(reverse("authentik_api:admin_version"))
@@ -1,4 +1,5 @@
 """test admin tasks"""
+
 from django.core.cache import cache
 from django.test import TestCase
 from requests_mock import Mocker
@@ -1,15 +1,14 @@
 """API URLs"""
+
 from django.urls import path

 from authentik.admin.api.meta import AppsViewSet, ModelViewSet
 from authentik.admin.api.metrics import AdministrationMetricsViewSet
 from authentik.admin.api.system import SystemView
-from authentik.admin.api.tasks import TaskViewSet
 from authentik.admin.api.version import VersionView
 from authentik.admin.api.workers import WorkerView

 api_urlpatterns = [
-    ("admin/system_tasks", TaskViewSet, "admin_system_tasks"),
     ("admin/apps", AppsViewSet, "apps"),
     ("admin/models", ModelViewSet, "models"),
     path(
@@ -10,26 +10,3 @@ class AuthentikAPIConfig(AppConfig):
     label = "authentik_api"
     mountpoint = "api/"
     verbose_name = "authentik API"
-
-    def ready(self) -> None:
-        from drf_spectacular.extensions import OpenApiAuthenticationExtension
-
-        from authentik.api.authentication import TokenAuthentication
-
-        # Class is defined here as it needs to be created early enough that drf-spectacular will
-        # find it, but also won't cause any import issues
-        # pylint: disable=unused-variable
-        class TokenSchema(OpenApiAuthenticationExtension):
-            """Auth schema"""
-
-            target_class = TokenAuthentication
-            name = "authentik"
-
-            def get_security_definition(self, auto_schema):
-                """Auth schema"""
-                return {
-                    "type": "apiKey",
-                    "in": "header",
-                    "name": "Authorization",
-                    "scheme": "bearer",
-                }
@@ -1,8 +1,10 @@
 """API Authentication"""
+
 from hmac import compare_digest
-from typing import Any, Optional
+from typing import Any

 from django.conf import settings
+from drf_spectacular.extensions import OpenApiAuthenticationExtension
 from rest_framework.authentication import BaseAuthentication, get_authorization_header
 from rest_framework.exceptions import AuthenticationFailed
 from rest_framework.request import Request
@@ -16,7 +18,7 @@ from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
 LOGGER = get_logger()


-def validate_auth(header: bytes) -> Optional[str]:
+def validate_auth(header: bytes) -> str | None:
     """Validate that the header is in a correct format,
     returns type and credentials"""
     auth_credentials = header.decode().strip()
@@ -31,7 +33,7 @@ def validate_auth(header: bytes) -> Optional[str]:
     return auth_credentials


-def bearer_auth(raw_header: bytes) -> Optional[User]:
+def bearer_auth(raw_header: bytes) -> User | None:
     """raw_header in the Format of `Bearer ....`"""
     user = auth_user_lookup(raw_header)
     if not user:
@@ -41,7 +43,7 @@ def bearer_auth(raw_header: bytes) -> Optional[User]:
     return user


-def auth_user_lookup(raw_header: bytes) -> Optional[User]:
+def auth_user_lookup(raw_header: bytes) -> User | None:
     """raw_header in the Format of `Bearer ....`"""
     from authentik.providers.oauth2.models import AccessToken

@@ -74,7 +76,7 @@ def auth_user_lookup(raw_header: bytes) -> Optional[User]:
     raise AuthenticationFailed("Token invalid/expired")


-def token_secret_key(value: str) -> Optional[User]:
+def token_secret_key(value: str) -> User | None:
     """Check if the token is the secret key
     and return the service account for the managed outpost"""
     from authentik.outposts.apps import MANAGED_OUTPOST
@@ -101,3 +103,14 @@ class TokenAuthentication(BaseAuthentication):
             return None

         return (user, None)  # pragma: no cover
+
+
+class TokenSchema(OpenApiAuthenticationExtension):
+    """Auth schema"""
+
+    target_class = TokenAuthentication
+    name = "authentik"
+
+    def get_security_definition(self, auto_schema):
+        """Auth schema"""
+        return {"type": "http", "scheme": "bearer"}
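The last hunk above moves TokenSchema into this module and swaps the OpenAPI security definition from an apiKey entry to a plain HTTP bearer scheme; clients keep sending the token in the `Bearer ...` format that bearer_auth() parses. A quick illustrative request, with placeholder URL and token:

    import requests

    API = "https://authentik.example.com/api/v3/admin/version/"  # placeholder host/endpoint
    TOKEN = "<api-token>"  # placeholder token

    response = requests.get(API, headers={"Authorization": f"Bearer {TOKEN}"})
    response.raise_for_status()
    print(response.json())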
@@ -1,4 +1,5 @@
 """API Authorization"""
+
 from django.conf import settings
 from django.db.models import Model
 from django.db.models.query import QuerySet
@@ -1,4 +1,5 @@
 """Pagination which includes total pages and current page"""
+
 from rest_framework import pagination
 from rest_framework.response import Response

@@ -1,4 +1,5 @@
 """Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""
+
 from django.utils.translation import gettext_lazy as _
 from drf_spectacular.generators import SchemaGenerator
 from drf_spectacular.plumbing import (
@@ -11,6 +12,7 @@ from drf_spectacular.settings import spectacular_settings
 from drf_spectacular.types import OpenApiTypes
 from rest_framework.settings import api_settings

+from authentik.api.apps import AuthentikAPIConfig
 from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA


@@ -100,3 +102,12 @@ def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
         comp = result["components"]["schemas"][component]
         comp["additionalProperties"] = {}
     return result
+
+
+def preprocess_schema_exclude_non_api(endpoints, **kwargs):
+    """Filter out all API Views which are not mounted under /api"""
+    return [
+        (path, path_regex, method, callback)
+        for path, path_regex, method, callback in endpoints
+        if path.startswith("/" + AuthentikAPIConfig.mountpoint)
+    ]
@@ -1,13 +1,13 @@
 {% extends "base/skeleton.html" %}

-{% load static %}
+{% load authentik_core %}

 {% block title %}
-API Browser - {{ tenant.branding_title }}
+API Browser - {{ brand.branding_title }}
 {% endblock %}

 {% block head %}
-<script src="{% static 'dist/standalone/api-browser/index.js' %}?version={{ version }}" type="module"></script>
+{% versioned_script "dist/standalone/api-browser/index-%v.js" %}
 <meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
 <meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
 {% endblock %}
@@ -1,4 +1,5 @@
 """Test API Authentication"""
+
 import json
 from base64 import b64encode

@@ -24,17 +25,17 @@ class TestAPIAuth(TestCase):
     def test_invalid_type(self):
         """Test invalid type"""
         with self.assertRaises(AuthenticationFailed):
-            bearer_auth("foo bar".encode())
+            bearer_auth(b"foo bar")

     def test_invalid_empty(self):
         """Test invalid type"""
-        self.assertIsNone(bearer_auth("Bearer ".encode()))
-        self.assertIsNone(bearer_auth("".encode()))
+        self.assertIsNone(bearer_auth(b"Bearer "))
+        self.assertIsNone(bearer_auth(b""))

     def test_invalid_no_token(self):
         """Test invalid with no token"""
         with self.assertRaises(AuthenticationFailed):
-            auth = b64encode(":abc".encode()).decode()
+            auth = b64encode(b":abc").decode()
             self.assertIsNone(bearer_auth(f"Basic :{auth}".encode()))

     def test_bearer_valid(self):
@@ -1,4 +1,5 @@
 """Test config API"""
+
 from json import loads

 from django.urls import reverse
@@ -1,34 +0,0 @@
-"""test decorators api"""
-from django.urls import reverse
-from guardian.shortcuts import assign_perm
-from rest_framework.test import APITestCase
-
-from authentik.core.models import Application, User
-from authentik.lib.generators import generate_id
-
-
-class TestAPIDecorators(APITestCase):
-    """test decorators api"""
-
-    def setUp(self) -> None:
-        super().setUp()
-        self.user = User.objects.create(username="test-user")
-
-    def test_obj_perm_denied(self):
-        """Test object perm denied"""
-        self.client.force_login(self.user)
-        app = Application.objects.create(name=generate_id(), slug=generate_id())
-        response = self.client.get(
-            reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
-        )
-        self.assertEqual(response.status_code, 403)
-
-    def test_other_perm_denied(self):
-        """Test other perm denied"""
-        self.client.force_login(self.user)
-        app = Application.objects.create(name=generate_id(), slug=generate_id())
-        assign_perm("authentik_core.view_application", self.user, app)
-        response = self.client.get(
-            reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
-        )
-        self.assertEqual(response.status_code, 403)
@@ -1,4 +1,5 @@
 """Schema generation tests"""
+
 from django.urls import reverse
 from rest_framework.test import APITestCase
 from yaml import safe_load
@@ -1,5 +1,6 @@
 """authentik API Modelviewset tests"""
-from typing import Callable
+
+from collections.abc import Callable

 from django.test import TestCase
 from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet
@@ -25,6 +26,6 @@ def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable:


 for _, viewset, _ in router.registry:
-    if not issubclass(viewset, (ModelViewSet, ReadOnlyModelViewSet)):
+    if not issubclass(viewset, ModelViewSet | ReadOnlyModelViewSet):
         continue
     setattr(TestModelViewSets, f"test_viewset_{viewset.__name__}", viewset_tester_factory(viewset))
@@ -1,4 +1,5 @@
 """authentik api urls"""
+
 from django.urls import include, path

 from authentik.api.v3.urls import urlpatterns as v3_urls
|
|||||||
"""core Configs API"""
|
"""core Configs API"""
|
||||||
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
@ -67,12 +68,16 @@ class ConfigView(APIView):
|
|||||||
"""Get all capabilities this server instance supports"""
|
"""Get all capabilities this server instance supports"""
|
||||||
caps = []
|
caps = []
|
||||||
deb_test = settings.DEBUG or settings.TEST
|
deb_test = settings.DEBUG or settings.TEST
|
||||||
if Path(settings.MEDIA_ROOT).is_mount() or deb_test:
|
if (
|
||||||
|
CONFIG.get("storage.media.backend", "file") == "s3"
|
||||||
|
or Path(settings.STORAGES["default"]["OPTIONS"]["location"]).is_mount()
|
||||||
|
or deb_test
|
||||||
|
):
|
||||||
caps.append(Capabilities.CAN_SAVE_MEDIA)
|
caps.append(Capabilities.CAN_SAVE_MEDIA)
|
||||||
for processor in get_context_processors():
|
for processor in get_context_processors():
|
||||||
if cap := processor.capability():
|
if cap := processor.capability():
|
||||||
caps.append(cap)
|
caps.append(cap)
|
||||||
if CONFIG.get_bool("impersonation"):
|
if self.request.tenant.impersonation:
|
||||||
caps.append(Capabilities.CAN_IMPERSONATE)
|
caps.append(Capabilities.CAN_IMPERSONATE)
|
||||||
if settings.DEBUG: # pragma: no cover
|
if settings.DEBUG: # pragma: no cover
|
||||||
caps.append(Capabilities.CAN_DEBUG)
|
caps.append(Capabilities.CAN_DEBUG)
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
"""api v3 urls"""
|
"""api v3 urls"""
|
||||||
|
|
||||||
from importlib import import_module
|
from importlib import import_module
|
||||||
|
|
||||||
from django.urls import path
|
from django.urls import path
|
||||||
@ -32,7 +33,7 @@ for _authentik_app in get_apps():
|
|||||||
app_name=_authentik_app.name,
|
app_name=_authentik_app.name,
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
urls: list = getattr(api_urls, "api_urlpatterns")
|
urls: list = api_urls.api_urlpatterns
|
||||||
for url in urls:
|
for url in urls:
|
||||||
if isinstance(url, URLPattern):
|
if isinstance(url, URLPattern):
|
||||||
_other_urls.append(url)
|
_other_urls.append(url)
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
"""General API Views"""
|
"""General API Views"""
|
||||||
|
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from django.urls import reverse
|
from django.urls import reverse
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
"""Serializer mixin for managed models"""
|
"""Serializer mixin for managed models"""
|
||||||
|
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from drf_spectacular.utils import extend_schema, inline_serializer
|
from drf_spectacular.utils import extend_schema, inline_serializer
|
||||||
from rest_framework.decorators import action
|
from rest_framework.decorators import action
|
||||||
@ -9,13 +10,13 @@ from rest_framework.response import Response
|
|||||||
from rest_framework.serializers import ListSerializer, ModelSerializer
|
from rest_framework.serializers import ListSerializer, ModelSerializer
|
||||||
from rest_framework.viewsets import ModelViewSet
|
from rest_framework.viewsets import ModelViewSet
|
||||||
|
|
||||||
from authentik.api.decorators import permission_required
|
|
||||||
from authentik.blueprints.models import BlueprintInstance
|
from authentik.blueprints.models import BlueprintInstance
|
||||||
from authentik.blueprints.v1.importer import Importer
|
from authentik.blueprints.v1.importer import Importer
|
||||||
from authentik.blueprints.v1.oci import OCI_PREFIX
|
from authentik.blueprints.v1.oci import OCI_PREFIX
|
||||||
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
|
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
|
||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
from authentik.core.api.utils import JSONDictField, PassiveSerializer
|
from authentik.core.api.utils import JSONDictField, PassiveSerializer
|
||||||
|
from authentik.rbac.decorators import permission_required
|
||||||
|
|
||||||
|
|
||||||
class ManagedSerializer:
|
class ManagedSerializer:
|
||||||
@ -51,7 +52,9 @@ class BlueprintInstanceSerializer(ModelSerializer):
|
|||||||
valid, logs = Importer.from_string(content, context).validate()
|
valid, logs = Importer.from_string(content, context).validate()
|
||||||
if not valid:
|
if not valid:
|
||||||
text_logs = "\n".join([x["event"] for x in logs])
|
text_logs = "\n".join([x["event"] for x in logs])
|
||||||
raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs}))
|
raise ValidationError(
|
||||||
|
_("Failed to validate blueprint: {logs}".format_map({"logs": text_logs}))
|
||||||
|
)
|
||||||
return content
|
return content
|
||||||
|
|
||||||
def validate(self, attrs: dict) -> dict:
|
def validate(self, attrs: dict) -> dict:
|
||||||
|
@@ -1,5 +1,6 @@
 """authentik Blueprints app"""
+
+from collections.abc import Callable
 from importlib import import_module
 from inspect import ismethod

@@ -7,40 +8,100 @@ from django.apps import AppConfig
 from django.db import DatabaseError, InternalError, ProgrammingError
 from structlog.stdlib import BoundLogger, get_logger

+from authentik.root.signals import startup
+

 class ManagedAppConfig(AppConfig):
     """Basic reconciliation logic for apps"""

-    _logger: BoundLogger
+    logger: BoundLogger
+
+    RECONCILE_GLOBAL_CATEGORY: str = "global"
+    RECONCILE_TENANT_CATEGORY: str = "tenant"

     def __init__(self, app_name: str, *args, **kwargs) -> None:
         super().__init__(app_name, *args, **kwargs)
-        self._logger = get_logger().bind(app_name=app_name)
+        self.logger = get_logger().bind(app_name=app_name)

     def ready(self) -> None:
-        self.reconcile()
+        self.import_related()
+        startup.connect(self._on_startup_callback, dispatch_uid=self.label)
         return super().ready()

+    def _on_startup_callback(self, sender, **_):
+        self._reconcile_global()
+        self._reconcile_tenant()
+
+    def import_related(self):
+        """Automatically import related modules which rely on just being imported
+        to register themselves (mainly django signals and celery tasks)"""
+
+        def import_relative(rel_module: str):
+            try:
+                module_name = f"{self.name}.{rel_module}"
+                import_module(module_name)
+                self.logger.info("Imported related module", module=module_name)
+            except ModuleNotFoundError:
+                pass
+
+        import_relative("checks")
+        import_relative("tasks")
+        import_relative("signals")
+
     def import_module(self, path: str):
         """Load module"""
         import_module(path)

-    def reconcile(self) -> None:
-        """reconcile ourselves"""
-        prefix = "reconcile_"
+    def _reconcile(self, prefix: str) -> None:
         for meth_name in dir(self):
             meth = getattr(self, meth_name)
             if not ismethod(meth):
                 continue
-            if not meth_name.startswith(prefix):
+            category = getattr(meth, "_authentik_managed_reconcile", None)
+            if category != prefix:
                 continue
             name = meth_name.replace(prefix, "")
             try:
-                self._logger.debug("Starting reconciler", name=name)
+                self.logger.debug("Starting reconciler", name=name)
                 meth()
-                self._logger.debug("Successfully reconciled", name=name)
+                self.logger.debug("Successfully reconciled", name=name)
             except (DatabaseError, ProgrammingError, InternalError) as exc:
-                self._logger.warning("Failed to run reconcile", name=name, exc=exc)
+                self.logger.warning("Failed to run reconcile", name=name, exc=exc)
+
+    @staticmethod
+    def reconcile_tenant(func: Callable):
+        """Mark a function to be called on startup (for each tenant)"""
+        func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_TENANT_CATEGORY
+        return func
+
+    @staticmethod
+    def reconcile_global(func: Callable):
+        """Mark a function to be called on startup (globally)"""
+        func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_GLOBAL_CATEGORY
+        return func
+
+    def _reconcile_tenant(self) -> None:
+        """reconcile ourselves for tenanted methods"""
+        from authentik.tenants.models import Tenant
+
+        try:
+            tenants = list(Tenant.objects.filter(ready=True))
+        except (DatabaseError, ProgrammingError, InternalError) as exc:
+            self.logger.debug("Failed to get tenants to run reconcile", exc=exc)
+            return
+        for tenant in tenants:
+            with tenant:
+                self._reconcile(self.RECONCILE_TENANT_CATEGORY)
+
+    def _reconcile_global(self) -> None:
+        """
+        reconcile ourselves for global methods.
+        Used for signals, tasks, etc. Database queries should not be made in here.
+        """
+        from django_tenants.utils import get_public_schema_name, schema_context
+
+        with schema_context(get_public_schema_name()):
+            self._reconcile(self.RECONCILE_GLOBAL_CATEGORY)


 class AuthentikBlueprintsConfig(ManagedAppConfig):
@@ -51,11 +112,13 @@ class AuthentikBlueprintsConfig(ManagedAppConfig):
     verbose_name = "authentik Blueprints"
     default = True

-    def reconcile_load_blueprints_v1_tasks(self):
+    @ManagedAppConfig.reconcile_global
+    def load_blueprints_v1_tasks(self):
         """Load v1 tasks"""
         self.import_module("authentik.blueprints.v1.tasks")

-    def reconcile_blueprints_discovery(self):
+    @ManagedAppConfig.reconcile_tenant
+    def blueprints_discovery(self):
         """Run blueprint discovery"""
         from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints

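The ManagedAppConfig rework above replaces the reconcile_* name-prefix convention with explicit reconcile_global/reconcile_tenant decorators, as AuthentikBlueprintsConfig demonstrates. For reference, a minimal sketch of another app config using the new markers; the app name, label, and method bodies are hypothetical:

    from authentik.blueprints.apps import ManagedAppConfig


    class ExampleAppConfig(ManagedAppConfig):
        """Hypothetical app config using the new reconcile decorators"""

        name = "authentik.example"
        label = "authentik_example"
        default = True

        @ManagedAppConfig.reconcile_global
        def register_signals(self):
            """Runs once at startup in the public schema (keep DB queries out of here)"""
            self.import_module("authentik.example.signals")

        @ManagedAppConfig.reconcile_tenant
        def seed_defaults(self):
            """Runs at startup once for every ready tenant"""
            # tenant-scoped setup work would go here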
@@ -1,4 +1,5 @@
 """Apply blueprint from commandline"""
+
 from sys import exit as sys_exit

 from django.core.management.base import BaseCommand, no_translations
@@ -6,6 +7,7 @@ from structlog.stdlib import get_logger

 from authentik.blueprints.models import BlueprintInstance
 from authentik.blueprints.v1.importer import Importer
+from authentik.tenants.models import Tenant

 LOGGER = get_logger()

@@ -16,14 +18,16 @@ class Command(BaseCommand):
     @no_translations
     def handle(self, *args, **options):
         """Apply all blueprints in order, abort when one fails to import"""
-        for blueprint_path in options.get("blueprints", []):
-            content = BlueprintInstance(path=blueprint_path).retrieve()
-            importer = Importer.from_string(content)
-            valid, _ = importer.validate()
-            if not valid:
-                self.stderr.write("blueprint invalid")
-                sys_exit(1)
-            importer.apply()
+        for tenant in Tenant.objects.filter(ready=True):
+            with tenant:
+                for blueprint_path in options.get("blueprints", []):
+                    content = BlueprintInstance(path=blueprint_path).retrieve()
+                    importer = Importer.from_string(content)
+                    valid, _ = importer.validate()
+                    if not valid:
+                        self.stderr.write("blueprint invalid")
+                        sys_exit(1)
+                    importer.apply()

     def add_arguments(self, parser):
         parser.add_argument("blueprints", nargs="+", type=str)
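With this change the command loops over every ready tenant before applying the given blueprint files; the arguments themselves are unchanged. Assuming the command is registered under the name apply_blueprint, it can also be driven from Python via Django's management API (the blueprint path is a placeholder):

    from django.core.management import call_command

    # equivalent to invoking the management command from the CLI
    call_command("apply_blueprint", "/blueprints/custom/my-blueprint.yaml")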
@@ -1,17 +1,19 @@
 """Export blueprint of current authentik install"""
-from django.core.management.base import BaseCommand, no_translations
+
+from django.core.management.base import no_translations
 from structlog.stdlib import get_logger

 from authentik.blueprints.v1.exporter import Exporter
+from authentik.tenants.management import TenantCommand

 LOGGER = get_logger()


-class Command(BaseCommand):
+class Command(TenantCommand):
     """Export blueprint of current authentik install"""

     @no_translations
-    def handle(self, *args, **options):
+    def handle_per_tenant(self, *args, **options):
         """Export blueprint of current authentik install"""
         exporter = Exporter()
         self.stdout.write(exporter.export_to_string())
@@ -1,14 +1,17 @@
 """Generate JSON Schema for blueprints"""
+
 from json import dumps
 from typing import Any

 from django.core.management.base import BaseCommand, no_translations
-from django.db.models import Model
-from drf_jsonschema_serializer.convert import field_to_converter
+from django.db.models import Model, fields
+from drf_jsonschema_serializer.convert import converter, field_to_converter
 from rest_framework.fields import Field, JSONField, UUIDField
+from rest_framework.relations import PrimaryKeyRelatedField
 from rest_framework.serializers import Serializer
 from structlog.stdlib import get_logger

+from authentik import __version__
 from authentik.blueprints.v1.common import BlueprintEntryDesiredState
 from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed
 from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
@@ -17,6 +20,23 @@ from authentik.lib.models import SerializerModel
 LOGGER = get_logger()


+@converter
+class PrimaryKeyRelatedFieldConverter:
+    """Custom primary key field converter which is aware of non-integer based PKs
+
+    This is not an exhaustive fix for other non-int PKs, however in authentik we either
+    use UUIDs or ints"""
+
+    field_class = PrimaryKeyRelatedField
+
+    def convert(self, field: PrimaryKeyRelatedField):
+        model: Model = field.queryset.model
+        pk_field = model._meta.pk
+        if isinstance(pk_field, fields.UUIDField):
+            return {"type": "string", "format": "uuid"}
+        return {"type": "integer"}
+
+
 class Command(BaseCommand):
     """Generate JSON Schema for blueprints"""

@@ -28,7 +48,7 @@ class Command(BaseCommand):
             "$schema": "http://json-schema.org/draft-07/schema",
             "$id": "https://goauthentik.io/blueprints/schema.json",
             "type": "object",
-            "title": "authentik Blueprint schema",
+            "title": f"authentik {__version__} Blueprint schema",
             "required": ["version", "entries"],
             "properties": {
                 "version": {
@@ -14,7 +14,7 @@ from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM
 from authentik.lib.config import CONFIG
 
 
-def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
+def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
     """Check if blueprint should be imported"""
     from authentik.blueprints.models import BlueprintInstanceStatus
     from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
@@ -29,7 +29,9 @@ def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
     if version != 1:
         return
     blueprint_file.seek(0)
-    instance: BlueprintInstance = BlueprintInstance.objects.filter(path=path).first()
+    instance: BlueprintInstance = (
+        BlueprintInstance.objects.using(db_alias).filter(path=path).first()
+    )
     rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
     meta = None
     if metadata:
@@ -37,7 +39,7 @@ def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
         if meta.labels.get(LABEL_AUTHENTIK_INSTANTIATE, "").lower() == "false":
             return
     if not instance:
-        instance = BlueprintInstance(
+        BlueprintInstance.objects.using(db_alias).create(
             name=meta.name if meta else str(rel_path),
             path=str(rel_path),
             context={},
@@ -47,7 +49,6 @@ def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
             last_applied_hash="",
             metadata=metadata or {},
         )
-        instance.save()
 
 
 def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
@@ -56,7 +57,7 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEdit
 
     db_alias = schema_editor.connection.alias
     for file in glob(f"{CONFIG.get('blueprints_dir')}/**/*.yaml", recursive=True):
-        check_blueprint_v1_file(BlueprintInstance, Path(file))
+        check_blueprint_v1_file(BlueprintInstance, db_alias, Path(file))
 
     for blueprint in BlueprintInstance.objects.using(db_alias).all():
         # If we already have flows (and we should always run before flow migrations)
@@ -1,4 +1,5 @@
 """blueprint models"""
+
 from pathlib import Path
 from uuid import uuid4
 
@@ -70,6 +71,19 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
     enabled = models.BooleanField(default=True)
     managed_models = ArrayField(models.TextField(), default=list)
 
+    class Meta:
+        verbose_name = _("Blueprint Instance")
+        verbose_name_plural = _("Blueprint Instances")
+        unique_together = (
+            (
+                "name",
+                "path",
+            ),
+        )
+
+    def __str__(self) -> str:
+        return f"Blueprint Instance {self.name}"
+
     def retrieve_oci(self) -> str:
         """Get blueprint from an OCI registry"""
         client = BlueprintOCIClient(self.path.replace(OCI_PREFIX, "https://"))
@@ -88,7 +102,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
                 raise BlueprintRetrievalFailed("Invalid blueprint path")
             with full_path.open("r", encoding="utf-8") as _file:
                 return _file.read()
-        except (IOError, OSError) as exc:
+        except OSError as exc:
             raise BlueprintRetrievalFailed(exc) from exc
 
     def retrieve(self) -> str:
@@ -104,16 +118,3 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
         from authentik.blueprints.api import BlueprintInstanceSerializer
 
         return BlueprintInstanceSerializer
-
-    def __str__(self) -> str:
-        return f"Blueprint Instance {self.name}"
-
-    class Meta:
-        verbose_name = _("Blueprint Instance")
-        verbose_name_plural = _("Blueprint Instances")
-        unique_together = (
-            (
-                "name",
-                "path",
-            ),
-        )
@@ -1,4 +1,5 @@
 """blueprint Settings"""
+
 from celery.schedules import crontab
 
 from authentik.lib.utils.time import fqdn_rand
@@ -1,6 +1,7 @@
 """Blueprint helpers"""
+
+from collections.abc import Callable
 from functools import wraps
-from typing import Callable
 
 from django.apps import apps
 
@@ -38,7 +39,7 @@ def reconcile_app(app_name: str):
     def wrapper(*args, **kwargs):
         config = apps.get_app_config(app_name)
         if isinstance(config, ManagedAppConfig):
-            config.reconcile()
+            config._on_startup_callback(None)
         return func(*args, **kwargs)
 
     return wrapper
@@ -1,4 +1,5 @@
 """authentik managed models tests"""
+
 from django.test import TestCase
 
 from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
@@ -1,4 +1,5 @@
 """Test blueprints OCI"""
+
 from django.test import TransactionTestCase
 from requests_mock import Mocker
 
@@ -1,22 +1,23 @@
 """test packaged blueprints"""
+
+from collections.abc import Callable
 from pathlib import Path
-from typing import Callable
 
 from django.test import TransactionTestCase
 
 from authentik.blueprints.models import BlueprintInstance
 from authentik.blueprints.tests import apply_blueprint
 from authentik.blueprints.v1.importer import Importer
-from authentik.tenants.models import Tenant
+from authentik.brands.models import Brand
 
 
 class TestPackaged(TransactionTestCase):
     """Empty class, test methods are added dynamically"""
 
-    @apply_blueprint("default/default-tenant.yaml")
+    @apply_blueprint("default/default-brand.yaml")
     def test_decorator_static(self):
         """Test @apply_blueprint decorator"""
-        self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())
+        self.assertTrue(Brand.objects.filter(domain="authentik-default").exists())
 
 
 def blueprint_tester(file_name: Path) -> Callable:
@@ -1,5 +1,6 @@
 """authentik managed models tests"""
-from typing import Callable, Type
+
+from collections.abc import Callable
 
 from django.apps import apps
 from django.test import TestCase
@@ -13,7 +14,7 @@ class TestModels(TestCase):
     """Test Models"""
 
 
-def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
+def serializer_tester_factory(test_model: type[SerializerModel]) -> Callable:
     """Test serializer"""
 
     def tester(self: TestModels):
@@ -1,4 +1,5 @@
 """Test blueprints v1"""
+
 from os import environ
 
 from django.test import TransactionTestCase
@@ -1,4 +1,5 @@
 """Test blueprints v1 api"""
+
 from json import loads
 from tempfile import NamedTemporaryFile, mkdtemp
 
@@ -1,4 +1,5 @@
 """Test blueprints v1"""
+
 from django.test import TransactionTestCase
 
 from authentik.blueprints.v1.importer import Importer
@@ -1,4 +1,5 @@
 """Test blueprints v1"""
+
 from django.test import TransactionTestCase
 
 from authentik.blueprints.v1.importer import Importer
@@ -1,4 +1,5 @@
 """Test blueprints v1"""
+
 from django.test import TransactionTestCase
 
 from authentik.blueprints.v1.importer import Importer
@@ -1,4 +1,5 @@
 """Test blueprints v1 tasks"""
+
 from hashlib import sha512
 from tempfile import NamedTemporaryFile, mkdtemp
 
@@ -53,7 +54,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
             file.seek(0)
             file_hash = sha512(file.read().encode()).hexdigest()
             file.flush()
-            blueprints_discovery()  # pylint: disable=no-value-for-parameter
+            blueprints_discovery()
             instance = BlueprintInstance.objects.filter(name=blueprint_id).first()
             self.assertEqual(instance.last_applied_hash, file_hash)
             self.assertEqual(
@@ -81,7 +82,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
                 )
             )
             file.flush()
-            blueprints_discovery()  # pylint: disable=no-value-for-parameter
+            blueprints_discovery()
             blueprint = BlueprintInstance.objects.filter(name="foo").first()
             self.assertEqual(
                 blueprint.last_applied_hash,
@@ -106,7 +107,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
                 )
             )
             file.flush()
-            blueprints_discovery()  # pylint: disable=no-value-for-parameter
+            blueprints_discovery()
             blueprint.refresh_from_db()
             self.assertEqual(
                 blueprint.last_applied_hash,
@@ -148,7 +149,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
                 instance.status,
                 BlueprintInstanceStatus.UNKNOWN,
             )
-            apply_blueprint(instance.pk)  # pylint: disable=no-value-for-parameter
+            apply_blueprint(instance.pk)
             instance.refresh_from_db()
             self.assertEqual(instance.last_applied_hash, "")
             self.assertEqual(
@@ -1,4 +1,5 @@
 """API URLs"""
+
 from authentik.blueprints.api import BlueprintInstanceViewSet
 
 api_urlpatterns = [
@@ -1,12 +1,14 @@
 """transfer common classes"""
+
 from collections import OrderedDict
+from collections.abc import Iterable, Mapping
 from copy import copy
 from dataclasses import asdict, dataclass, field, is_dataclass
 from enum import Enum
 from functools import reduce
 from operator import ixor
 from os import getenv
-from typing import Any, Iterable, Literal, Mapping, Optional, Union
+from typing import Any, Literal, Union
 from uuid import UUID
 
 from deepmerge import always_merger
@@ -44,7 +46,7 @@ def get_attrs(obj: SerializerModel) -> dict[str, Any]:
 class BlueprintEntryState:
     """State of a single instance"""
 
-    instance: Optional[Model] = None
+    instance: Model | None = None
 
 
 class BlueprintEntryDesiredState(Enum):
@@ -66,14 +68,14 @@ class BlueprintEntry:
     )
     conditions: list[Any] = field(default_factory=list)
     identifiers: dict[str, Any] = field(default_factory=dict)
-    attrs: Optional[dict[str, Any]] = field(default_factory=dict)
+    attrs: dict[str, Any] | None = field(default_factory=dict)
 
-    id: Optional[str] = None
+    id: str | None = None
 
     _state: BlueprintEntryState = field(default_factory=BlueprintEntryState)
 
     def __post_init__(self, *args, **kwargs) -> None:
-        self.__tag_contexts: list["YAMLTagContext"] = []
+        self.__tag_contexts: list[YAMLTagContext] = []
 
     @staticmethod
     def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry":
@@ -91,10 +93,10 @@ class BlueprintEntry:
             attrs=all_attrs,
         )
 
-    def _get_tag_context(
+    def get_tag_context(
         self,
         depth: int = 0,
-        context_tag_type: Optional[type["YAMLTagContext"] | tuple["YAMLTagContext", ...]] = None,
+        context_tag_type: type["YAMLTagContext"] | tuple["YAMLTagContext", ...] | None = None,
     ) -> "YAMLTagContext":
         """Get a YAMLTagContext object located at a certain depth in the tag tree"""
         if depth < 0:
@@ -107,8 +109,8 @@ class BlueprintEntry:
 
         try:
             return contexts[-(depth + 1)]
-        except IndexError:
-            raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}")
+        except IndexError as exc:
+            raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}") from exc
 
     def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any:
         """Check if we have any special tags that need handling"""
@@ -169,7 +171,7 @@ class Blueprint:
     entries: list[BlueprintEntry] = field(default_factory=list)
     context: dict = field(default_factory=dict)
 
-    metadata: Optional[BlueprintMetadata] = field(default=None)
+    metadata: BlueprintMetadata | None = field(default=None)
 
 
 class YAMLTag:
@@ -217,7 +219,7 @@ class Env(YAMLTag):
     """Lookup environment variable with optional default"""
 
     key: str
-    default: Optional[Any]
+    default: Any | None
 
     def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
        super().__init__()
@@ -236,7 +238,7 @@ class Context(YAMLTag):
     """Lookup key from instance context"""
 
     key: str
-    default: Optional[Any]
+    default: Any | None
 
     def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
         super().__init__()
@@ -280,7 +282,7 @@ class Format(YAMLTag):
         try:
             return self.format_string % tuple(args)
         except TypeError as exc:
-            raise EntryInvalidError.from_entry(exc, entry)
+            raise EntryInvalidError.from_entry(exc, entry) from exc
 
 
 class Find(YAMLTag):
@@ -365,7 +367,7 @@ class Condition(YAMLTag):
             comparator = self._COMPARATORS[self.mode.upper()]
             return comparator(tuple(bool(x) for x in args))
         except (TypeError, KeyError) as exc:
-            raise EntryInvalidError.from_entry(exc, entry)
+            raise EntryInvalidError.from_entry(exc, entry) from exc
 
 
 class If(YAMLTag):
@@ -397,7 +399,7 @@ class If(YAMLTag):
                 blueprint,
             )
         except TypeError as exc:
-            raise EntryInvalidError.from_entry(exc, entry)
+            raise EntryInvalidError.from_entry(exc, entry) from exc
 
 
 class Enumerate(YAMLTag, YAMLTagContext):
@@ -411,9 +413,7 @@ class Enumerate(YAMLTag, YAMLTagContext):
         "SEQ": (list, lambda a, b: [*a, b]),
         "MAP": (
             dict,
-            lambda a, b: always_merger.merge(
-                a, {b[0]: b[1]} if isinstance(b, (tuple, list)) else b
-            ),
+            lambda a, b: always_merger.merge(a, {b[0]: b[1]} if isinstance(b, tuple | list) else b),
         ),
     }
 
@@ -455,7 +455,7 @@ class Enumerate(YAMLTag, YAMLTagContext):
         try:
             output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()]
         except KeyError as exc:
-            raise EntryInvalidError.from_entry(exc, entry)
+            raise EntryInvalidError.from_entry(exc, entry) from exc
 
         result = output_class()
 
@@ -483,13 +483,13 @@ class EnumeratedItem(YAMLTag):
 
     _SUPPORTED_CONTEXT_TAGS = (Enumerate,)
 
-    def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
+    def __init__(self, _loader: "BlueprintLoader", node: ScalarNode) -> None:
         super().__init__()
         self.depth = int(node.value)
 
     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
         try:
-            context_tag: Enumerate = entry._get_tag_context(
+            context_tag: Enumerate = entry.get_tag_context(
                 depth=self.depth,
                 context_tag_type=EnumeratedItem._SUPPORTED_CONTEXT_TAGS,
             )
@@ -499,9 +499,11 @@ class EnumeratedItem(YAMLTag):
                     f"{self.__class__.__name__} tags are only usable "
                     f"inside an {Enumerate.__name__} tag",
                     entry,
-                )
+                ) from exc
 
-            raise EntryInvalidError.from_entry(f"{self.__class__.__name__} tag: {exc}", entry)
+            raise EntryInvalidError.from_entry(
+                f"{self.__class__.__name__} tag: {exc}", entry
+            ) from exc
 
         return context_tag.get_context(entry, blueprint)
 
@@ -514,8 +516,8 @@ class Index(EnumeratedItem):
 
         try:
             return context[0]
-        except IndexError:  # pragma: no cover
-            raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry)
+        except IndexError as exc:  # pragma: no cover
+            raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc
 
 
 class Value(EnumeratedItem):
@@ -526,8 +528,8 @@ class Value(EnumeratedItem):
 
         try:
             return context[1]
-        except IndexError:  # pragma: no cover
-            raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry)
+        except IndexError as exc:  # pragma: no cover
+            raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc
 
 
 class BlueprintDumper(SafeDumper):
@@ -554,7 +556,11 @@ class BlueprintDumper(SafeDumper):
 
         def factory(items):
             final_dict = dict(items)
+            # Remove internal state variables
             final_dict.pop("_state", None)
+            # Future-proof to only remove the ID if we don't set a value
+            if "id" in final_dict and final_dict.get("id") is None:
+                final_dict.pop("id")
             return final_dict
 
         data = asdict(data, dict_factory=factory)
@@ -581,13 +587,13 @@ class BlueprintLoader(SafeLoader):
 class EntryInvalidError(SentryIgnoredException):
     """Error raised when an entry is invalid"""
 
-    entry_model: Optional[str]
-    entry_id: Optional[str]
-    validation_error: Optional[ValidationError]
-    serializer: Optional[Serializer] = None
+    entry_model: str | None
+    entry_id: str | None
+    validation_error: ValidationError | None
+    serializer: Serializer | None = None
 
     def __init__(
-        self, *args: object, validation_error: Optional[ValidationError] = None, **kwargs
+        self, *args: object, validation_error: ValidationError | None = None, **kwargs
     ) -> None:
         super().__init__(*args)
         self.entry_model = None
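Most of the churn in this file is mechanical: Optional[X] annotations become PEP 604 "X | None" unions (and isinstance(b, (tuple, list)) becomes isinstance(b, tuple | list)), while bare re-raises gain "from exc" so the original exception is preserved as __cause__. A small self-contained illustration of both Python 3.10+ idioms being adopted here (not authentik code):

value = ("key", "value")
# PEP 604 unions work directly in isinstance() on Python 3.10 and newer.
assert isinstance(value, tuple | list)

def lookup(data: dict, key: str) -> str | None:
    try:
        return data[key]
    except KeyError as exc:
        # "from exc" chains the KeyError as __cause__ instead of discarding it.
        raise ValueError(f"unknown key: {key}") from exc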
@@ -1,5 +1,6 @@
 """Blueprint exporter"""
-from typing import Iterable
+
+from collections.abc import Iterable
 from uuid import UUID
 
 from django.apps import apps
@@ -7,7 +8,6 @@ from django.contrib.auth import get_user_model
 from django.db.models import Model, Q, QuerySet
 from django.utils.timezone import now
 from django.utils.translation import gettext as _
-from guardian.shortcuts import get_anonymous_user
 from yaml import dump
 
 from authentik.blueprints.v1.common import (
@@ -48,7 +48,7 @@ class Exporter:
         """Return a queryset for `model`. Can be used to filter some
         objects on some models"""
         if model == get_user_model():
-            return model.objects.exclude(pk=get_anonymous_user().pk)
+            return model.objects.exclude_anonymous()
         return model.objects.all()
 
     def _pre_export(self, blueprint: Blueprint):
@@ -59,7 +59,7 @@ class Exporter:
         blueprint = Blueprint()
         self._pre_export(blueprint)
         blueprint.metadata = BlueprintMetadata(
-            name=_("authentik Export - %(date)s" % {"date": str(now())}),
+            name=_("authentik Export - {date}".format_map({"date": str(now())})),
             labels={
                 LABEL_AUTHENTIK_GENERATED: "true",
             },
@@ -74,7 +74,7 @@ class Exporter:
 
 
 class FlowExporter(Exporter):
-    """Exporter customised to only return objects related to `flow`"""
+    """Exporter customized to only return objects related to `flow`"""
 
     flow: Flow
     with_policies: bool
@@ -1,22 +1,24 @@
 """Blueprint importer"""
+
 from contextlib import contextmanager
 from copy import deepcopy
-from typing import Any, Optional
+from typing import Any
 
 from dacite.config import Config
 from dacite.core import from_dict
 from dacite.exceptions import DaciteError
 from deepmerge import always_merger
+from django.contrib.auth.models import Permission
+from django.contrib.contenttypes.models import ContentType
 from django.core.exceptions import FieldError
 from django.db.models import Model
 from django.db.models.query_utils import Q
 from django.db.transaction import atomic
 from django.db.utils import IntegrityError
+from guardian.models import UserObjectPermission
 from rest_framework.exceptions import ValidationError
 from rest_framework.serializers import BaseSerializer, Serializer
 from structlog.stdlib import BoundLogger, get_logger
-from structlog.testing import capture_logs
-from structlog.types import EventDict
 from yaml import load
 
 from authentik.blueprints.v1.common import (
@@ -35,14 +37,31 @@ from authentik.core.models import (
     Source,
     UserSourceConnection,
 )
+from authentik.enterprise.license import LicenseKey
 from authentik.enterprise.models import LicenseUsage
+from authentik.enterprise.providers.google_workspace.models import (
+    GoogleWorkspaceProviderGroup,
+    GoogleWorkspaceProviderUser,
+)
+from authentik.enterprise.providers.microsoft_entra.models import (
+    MicrosoftEntraProviderGroup,
+    MicrosoftEntraProviderUser,
+)
+from authentik.enterprise.providers.rac.models import ConnectionToken
+from authentik.events.logs import LogEvent, capture_logs
+from authentik.events.models import SystemTask
 from authentik.events.utils import cleanse_dict
 from authentik.flows.models import FlowToken, Stage
 from authentik.lib.models import SerializerModel
 from authentik.lib.sentry import SentryIgnoredException
 from authentik.outposts.models import OutpostServiceConnection
 from authentik.policies.models import Policy, PolicyBindingModel
-from authentik.providers.scim.models import SCIMGroup, SCIMUser
+from authentik.policies.reputation.models import Reputation
+from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
+from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
+from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
+from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
+from authentik.tenants.models import Tenant
 
 # Context set when the serializer is created in a blueprint context
 # Update website/developer-docs/blueprints/v1/models.md when used
@@ -52,13 +71,17 @@ SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"
 def excluded_models() -> list[type[Model]]:
     """Return a list of all excluded models that shouldn't be exposed via API
     or other means (internal only, base classes, non-used objects, etc)"""
-    # pylint: disable=imported-auth-user
+
     from django.contrib.auth.models import Group as DjangoGroup
     from django.contrib.auth.models import User as DjangoUser
 
     return (
+        # Django only classes
         DjangoUser,
         DjangoGroup,
+        ContentType,
+        Permission,
+        UserObjectPermission,
         # Base classes
         Provider,
         Source,
@@ -71,16 +94,31 @@ def excluded_models() -> list[type[Model]]:
         # Classes that have other dependencies
         AuthenticatedSession,
         # Classes which are only internally managed
+        # FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin
        FlowToken,
         LicenseUsage,
-        SCIMGroup,
-        SCIMUser,
+        SCIMProviderGroup,
+        SCIMProviderUser,
+        Tenant,
+        SystemTask,
+        ConnectionToken,
+        AuthorizationCode,
+        AccessToken,
+        RefreshToken,
+        Reputation,
+        WebAuthnDeviceType,
+        SCIMSourceUser,
+        SCIMSourceGroup,
+        GoogleWorkspaceProviderUser,
+        GoogleWorkspaceProviderGroup,
+        MicrosoftEntraProviderUser,
+        MicrosoftEntraProviderGroup,
     )
 
 
 def is_model_allowed(model: type[Model]) -> bool:
     """Check if model is allowed"""
-    return model not in excluded_models() and issubclass(model, (SerializerModel, BaseMetaModel))
+    return model not in excluded_models() and issubclass(model, SerializerModel | BaseMetaModel)
 
 
 class DoRollback(SentryIgnoredException):
@@ -104,16 +142,20 @@ class Importer:
     logger: BoundLogger
     _import: Blueprint
 
-    def __init__(self, blueprint: Blueprint, context: Optional[dict] = None):
+    def __init__(self, blueprint: Blueprint, context: dict | None = None):
         self.__pk_map: dict[Any, Model] = {}
         self._import = blueprint
         self.logger = get_logger()
-        ctx = {}
+        ctx = self.default_context()
         always_merger.merge(ctx, self._import.context)
         if context:
             always_merger.merge(ctx, context)
         self._import.context = ctx
 
+    def default_context(self):
+        """Default context"""
+        return {"goauthentik.io/enterprise/licensed": LicenseKey.get_total().is_valid()}
+
     @staticmethod
     def from_string(yaml_input: str, context: dict | None = None) -> "Importer":
         """Parse YAML string and create blueprint importer from it"""
@@ -136,14 +178,14 @@ class Importer:
 
         def updater(value) -> Any:
             if value in self.__pk_map:
-                self.logger.debug("updating reference in entry", value=value)
+                self.logger.debug("Updating reference in entry", value=value)
                 return self.__pk_map[value]
             return value
 
         for key, value in attrs.items():
             try:
                 if isinstance(value, dict):
-                    for idx, _inner_key in enumerate(value):
+                    for _, _inner_key in enumerate(value):
                         value[_inner_key] = updater(value[_inner_key])
                 elif isinstance(value, list):
                     for idx, _inner_value in enumerate(value):
@@ -172,8 +214,7 @@ class Importer:
 
         return main_query | sub_query
 
-    # pylint: disable-msg=too-many-locals
-    def _validate_single(self, entry: BlueprintEntry) -> Optional[BaseSerializer]:
+    def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer | None:
         """Validate a single entry"""
         if not entry.check_all_conditions_match(self._import):
             self.logger.debug("One or more conditions of this entry are not fulfilled, skipping")
@@ -226,7 +267,7 @@ class Importer:
         model_instance = existing_models.first()
         if not isinstance(model(), BaseMetaModel) and model_instance:
             self.logger.debug(
-                "initialise serializer with instance",
+                "Initialise serializer with instance",
                 model=model,
                 instance=model_instance,
                 pk=model_instance.pk,
@@ -236,14 +277,14 @@ class Importer:
         elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED:
             raise EntryInvalidError.from_entry(
                 (
-                    f"state is set to {BlueprintEntryDesiredState.MUST_CREATED} "
+                    f"State is set to {BlueprintEntryDesiredState.MUST_CREATED} "
                     "and object exists already",
                 ),
                 entry,
             )
         else:
             self.logger.debug(
-                "initialised new serializer instance",
+                "Initialised new serializer instance",
                 model=model,
                 **cleanse_dict(updated_identifiers),
             )
@@ -300,7 +341,7 @@ class Importer:
                 model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
             except LookupError:
                 self.logger.warning(
-                    "app or model does not exist", app=model_app_label, model=model_name
+                    "App or Model does not exist", app=model_app_label, model=model_name
                 )
                 return False
             # Validate each single entry
@@ -312,7 +353,7 @@ class Importer:
                 if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT:
                     serializer = exc.serializer
                 else:
                    self.logger.warning(f"Entry invalid: {exc}", entry=entry, error=exc)
                     if raise_errors:
                         raise exc
                     return False
@@ -332,27 +373,27 @@ class Importer:
                     and state == BlueprintEntryDesiredState.CREATED
                 ):
                     self.logger.debug(
-                        "instance exists, skipping",
+                        "Instance exists, skipping",
                         model=model,
                         instance=instance,
                         pk=instance.pk,
                     )
                 else:
                     instance = serializer.save()
-                    self.logger.debug("updated model", model=instance)
+                    self.logger.debug("Updated model", model=instance)
                     if "pk" in entry.identifiers:
                         self.__pk_map[entry.identifiers["pk"]] = instance.pk
                     entry._state = BlueprintEntryState(instance)
             elif state == BlueprintEntryDesiredState.ABSENT:
-                instance: Optional[Model] = serializer.instance
+                instance: Model | None = serializer.instance
                 if instance.pk:
                     instance.delete()
-                    self.logger.debug("deleted model", mode=instance)
+                    self.logger.debug("Deleted model", mode=instance)
                     continue
-                self.logger.debug("entry to delete with no instance, skipping")
+                self.logger.debug("Entry to delete with no instance, skipping")
         return True
 
-    def validate(self, raise_validation_errors=False) -> tuple[bool, list[EventDict]]:
+    def validate(self, raise_validation_errors=False) -> tuple[bool, list[LogEvent]]:
         """Validate loaded blueprint export, ensure all models are allowed
         and serializers have no errors"""
         self.logger.debug("Starting blueprint import validation")
@@ -366,9 +407,7 @@ class Importer:
         ):
             successful = self._apply_models(raise_errors=raise_validation_errors)
             if not successful:
-                self.logger.debug("Blueprint validation failed")
-        for log in logs:
-            getattr(self.logger, log.get("log_level"))(**log)
+                self.logger.warning("Blueprint validation failed")
         self.logger.debug("Finished blueprint import validation")
         self._import = orig_import
         return successful, logs
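For orientation, the Importer entry points touched in this file are the same ones the management command and the Celery task elsewhere in this diff call. A hedged sketch of that calling sequence (apply_yaml is an illustrative helper, not authentik API; the method names are taken from the hunks above):

from authentik.blueprints.v1.importer import Importer

def apply_yaml(yaml_input: str) -> bool:
    importer = Importer.from_string(yaml_input)  # parse the YAML into a Blueprint
    valid, _logs = importer.validate()           # check the entries without persisting them
    if not valid:
        return False
    return importer.apply()                      # write the entries to the database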
@@ -1,4 +1,5 @@
 """Apply Blueprint meta model"""
+
 from typing import TYPE_CHECKING
 
 from rest_framework.exceptions import ValidationError
@@ -42,7 +43,7 @@ class ApplyBlueprintMetaSerializer(PassiveSerializer):
             LOGGER.info("Blueprint does not exist, but not required")
             return MetaResult()
         LOGGER.debug("Applying blueprint from meta model", blueprint=self.blueprint_instance)
-        # pylint: disable=no-value-for-parameter
         apply_blueprint(str(self.blueprint_instance.pk))
         return MetaResult()
 
@@ -1,4 +1,5 @@
 """Base models"""
+
 from django.apps import apps
 from django.db.models import Model
 from rest_framework.serializers import Serializer
@@ -7,15 +8,15 @@ from rest_framework.serializers import Serializer
 class BaseMetaModel(Model):
     """Base models"""
 
+    class Meta:
+        abstract = True
+
     @staticmethod
     def serializer() -> Serializer:
         """Serializer similar to SerializerModel, but as a static method since
         this is an abstract model"""
         raise NotImplementedError
 
-    class Meta:
-        abstract = True
-
 
 class MetaResult:
     """Result returned by Meta Models' serializers. Empty class but we can't return none as
@@ -1,4 +1,5 @@
 """OCI Client"""
+
 from typing import Any
 from urllib.parse import ParseResult, urlparse
 
@ -1,8 +1,9 @@
|
|||||||
"""v1 blueprints tasks"""
|
"""v1 blueprints tasks"""
|
||||||
|
|
||||||
from dataclasses import asdict, dataclass, field
|
from dataclasses import asdict, dataclass, field
|
||||||
from hashlib import sha512
|
from hashlib import sha512
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Optional
|
from sys import platform
|
||||||
|
|
||||||
from dacite.core import from_dict
|
from dacite.core import from_dict
|
||||||
from django.db import DatabaseError, InternalError, ProgrammingError
|
from django.db import DatabaseError, InternalError, ProgrammingError
|
||||||
@ -29,15 +30,13 @@ from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, E
|
|||||||
from authentik.blueprints.v1.importer import Importer
|
from authentik.blueprints.v1.importer import Importer
|
||||||
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
|
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
|
||||||
from authentik.blueprints.v1.oci import OCI_PREFIX
|
from authentik.blueprints.v1.oci import OCI_PREFIX
|
||||||
from authentik.events.monitored_tasks import (
|
from authentik.events.logs import capture_logs
|
||||||
MonitoredTask,
|
from authentik.events.models import TaskStatus
|
||||||
TaskResult,
|
from authentik.events.system_tasks import SystemTask, prefill_task
|
||||||
TaskResultStatus,
|
|
||||||
prefill_task,
|
|
||||||
)
|
|
||||||
from authentik.events.utils import sanitize_dict
|
from authentik.events.utils import sanitize_dict
|
||||||
from authentik.lib.config import CONFIG
|
from authentik.lib.config import CONFIG
|
||||||
from authentik.root.celery import CELERY_APP
|
from authentik.root.celery import CELERY_APP
|
||||||
|
from authentik.tenants.models import Tenant
|
||||||
|
|
||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
_file_watcher_started = False
|
_file_watcher_started = False
|
||||||
@ -51,18 +50,23 @@ class BlueprintFile:
|
|||||||
version: int
|
version: int
|
||||||
hash: str
|
hash: str
|
||||||
last_m: int
|
last_m: int
|
||||||
meta: Optional[BlueprintMetadata] = field(default=None)
|
meta: BlueprintMetadata | None = field(default=None)
|
||||||
|
|
||||||
|
|
||||||
def start_blueprint_watcher():
|
def start_blueprint_watcher():
|
||||||
"""Start blueprint watcher, if it's not running already."""
|
"""Start blueprint watcher, if it's not running already."""
|
||||||
# This function might be called twice since it's called on celery startup
|
# This function might be called twice since it's called on celery startup
|
||||||
# pylint: disable=global-statement
|
|
||||||
global _file_watcher_started
|
global _file_watcher_started # noqa: PLW0603
|
||||||
if _file_watcher_started:
|
if _file_watcher_started:
|
||||||
return
|
return
|
||||||
observer = Observer()
|
observer = Observer()
|
||||||
observer.schedule(BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True)
|
kwargs = {}
|
||||||
|
if platform.startswith("linux"):
|
||||||
|
kwargs["event_filter"] = (FileCreatedEvent, FileModifiedEvent)
|
||||||
|
observer.schedule(
|
||||||
|
BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True, **kwargs
|
||||||
|
)
|
||||||
observer.start()
|
observer.start()
|
||||||
_file_watcher_started = True
|
_file_watcher_started = True
|
||||||
|
|
||||||
@ -70,21 +74,36 @@ def start_blueprint_watcher():
|
|||||||
class BlueprintEventHandler(FileSystemEventHandler):
|
class BlueprintEventHandler(FileSystemEventHandler):
|
||||||
"""Event handler for blueprint events"""
|
"""Event handler for blueprint events"""
|
||||||
|
|
||||||
def on_any_event(self, event: FileSystemEvent):
|
# We only ever get creation and modification events.
|
||||||
if not isinstance(event, (FileCreatedEvent, FileModifiedEvent)):
|
# See the creation of the Observer instance above for the event filtering.
|
||||||
return
|
|
||||||
|
# Even though we filter to only get file events, we might still get
|
||||||
|
# directory events as some implementations such as inotify do not support
|
||||||
|
# filtering on file/directory.
|
||||||
|
|
||||||
|
def dispatch(self, event: FileSystemEvent) -> None:
|
||||||
|
"""Call specific event handler method. Ignores directory changes."""
|
||||||
if event.is_directory:
|
if event.is_directory:
|
||||||
return
|
return None
|
||||||
|
return super().dispatch(event)
|
||||||
|
|
||||||
|
def on_created(self, event: FileSystemEvent):
|
||||||
|
"""Process file creation"""
|
||||||
|
LOGGER.debug("new blueprint file created, starting discovery")
|
||||||
|
for tenant in Tenant.objects.filter(ready=True):
|
||||||
|
with tenant:
|
||||||
|
blueprints_discovery.delay()
|
||||||
|
|
||||||
|
def on_modified(self, event: FileSystemEvent):
|
||||||
|
"""Process file modification"""
|
||||||
|
path = Path(event.src_path)
|
||||||
root = Path(CONFIG.get("blueprints_dir")).absolute()
|
root = Path(CONFIG.get("blueprints_dir")).absolute()
|
||||||
path = Path(event.src_path).absolute()
|
|
||||||
rel_path = str(path.relative_to(root))
|
rel_path = str(path.relative_to(root))
|
||||||
if isinstance(event, FileCreatedEvent):
|
for tenant in Tenant.objects.filter(ready=True):
|
||||||
LOGGER.debug("new blueprint file created, starting discovery", path=rel_path)
|
with tenant:
|
||||||
blueprints_discovery.delay(rel_path)
|
for instance in BlueprintInstance.objects.filter(path=rel_path, enabled=True):
|
||||||
if isinstance(event, FileModifiedEvent):
|
LOGGER.debug("modified blueprint file, starting apply", instance=instance)
|
||||||
for instance in BlueprintInstance.objects.filter(path=rel_path, enabled=True):
|
apply_blueprint.delay(instance.pk.hex)
|
||||||
LOGGER.debug("modified blueprint file, starting apply", instance=instance)
|
|
||||||
apply_blueprint.delay(instance.pk.hex)
|
|
||||||
|
|
||||||
|
|
||||||
@CELERY_APP.task(
|
@CELERY_APP.task(
|
||||||
@ -107,7 +126,7 @@ def blueprints_find() -> list[BlueprintFile]:
|
|||||||
# Check if any part in the path starts with a dot and assume a hidden file
|
# Check if any part in the path starts with a dot and assume a hidden file
|
||||||
if any(part for part in path.parts if part.startswith(".")):
|
if any(part for part in path.parts if part.startswith(".")):
|
||||||
continue
|
continue
|
||||||
with open(path, "r", encoding="utf-8") as blueprint_file:
|
with open(path, encoding="utf-8") as blueprint_file:
|
||||||
try:
|
try:
|
||||||
raw_blueprint = load(blueprint_file.read(), BlueprintLoader)
|
raw_blueprint = load(blueprint_file.read(), BlueprintLoader)
|
||||||
except YAMLError as exc:
|
except YAMLError as exc:
|
||||||
@ -128,10 +147,10 @@ def blueprints_find() -> list[BlueprintFile]:
|
|||||||
|
|
||||||
|
|
||||||
@CELERY_APP.task(
|
@CELERY_APP.task(
|
||||||
throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
|
throws=(DatabaseError, ProgrammingError, InternalError), base=SystemTask, bind=True
|
||||||
)
|
)
|
||||||
@prefill_task
|
@prefill_task
|
||||||
def blueprints_discovery(self: MonitoredTask, path: Optional[str] = None):
|
def blueprints_discovery(self: SystemTask, path: str | None = None):
|
||||||
"""Find blueprints and check if they need to be created in the database"""
|
"""Find blueprints and check if they need to be created in the database"""
|
||||||
count = 0
|
count = 0
|
||||||
for blueprint in blueprints_find():
|
for blueprint in blueprints_find():
|
||||||
@ -140,10 +159,7 @@ def blueprints_discovery(self: MonitoredTask, path: Optional[str] = None):
|
|||||||
check_blueprint_v1_file(blueprint)
|
check_blueprint_v1_file(blueprint)
|
||||||
count += 1
|
count += 1
|
||||||
self.set_status(
|
self.set_status(
|
||||||
TaskResult(
|
TaskStatus.SUCCESSFUL, _("Successfully imported %(count)d files." % {"count": count})
|
||||||
TaskResultStatus.SUCCESSFUL,
|
|
||||||
messages=[_("Successfully imported %(count)d files." % {"count": count})],
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -176,12 +192,12 @@ def check_blueprint_v1_file(blueprint: BlueprintFile):

 @CELERY_APP.task(
     bind=True,
-    base=MonitoredTask,
+    base=SystemTask,
 )
-def apply_blueprint(self: MonitoredTask, instance_pk: str):
+def apply_blueprint(self: SystemTask, instance_pk: str):
     """Apply single blueprint"""
     self.save_on_success = False
-    instance: Optional[BlueprintInstance] = None
+    instance: BlueprintInstance | None = None
     try:
         instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first()
         if not instance or not instance.enabled:
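The annotation change in this hunk (`Optional[BlueprintInstance]` to `BlueprintInstance | None`) is purely syntactic; both spell the same type. A tiny reference sketch:

from typing import Optional


def old_style(pk: Optional[str] = None) -> None: ...


def new_style(pk: str | None = None) -> None: ...  # PEP 604 union syntax, Python 3.10+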
@@ -196,29 +212,30 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
         if not valid:
             instance.status = BlueprintInstanceStatus.ERROR
             instance.save()
-            self.set_status(TaskResult(TaskResultStatus.ERROR, [x["event"] for x in logs]))
-            return
-        applied = importer.apply()
-        if not applied:
-            instance.status = BlueprintInstanceStatus.ERROR
-            instance.save()
-            self.set_status(TaskResult(TaskResultStatus.ERROR, "Failed to apply"))
+            self.set_status(TaskStatus.ERROR, *logs)
             return
+        with capture_logs() as logs:
+            applied = importer.apply()
+            if not applied:
+                instance.status = BlueprintInstanceStatus.ERROR
+                instance.save()
+                self.set_status(TaskStatus.ERROR, *logs)
+                return
         instance.status = BlueprintInstanceStatus.SUCCESSFUL
         instance.last_applied_hash = file_hash
         instance.last_applied = now()
-        self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL))
+        self.set_status(TaskStatus.SUCCESSFUL)
     except (
+        OSError,
         DatabaseError,
         ProgrammingError,
         InternalError,
-        IOError,
         BlueprintRetrievalFailed,
         EntryInvalidError,
     ) as exc:
         if instance:
             instance.status = BlueprintInstanceStatus.ERROR
-        self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
+        self.set_error(exc)
     finally:
         if instance:
             instance.save()
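The new `with capture_logs() as logs:` block above collects structured log events emitted while the blueprint is applied, so they can be attached to the task status. A small sketch of that context manager as bundled with structlog (authentik may import it from its own helper module; here it is shown via `structlog.testing`):

# Sketch, using structlog's bundled capture_logs context manager.
from structlog import get_logger
from structlog.testing import capture_logs

LOGGER = get_logger()

with capture_logs() as logs:
    LOGGER.warning("applying blueprint", entry="example-entry")

# `logs` is a list of event dicts captured during the block, e.g.
# [{"event": "applying blueprint", "entry": "example-entry", "log_level": "warning"}]
print(logs)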
@@ -1,4 +1,5 @@
-"""Serializer for tenant models"""
+"""Serializer for brands models"""
+
 from typing import Any

 from django.db import models
@@ -10,39 +11,39 @@ from rest_framework.filters import OrderingFilter, SearchFilter
 from rest_framework.permissions import AllowAny
 from rest_framework.request import Request
 from rest_framework.response import Response
-from rest_framework.serializers import ModelSerializer
+from rest_framework.validators import UniqueValidator
 from rest_framework.viewsets import ModelViewSet

 from authentik.api.authorization import SecretKeyFilter
+from authentik.brands.models import Brand
 from authentik.core.api.used_by import UsedByMixin
-from authentik.core.api.utils import PassiveSerializer
-from authentik.lib.config import CONFIG
-from authentik.tenants.models import Tenant
+from authentik.core.api.utils import ModelSerializer, PassiveSerializer
+from authentik.tenants.utils import get_current_tenant


 class FooterLinkSerializer(PassiveSerializer):
     """Links returned in Config API"""

-    href = CharField(read_only=True)
+    href = CharField(read_only=True, allow_null=True)
     name = CharField(read_only=True)


-class TenantSerializer(ModelSerializer):
-    """Tenant Serializer"""
+class BrandSerializer(ModelSerializer):
+    """Brand Serializer"""

     def validate(self, attrs: dict[str, Any]) -> dict[str, Any]:
         if attrs.get("default", False):
-            tenants = Tenant.objects.filter(default=True)
+            brands = Brand.objects.filter(default=True)
             if self.instance:
-                tenants = tenants.exclude(pk=self.instance.pk)
-            if tenants.exists():
-                raise ValidationError({"default": "Only a single Tenant can be set as default."})
+                brands = brands.exclude(pk=self.instance.pk)
+            if brands.exists():
+                raise ValidationError({"default": "Only a single brand can be set as default."})
         return super().validate(attrs)

     class Meta:
-        model = Tenant
+        model = Brand
         fields = [
-            "tenant_uuid",
+            "brand_uuid",
             "domain",
             "default",
             "branding_title",
@@ -54,10 +55,14 @@ class TenantSerializer(ModelSerializer):
             "flow_unenrollment",
             "flow_user_settings",
             "flow_device_code",
-            "event_retention",
             "web_certificate",
             "attributes",
         ]
+        extra_kwargs = {
+            # TODO: This field isn't unique on the database which is hard to backport
+            # hence we just validate the uniqueness here
+            "domain": {"validators": [UniqueValidator(Brand.objects.all())]},
+        }


 class Themes(models.TextChoices):
@@ -68,8 +73,13 @@ class Themes(models.TextChoices):
     DARK = "dark"


-class CurrentTenantSerializer(PassiveSerializer):
-    """Partial tenant information for styling"""
+def get_default_ui_footer_links():
+    """Get default UI footer links based on current tenant settings"""
+    return get_current_tenant().footer_links
+
+
+class CurrentBrandSerializer(PassiveSerializer):
+    """Partial brand information for styling"""

     matched_domain = CharField(source="domain")
     branding_title = CharField()
@@ -78,7 +88,7 @@ class CurrentTenantSerializer(PassiveSerializer):
     ui_footer_links = ListField(
         child=FooterLinkSerializer(),
         read_only=True,
-        default=CONFIG.get("footer_links", []),
+        default=get_default_ui_footer_links,
     )
     ui_theme = ChoiceField(
         choices=Themes.choices,
@@ -97,18 +107,18 @@ class CurrentTenantSerializer(PassiveSerializer):
     default_locale = CharField(read_only=True)


-class TenantViewSet(UsedByMixin, ModelViewSet):
-    """Tenant Viewset"""
+class BrandViewSet(UsedByMixin, ModelViewSet):
+    """Brand Viewset"""

-    queryset = Tenant.objects.all()
-    serializer_class = TenantSerializer
+    queryset = Brand.objects.all()
+    serializer_class = BrandSerializer
     search_fields = [
         "domain",
         "branding_title",
         "web_certificate__name",
     ]
     filterset_fields = [
-        "tenant_uuid",
+        "brand_uuid",
         "domain",
         "default",
         "branding_title",
@@ -120,7 +130,6 @@ class TenantViewSet(UsedByMixin, ModelViewSet):
         "flow_unenrollment",
         "flow_user_settings",
         "flow_device_code",
-        "event_retention",
         "web_certificate",
     ]
     ordering = ["domain"]
@@ -128,10 +137,10 @@ class TenantViewSet(UsedByMixin, ModelViewSet):
     filter_backends = [SecretKeyFilter, OrderingFilter, SearchFilter]

     @extend_schema(
-        responses=CurrentTenantSerializer(many=False),
+        responses=CurrentBrandSerializer(many=False),
     )
     @action(methods=["GET"], detail=False, permission_classes=[AllowAny])
     def current(self, request: Request) -> Response:
-        """Get current tenant"""
-        tenant: Tenant = request._request.tenant
-        return Response(CurrentTenantSerializer(tenant).data)
+        """Get current brand"""
+        brand: Brand = request._request.brand
+        return Response(CurrentBrandSerializer(brand).data)
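The `ui_footer_links` change above replaces a value read once from CONFIG at import time with a callable (`get_default_ui_footer_links`), which DRF evaluates whenever the serializer renders. A minimal sketch of that callable-default behaviour; the names below are illustrative, not authentik code:

# Sketch of DRF's callable `default=`.
from rest_framework import serializers


def default_links():
    return [{"href": "https://example.com", "name": "Example"}]


class LinksSerializer(serializers.Serializer):
    links = serializers.ListField(child=serializers.DictField(), default=default_links)


# The callable runs when the field value is missing from the instance being serialized:
print(LinksSerializer({}).data)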
authentik/brands/apps.py (new file, +11)
@@ -0,0 +1,11 @@
+"""authentik brands app"""
+
+from django.apps import AppConfig
+
+
+class AuthentikBrandsConfig(AppConfig):
+    """authentik Brand app"""
+
+    name = "authentik.brands"
+    label = "authentik_brands"
+    verbose_name = "authentik Brands"
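For context, an `AppConfig` like the one added above only takes effect once its package is listed in the Django settings; a hypothetical fragment (authentik's real settings layout is not shown in this diff):

# Hypothetical settings.py fragment; Django discovers AuthentikBrandsConfig
# automatically from authentik/brands/apps.py when the package is installed.
INSTALLED_APPS = [
    "django.contrib.contenttypes",
    "authentik.brands",
]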
authentik/brands/middleware.py (new file, +27)
@@ -0,0 +1,27 @@
+"""Inject brand into current request"""
+
+from collections.abc import Callable
+
+from django.http.request import HttpRequest
+from django.http.response import HttpResponse
+from django.utils.translation import activate
+
+from authentik.brands.utils import get_brand_for_request
+
+
+class BrandMiddleware:
+    """Add current brand to http request"""
+
+    get_response: Callable[[HttpRequest], HttpResponse]
+
+    def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]):
+        self.get_response = get_response
+
+    def __call__(self, request: HttpRequest) -> HttpResponse:
+        if not hasattr(request, "brand"):
+            brand = get_brand_for_request(request)
+            request.brand = brand
+            locale = brand.default_locale
+            if locale != "":
+                activate(locale)
+        return self.get_response(request)
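The middleware added above sets `request.brand` before the view runs. A hedged sketch of how such a class-based middleware is registered and consumed; the settings list and the view are assumptions, not part of this diff:

# Hypothetical wiring for a class-based Django middleware like BrandMiddleware.
MIDDLEWARE = [
    "django.middleware.common.CommonMiddleware",
    "authentik.brands.middleware.BrandMiddleware",
]


# Any view running after it can read the attribute the middleware attached:
from django.http import HttpRequest, JsonResponse


def whoami(request: HttpRequest) -> JsonResponse:
    brand = getattr(request, "brand", None)
    return JsonResponse({"branding_title": brand.branding_title if brand else None})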
@@ -10,11 +10,11 @@ import authentik.lib.utils.time

 class Migration(migrations.Migration):
     replaces = [
-        ("authentik_tenants", "0001_initial"),
-        ("authentik_tenants", "0002_default"),
-        ("authentik_tenants", "0003_tenant_branding_favicon"),
-        ("authentik_tenants", "0004_tenant_event_retention"),
-        ("authentik_tenants", "0005_tenant_web_certificate"),
+        ("authentik_brands", "0001_initial"),
+        ("authentik_brands", "0002_default"),
+        ("authentik_brands", "0003_tenant_branding_favicon"),
+        ("authentik_brands", "0004_tenant_event_retention"),
+        ("authentik_brands", "0005_tenant_web_certificate"),
     ]

     initial = True
@@ -25,7 +25,7 @@ class Migration(migrations.Migration):

     operations = [
         migrations.CreateModel(
-            name="Tenant",
+            name="Brand",
             fields=[
                 (
                     "tenant_uuid",
@@ -37,7 +37,7 @@ class Migration(migrations.Migration):
                     "domain",
                     models.TextField(
                         help_text=(
-                            "Domain that activates this tenant. Can be a superset, i.e. `a.b` for"
+                            "Domain that activates this brand. Can be a superset, i.e. `a.b` for"
                             " `aa.b` and `ba.b`"
                         )
                     ),
@@ -53,7 +53,7 @@ class Migration(migrations.Migration):
                     models.ForeignKey(
                         null=True,
                         on_delete=django.db.models.deletion.SET_NULL,
-                        related_name="tenant_authentication",
+                        related_name="brand_authentication",
                         to="authentik_flows.flow",
                     ),
                 ),
@@ -62,7 +62,7 @@ class Migration(migrations.Migration):
                     models.ForeignKey(
                         null=True,
                         on_delete=django.db.models.deletion.SET_NULL,
-                        related_name="tenant_invalidation",
+                        related_name="brand_invalidation",
                         to="authentik_flows.flow",
                     ),
                 ),
@@ -71,7 +71,7 @@ class Migration(migrations.Migration):
                     models.ForeignKey(
                         null=True,
                         on_delete=django.db.models.deletion.SET_NULL,
-                        related_name="tenant_recovery",
+                        related_name="brand_recovery",
                         to="authentik_flows.flow",
                     ),
                 ),
@@ -80,23 +80,23 @@ class Migration(migrations.Migration):
                     models.ForeignKey(
                         null=True,
                         on_delete=django.db.models.deletion.SET_NULL,
-                        related_name="tenant_unenrollment",
+                        related_name="brand_unenrollment",
                         to="authentik_flows.flow",
                     ),
                 ),
             ],
             options={
-                "verbose_name": "Tenant",
-                "verbose_name_plural": "Tenants",
+                "verbose_name": "Brand",
+                "verbose_name_plural": "Brands",
             },
         ),
         migrations.AddField(
-            model_name="tenant",
+            model_name="brand",
             name="branding_favicon",
            field=models.TextField(default="/static/dist/assets/icons/icon.png"),
         ),
         migrations.AddField(
-            model_name="tenant",
+            model_name="brand",
             name="event_retention",
             field=models.TextField(
                 default="days=365",
@@ -108,7 +108,7 @@ class Migration(migrations.Migration):
             ),
         ),
         migrations.AddField(
-            model_name="tenant",
+            model_name="brand",
             name="web_certificate",
             field=models.ForeignKey(
                 default=None,
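The first hunk above only relabels the entries in `replaces`; the mechanism itself is standard Django squashing, where a squashed migration declares which historical migrations it stands in for. A generic sketch, not taken from this diff:

# Generic Django example: a squashed migration replacing two earlier migrations
# of the same (made-up) app.
from django.db import migrations


class Migration(migrations.Migration):
    replaces = [
        ("example_app", "0001_initial"),
        ("example_app", "0002_add_field"),
    ]
    operations = []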
@@ -8,17 +8,17 @@ class Migration(migrations.Migration):
     dependencies = [
         ("authentik_stages_prompt", "0007_prompt_placeholder_expression"),
         ("authentik_flows", "0021_auto_20211227_2103"),
-        ("authentik_tenants", "0001_squashed_0005_tenant_web_certificate"),
+        ("authentik_brands", "0001_squashed_0005_tenant_web_certificate"),
     ]

     operations = [
         migrations.AddField(
-            model_name="tenant",
+            model_name="brand",
             name="flow_user_settings",
             field=models.ForeignKey(
                 null=True,
                 on_delete=django.db.models.deletion.SET_NULL,
-                related_name="tenant_user_settings",
+                related_name="brand_user_settings",
                 to="authentik_flows.flow",
             ),
         ),
@@ -5,12 +5,12 @@ from django.db import migrations, models

 class Migration(migrations.Migration):
     dependencies = [
-        ("authentik_tenants", "0002_tenant_flow_user_settings"),
+        ("authentik_brands", "0002_tenant_flow_user_settings"),
     ]

     operations = [
         migrations.AddField(
-            model_name="tenant",
+            model_name="brand",
             name="attributes",
             field=models.JSONField(blank=True, default=dict),
         ),
@@ -7,17 +7,17 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
     dependencies = [
         ("authentik_flows", "0023_flow_denied_action"),
-        ("authentik_tenants", "0003_tenant_attributes"),
+        ("authentik_brands", "0003_tenant_attributes"),
     ]

     operations = [
         migrations.AddField(
-            model_name="tenant",
+            model_name="brand",
             name="flow_device_code",
             field=models.ForeignKey(
                 null=True,
                 on_delete=django.db.models.deletion.SET_NULL,
-                related_name="tenant_device_code",
+                related_name="brand_device_code",
                 to="authentik_flows.flow",
             ),
         ),
authentik/brands/migrations/0005_tenantuuid_to_branduuid.py (new file, +21)
@@ -0,0 +1,21 @@
+# Generated by Django 4.2.7 on 2023-12-12 06:41
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("authentik_brands", "0004_tenant_flow_device_code"),
+    ]
+
+    operations = [
+        migrations.RenameField(
+            model_name="brand",
+            old_name="tenant_uuid",
+            new_name="brand_uuid",
+        ),
+        migrations.RemoveField(
+            model_name="brand",
+            name="event_retention",
+        ),
+    ]
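The rename migration above is applied like any other; a sketch using Django's management API (equivalent to running `manage.py migrate authentik_brands`; the settings module name is an assumption, not taken from this diff):

# Sketch: apply the authentik_brands migrations programmatically.
import os

import django
from django.core.management import call_command

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "authentik.root.settings")  # assumed module path
django.setup()
call_command("migrate", "authentik_brands")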
@@ -0,0 +1,21 @@
+# Generated by Django 5.0.4 on 2024-04-18 18:56
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_brands", "0005_tenantuuid_to_branduuid"),
+    ]
+
+    operations = [
+        migrations.AddIndex(
+            model_name="brand",
+            index=models.Index(fields=["domain"], name="authentik_b_domain_b9b24a_idx"),
+        ),
+        migrations.AddIndex(
+            model_name="brand",
+            index=models.Index(fields=["default"], name="authentik_b_default_3ccf12_idx"),
+        ),
+    ]
authentik/brands/migrations/__init__.py (new file, empty)

authentik/brands/models.py (new file, +90)
@@ -0,0 +1,90 @@
+"""brand models"""
+
+from uuid import uuid4
+
+from django.db import models
+from django.utils.translation import gettext_lazy as _
+from rest_framework.serializers import Serializer
+from structlog.stdlib import get_logger
+
+from authentik.crypto.models import CertificateKeyPair
+from authentik.flows.models import Flow
+from authentik.lib.models import SerializerModel
+
+LOGGER = get_logger()
+
+
+class Brand(SerializerModel):
+    """Single brand"""
+
+    brand_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
+    domain = models.TextField(
+        help_text=_(
+            "Domain that activates this brand. Can be a superset, i.e. `a.b` for `aa.b` and `ba.b`"
+        )
+    )
+    default = models.BooleanField(
+        default=False,
+    )
+
+    branding_title = models.TextField(default="authentik")
+
+    branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg")
+    branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png")
+
+    flow_authentication = models.ForeignKey(
+        Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication"
+    )
+    flow_invalidation = models.ForeignKey(
+        Flow, null=True, on_delete=models.SET_NULL, related_name="brand_invalidation"
+    )
+    flow_recovery = models.ForeignKey(
+        Flow, null=True, on_delete=models.SET_NULL, related_name="brand_recovery"
+    )
+    flow_unenrollment = models.ForeignKey(
+        Flow, null=True, on_delete=models.SET_NULL, related_name="brand_unenrollment"
+    )
+    flow_user_settings = models.ForeignKey(
+        Flow, null=True, on_delete=models.SET_NULL, related_name="brand_user_settings"
+    )
+    flow_device_code = models.ForeignKey(
+        Flow, null=True, on_delete=models.SET_NULL, related_name="brand_device_code"
+    )
+
+    web_certificate = models.ForeignKey(
+        CertificateKeyPair,
+        null=True,
+        default=None,
+        on_delete=models.SET_DEFAULT,
+        help_text=_("Web Certificate used by the authentik Core webserver."),
+    )
+    attributes = models.JSONField(default=dict, blank=True)
+
+    @property
+    def serializer(self) -> Serializer:
+        from authentik.brands.api import BrandSerializer
+
+        return BrandSerializer
+
+    @property
+    def default_locale(self) -> str:
+        """Get default locale"""
+        try:
+            return self.attributes.get("settings", {}).get("locale", "")
+
+        except Exception as exc:
+            LOGGER.warning("Failed to get default locale", exc=exc)
+            return ""
+
+    def __str__(self) -> str:
+        if self.default:
+            return "Default brand"
+        return f"Brand {self.domain}"
+
+    class Meta:
+        verbose_name = _("Brand")
+        verbose_name_plural = _("Brands")
+        indexes = [
+            models.Index(fields=["domain"]),
+            models.Index(fields=["default"]),
+        ]
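Derived from the model code above: `default_locale` reads `attributes["settings"]["locale"]` and falls back to an empty string. A small usage sketch; the domain and locale values are examples only:

# Usage sketch for the Brand model's default_locale property.
from authentik.brands.models import Brand

brand = Brand(domain="authentik.company", attributes={"settings": {"locale": "de"}})
assert brand.default_locale == "de"

# Without the attribute, the JSONField default (an empty dict) makes the property fall back to ""
assert Brand(domain="other.company").default_locale == ""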
Some files were not shown because too many files have changed in this diff.