Compare commits
2134 Commits
sources/ld
...
version/20
SHA1 | Author | Date | |
---|---|---|---|
8469213d82 | |||
78f7b04d5a | |||
22e586bd8c | |||
8a0b31b922 | |||
359b343f51 | |||
b727656b05 | |||
8f09c2c21c | |||
8f207c7504 | |||
34d30bb549 | |||
b4f04881e0 | |||
5314485426 | |||
ad6b6e4576 | |||
fb9aa9d7f7 | |||
fe7662f80d | |||
d6904b6aa1 | |||
cd581efacd | |||
6c159d120b | |||
4ddd4e7f88 | |||
441912414f | |||
9e177ed5c0 | |||
881548176f | |||
56739d0dc4 | |||
b23972e9c9 | |||
0a9595089e | |||
72c22b5fab | |||
84cdbb0a03 | |||
9fc659f121 | |||
db6abf61b8 | |||
6426a1d177 | |||
9075270b01 | |||
d17a39a431 | |||
db1d091d2e | |||
f98204e78e | |||
3f663cab0f | |||
3fe129e107 | |||
f26d41aef9 | |||
5d8b5998ae | |||
7a5e136346 | |||
bfbab6357a | |||
5997b93f15 | |||
6cdae09dc0 | |||
ff0ef7a2b3 | |||
3986104a20 | |||
1aa60e7864 | |||
045578dd07 | |||
f23d70dc75 | |||
496f3426d9 | |||
17acc9457d | |||
2996f20b74 | |||
dd86a90225 | |||
3b1034b9a2 | |||
ba87fd8714 | |||
ccebe355aa | |||
49fe670932 | |||
f1d173f94e | |||
19e0a282c6 | |||
234f06a362 | |||
0bbbc7def2 | |||
43fd3eecda | |||
631b120e4f | |||
9ea517d606 | |||
7b7a7e3073 | |||
ca3cdc3fd2 | |||
6e12277903 | |||
2f42144b33 | |||
eef02f2892 | |||
b6157ecaf1 | |||
35cd126406 | |||
f89a4fc276 | |||
4d7f380b2d | |||
cb8379031a | |||
0c604ceba4 | |||
30e39c75ff | |||
6d7bebbcc3 | |||
dc332ec7b0 | |||
31e94a2814 | |||
eb08214f0e | |||
a5ab8a618e | |||
b8cbdcae22 | |||
ae86184511 | |||
b704388c2f | |||
a35f9fdd7b | |||
d95220be0e | |||
ba1b86efa1 | |||
cd93de1141 | |||
cc148bd552 | |||
8f82dac84e | |||
89c08f2588 | |||
113d6cc45f | |||
05cfbca5f2 | |||
385f9bcbac | |||
86bc5b4cdb | |||
8d5d9f35ef | |||
439169d5e7 | |||
e405b33f9f | |||
a9c13d4d10 | |||
12b16b17a2 | |||
3473abee32 | |||
b5a0b3a521 | |||
837a0325ca | |||
b1050e8825 | |||
7bb90b1661 | |||
8f755785ea | |||
adc0ec8ee4 | |||
9379b3d178 | |||
18eef8ae05 | |||
f0e22fd08b | |||
ce5297a6cd | |||
74c8df8782 | |||
2d897fac48 | |||
2eb47c9efa | |||
9852041f38 | |||
7f74936212 | |||
4f40b1e27c | |||
041e407153 | |||
31b891428e | |||
4ab8247847 | |||
af24edf8c1 | |||
f8bfd12e31 | |||
993fc4b77b | |||
3ee2be09bf | |||
b3e3948f44 | |||
117a5cd88d | |||
342a40212e | |||
15ae11d9d5 | |||
e11df56f21 | |||
3771af5282 | |||
0718053c56 | |||
3e5014bfea | |||
78f49ddc04 | |||
744bc54231 | |||
44ba30ad75 | |||
63b991f137 | |||
7f9acb8a2b | |||
ce3ba32044 | |||
25e6a69331 | |||
78d07cc355 | |||
3c4df47fe3 | |||
8ed1805cb8 | |||
4d23db73ca | |||
72783953fb | |||
3b0fdb3dbd | |||
23161eed12 | |||
8918427588 | |||
5d858020f6 | |||
198e8b98a8 | |||
88e9c9b669 | |||
0c652a210d | |||
105a90d2e7 | |||
68f5abe318 | |||
59d4c18636 | |||
b67e2a1144 | |||
fc025651ce | |||
7cedc840b0 | |||
5ba731e48b | |||
c792534a50 | |||
a136fd8b54 | |||
fb63c1f3e9 | |||
0b15ab3f27 | |||
f9fd67c2b8 | |||
4ac19f9d44 | |||
74d3e92bac | |||
207fa13405 | |||
208e4a8bed | |||
ee245ab390 | |||
60c8837082 | |||
6cf418a37e | |||
254761e930 | |||
d85f8758fe | |||
84bfb37b60 | |||
578ff13868 | |||
d12acb5bcd | |||
0e8b9a6409 | |||
6171443e61 | |||
5fedd616d9 | |||
5dd6498694 | |||
cf5102ed20 | |||
d3b2032c33 | |||
1e5df1c405 | |||
96eabe269c | |||
3e869a0ec7 | |||
7276a416f6 | |||
a989390533 | |||
562c52a48b | |||
c3cb9bc778 | |||
5f65a7c6cc | |||
95d26563e7 | |||
1cac1492d7 | |||
6c1ac48bd9 | |||
97f11f7aa8 | |||
6db763f7dc | |||
16b5f692ee | |||
80c1bd690c | |||
040dcaa9d6 | |||
66a16752e4 | |||
70c0e1be99 | |||
5beea4624f | |||
50fffa72cc | |||
dae4bf0d6b | |||
823851652e | |||
ae7f7c9930 | |||
5ce4ed4dd3 | |||
5582cc7745 | |||
c384ed5f52 | |||
02e2ba8971 | |||
925d5c80df | |||
1de69a7bd6 | |||
c6979a48e0 | |||
6e73d60305 | |||
f388cac07c | |||
cf593e5cb9 | |||
c3a98e5d5f | |||
1048729599 | |||
72442b37e5 | |||
211cdb3a21 | |||
4cca16750e | |||
b2d261dd1c | |||
0663100429 | |||
66c3261eeb | |||
bf7570bc36 | |||
20b52d0dbd | |||
a1f5e284c4 | |||
0e4737d38f | |||
609b10f7f8 | |||
2cff3d15e7 | |||
4f1d49417c | |||
0766a47b4f | |||
bd1ddfebd6 | |||
a841743c74 | |||
0974456ac8 | |||
d44d5a44a1 | |||
edf5c8686a | |||
70ace8b209 | |||
c3509e63af | |||
89b8206176 | |||
908d87c142 | |||
4ab4e81fb0 | |||
6dae1a4fe7 | |||
d11de73e95 | |||
b08fb5fdf1 | |||
3c9e8c7287 | |||
691d0be41e | |||
dfbaccbab6 | |||
f3bdb189f6 | |||
85b3523639 | |||
9ff61a7120 | |||
f742b986a7 | |||
177bdfa689 | |||
c3445374c2 | |||
c2da6822dc | |||
493294ef9f | |||
17f807e8b0 | |||
96eb98500c | |||
ddd75f6d09 | |||
fbad02fac1 | |||
fbab822db1 | |||
d8316eea9b | |||
8182c9f7c2 | |||
5d94b97e97 | |||
35ddbb6d75 | |||
2b8bc38fc3 | |||
9b0b504531 | |||
c312430007 | |||
4e65c205e3 | |||
372a66c876 | |||
3630349388 | |||
347746cbcd | |||
ef2e1ad27b | |||
8a6b34eb5c | |||
26f72bcac4 | |||
f04466b3be | |||
4ba53d2f08 | |||
7a13046a27 | |||
939e2c1edd | |||
cf06b4177a | |||
f8079d63fa | |||
576a56c562 | |||
cf9b14213e | |||
73cbdb77ed | |||
fd66be9fa2 | |||
96bf9ee898 | |||
6c4c535d57 | |||
0ed4bba5a5 | |||
6e31e5b889 | |||
a5467c6e19 | |||
09832355e3 | |||
6ffef878f0 | |||
644090dc58 | |||
d07508b9a4 | |||
44d7e81a93 | |||
2e91b9d035 | |||
964c6a1050 | |||
90a1c5ab85 | |||
8162c1ec86 | |||
ab46610d9b | |||
6909b58279 | |||
6d7a06227f | |||
1459a13991 | |||
1921ce39f6 | |||
263cff6393 | |||
5a61688472 | |||
919b56c466 | |||
db7bc8b7ad | |||
5768cb5858 | |||
5b77bc33c7 | |||
93650e925a | |||
83823068fe | |||
d922f41438 | |||
ef3d545d7a | |||
e9efbb2161 | |||
a3634ab21d | |||
f28209548b | |||
9589e04072 | |||
6490703ba3 | |||
ca2fce05f5 | |||
9dc813d9ab | |||
833c66a9dd | |||
5d54f696d4 | |||
0746652995 | |||
a1a55c644a | |||
fce57d258e | |||
17e30b7adc | |||
c5b3f8a578 | |||
3a3619fa18 | |||
21b933efff | |||
97fc2cba69 | |||
7ef627d476 | |||
d16c603499 | |||
99a69bb52f | |||
ac8192d660 | |||
cdf3449230 | |||
ef2a40ed7d | |||
09cacbd76b | |||
cb33f0d1e2 | |||
90af4b29a6 | |||
6b9158591e | |||
9c15cda191 | |||
046b8d5cbf | |||
8b74b83983 | |||
8de038b387 | |||
2edc651582 | |||
85594a119c | |||
1a97ccea03 | |||
99ad492951 | |||
ff4ec6f9b4 | |||
0c49de67b8 | |||
0d73528ec7 | |||
80ca4e5722 | |||
1ebe200a46 | |||
5683c81f27 | |||
f0477309d5 | |||
04d613d213 | |||
b5928c2f7f | |||
c8e7247d2c | |||
ac6266a23a | |||
88213f67ee | |||
f8fd17f77e | |||
7f127ee515 | |||
ed214b4ac8 | |||
aeb1b450eb | |||
18b4b2d7b2 | |||
a140bad8fb | |||
bb1b8ab7bb | |||
6802614fbf | |||
619113e810 | |||
a8697bf1ad | |||
f52dec4b7e | |||
6560bf18a4 | |||
315cd40e6a | |||
a7a62b5005 | |||
37e3998211 | |||
31be26ebbd | |||
42b1cb06fb | |||
066ec35adf | |||
87a808a747 | |||
d8b1cd757e | |||
b1b9c8e0e5 | |||
a0a617055b | |||
9ec6f548a6 | |||
46980db582 | |||
d8fd1ddec6 | |||
74d29e2374 | |||
801a28ef65 | |||
3fff090612 | |||
b071d55b4d | |||
244cbc5b6d | |||
74da359dd5 | |||
56b73e3bd5 | |||
59e3c85568 | |||
746c933e63 | |||
f165bbca5d | |||
f335b08ec2 | |||
6e831a4253 | |||
6c1687c569 | |||
09c64e2354 | |||
0a312821ee | |||
06d1062423 | |||
dcfa3dc88a | |||
c45bb8e985 | |||
3e4fea875a | |||
c7670d271a | |||
570f3a4d42 | |||
3c54e94c6e | |||
26daaeb57d | |||
a60442fc2c | |||
8790f7059a | |||
49cf10e9bd | |||
13da6f5151 | |||
a1e0564f8f | |||
55f3664063 | |||
baabd8614f | |||
79df24f4eb | |||
f1afc4d263 | |||
643a256f01 | |||
b7f92ef0ea | |||
e33ca93f05 | |||
79af8b8638 | |||
d2b8bd3635 | |||
02e01559f4 | |||
b0c39e4843 | |||
039570a140 | |||
fdc7dedc58 | |||
098fcdeaf2 | |||
3cf9278bea | |||
13ccb352d7 | |||
c5b099856d | |||
6d912be7f6 | |||
0c54d266d3 | |||
c4784cf383 | |||
44ccbe2fdf | |||
d2615f0d6a | |||
5ab3cf4952 | |||
1926a472cd | |||
d220ca6bab | |||
759ea731bf | |||
e01fd5eb1a | |||
e716e24ec6 | |||
e9c84b8bfb | |||
130adf9d26 | |||
6aab505cd7 | |||
a9c597bc08 | |||
853239dff9 | |||
8f8c3e4944 | |||
dde9960b9c | |||
b1e48a6c1a | |||
b704e9031e | |||
15ef5dc792 | |||
6c4a1850b0 | |||
183d036f3c | |||
b324dc0ce2 | |||
6ad7be65ec | |||
8bf335a2a5 | |||
45709770f4 | |||
6158dd80ca | |||
468d26c587 | |||
c39a97ca58 | |||
8f0810ebb3 | |||
98e0f12d17 | |||
8d37e83df7 | |||
a306bb8384 | |||
c80116475b | |||
2997382df2 | |||
65e48907d3 | |||
1c4848ed8f | |||
64f7fa62dd | |||
16abaa8016 | |||
4cc4a3e4b8 | |||
8abe1f61ea | |||
6712095d7e | |||
5ab308bfd7 | |||
8b93fbcc69 | |||
f641670139 | |||
80af26ef50 | |||
64ce170882 | |||
b6171aa1a4 | |||
087582abbd | |||
6b6d88b81b | |||
55e5d36df5 | |||
fc43e841c9 | |||
895ed6fbdc | |||
f3965261c5 | |||
34ee6dc2b7 | |||
55fe4b0bc0 | |||
8d745609f9 | |||
55edb10da0 | |||
66e4b3af36 | |||
d44fc7790e | |||
291972628a | |||
019221c433 | |||
b99fa9f8f8 | |||
5bde2772c3 | |||
10884a7770 | |||
e858d09d28 | |||
856717395e | |||
b7793200de | |||
bcc0323523 | |||
643c1f5bbf | |||
1fca246839 | |||
b73e68a94c | |||
f9d3c4c9a7 | |||
53f8699deb | |||
6f3dc2eafd | |||
567ed07fe8 | |||
2999e9d006 | |||
b32a228e3a | |||
5a2dfb23c6 | |||
8ebce479bd | |||
81589e835e | |||
22b1f39b91 | |||
c25e982f1f | |||
d5c09fae8a | |||
bf15e04053 | |||
0932622567 | |||
0a5b8bea5d | |||
64d4a19ccf | |||
82875cfc0e | |||
83776b9f08 | |||
a742331484 | |||
2e9df96a62 | |||
9f5d7089c3 | |||
ddc78cc297 | |||
cb9b3407d8 | |||
d7b872c1e0 | |||
c35217f581 | |||
3b73a2eb9d | |||
3b94ffa705 | |||
936102f6d9 | |||
8c687d81aa | |||
01d7263484 | |||
49ac0eb662 | |||
8935ca65a7 | |||
58a374d1f1 | |||
f409831921 | |||
951acb26dd | |||
2df0c95806 | |||
f8d1b7b9b7 | |||
e092aabb21 | |||
48c59a815d | |||
9f40716a87 | |||
39da241298 | |||
a71a87fa3e | |||
176fe2f6fc | |||
4544f475c9 | |||
5bbf59b2bd | |||
1b2f1db711 | |||
14fab991b4 | |||
444e0642d0 | |||
89c841b530 | |||
d3ea465d86 | |||
5c38e03820 | |||
3a400da931 | |||
149481f787 | |||
ba368552f2 | |||
fbf0e4a966 | |||
2f7d9a44ad | |||
cad5ff36bd | |||
5024c757fa | |||
68af2e5352 | |||
d21c87eda0 | |||
f20bfc543c | |||
1e69cefd7c | |||
4e64258c91 | |||
4e63f9c250 | |||
0e2ac8afab | |||
33fa159dad | |||
0452eb3e5f | |||
05e3d8db83 | |||
1c9f86e172 | |||
48d1b289a3 | |||
4c23ef4e9e | |||
3c28cf1909 | |||
4a9c95b44e | |||
bcc8d5e76c | |||
85fedec2f6 | |||
bc9984f516 | |||
6ddfe1795c | |||
729ca4f65c | |||
af19b1633a | |||
e385b83318 | |||
b1c396b6d3 | |||
f11b7ca0d8 | |||
55a8db967a | |||
79744df87e | |||
0a662498a7 | |||
b1c38cbb07 | |||
8f2b7f2f49 | |||
52851046ce | |||
8e4929c3b0 | |||
6df28758f0 | |||
7ef14eb86d | |||
e03c25a600 | |||
45de247efd | |||
203dc88bb5 | |||
a01df92007 | |||
cb29d9456f | |||
5bf2bdbb48 | |||
0ad22f7e1c | |||
23c4e150cf | |||
afa1b27582 | |||
ee9d14d32b | |||
30982c833a | |||
ab3f0b50e1 | |||
49e1f4739e | |||
fc35497423 | |||
c379787a90 | |||
97bc679cbb | |||
0ea1c8f138 | |||
9d909a73b2 | |||
c89b7b74e0 | |||
6b629d8a9b | |||
75d602dd90 | |||
fd44bc2bec | |||
35448f6017 | |||
a70363bd95 | |||
40c672f246 | |||
2619562530 | |||
b7b9c521e5 | |||
9c7fd01117 | |||
a12bb1642d | |||
80e8de2548 | |||
0b80f1a7c7 | |||
2698d90637 | |||
633028c8ff | |||
155f1e50b4 | |||
a48e7c17ba | |||
cfb920114a | |||
0bfce6e29d | |||
5139bc9a80 | |||
d24fe25047 | |||
54387a7ab8 | |||
e1fd6cbd31 | |||
a030f04ccb | |||
9d6e58b3d8 | |||
728b64ffc3 | |||
284904a02a | |||
2ec8a445c3 | |||
16b8edd082 | |||
91012b577b | |||
5a5a2a5d69 | |||
d7750d34de | |||
c3ea09a2dd | |||
9f6dca1170 | |||
919a190971 | |||
e5d93a66cc | |||
a57d06b401 | |||
c2909dda1d | |||
a53f25c92f | |||
696f8afcd5 | |||
e2a81df152 | |||
1752c5437c | |||
54b951d3cc | |||
fcf752905b | |||
a4a5b97265 | |||
4087d6892f | |||
2b7e06bb25 | |||
1b2180818e | |||
8c23b390c2 | |||
677e5f3c84 | |||
2b860f19cb | |||
9a583dcd44 | |||
8aae51ab26 | |||
e769f7ee02 | |||
852f6f2819 | |||
6cb6db0bc4 | |||
2e0907af1e | |||
35f29656bc | |||
bb1f18d973 | |||
261eebe127 | |||
8f0ea8dcd9 | |||
eb36c8dd0c | |||
7026760327 | |||
b2349ee99f | |||
d185b3ad70 | |||
8ecf1cadf8 | |||
9961142ab1 | |||
ae299c55c7 | |||
72f60249b8 | |||
d6fed2eaaf | |||
b48ed34ac4 | |||
561691ac22 | |||
0d4511e74b | |||
4e7153c45f | |||
d0a6a067cc | |||
742beda313 | |||
70462c4c72 | |||
7ea721c487 | |||
4d8ee983ef | |||
0bbf37f535 | |||
8e847d05bd | |||
55612a1365 | |||
a1d8b2045d | |||
f1f4cd7d2c | |||
4350bcbe24 | |||
a899495978 | |||
6302ff23d2 | |||
2196bde820 | |||
a861030ecb | |||
c96e195666 | |||
d7d32075e7 | |||
f527700426 | |||
d2bf027c23 | |||
ac1f3332dc | |||
2c64f72ebc | |||
51a8670a13 | |||
b9f6cd9226 | |||
7010682122 | |||
0e82facfb4 | |||
afdff95453 | |||
b11f12b1db | |||
4df906e32c | |||
fee7abed7c | |||
d1a5d0dd7e | |||
d1e06b1c7e | |||
458b2b5c55 | |||
c0b1cd7674 | |||
8305a52ae2 | |||
b77cdfe96b | |||
0dcb261b4c | |||
46bddbf067 | |||
b8b6c0cd98 | |||
64fbbcf3e8 | |||
a4c6b76686 | |||
c8c7f77813 | |||
dde4314127 | |||
0b620f54f3 | |||
dc10ab0e66 | |||
8d92e3d78d | |||
ae66df6d9a | |||
ed3108fbd4 | |||
f2199f1712 | |||
e5810b31c5 | |||
d8b6a06522 | |||
c8ab6c728d | |||
e854623967 | |||
0b4822c1e3 | |||
fcb82c243f | |||
4415bee62a | |||
42b00110e7 | |||
0cce67dd15 | |||
f7a300fb30 | |||
ca260b700f | |||
8e9fbff5bd | |||
f2a8b82249 | |||
11a3cf84fa | |||
d506e5d50c | |||
7f8b8a7eb5 | |||
06af8e3a35 | |||
bf8c3078db | |||
15c7be1979 | |||
285dc8cff0 | |||
d7e399dbf9 | |||
1e25d3e3e9 | |||
d5c0a6e252 | |||
8a5aa9bf6f | |||
6584074b9c | |||
1d773dfc76 | |||
193b9e1ae8 | |||
32f95818db | |||
bcb7c72907 | |||
51a33f330c | |||
da2eddfb5a | |||
75e9a02bd2 | |||
af239027d5 | |||
6ce83e5271 | |||
c804a7e77d | |||
9d9acab603 | |||
8e42eb0546 | |||
f39c0e6bd9 | |||
81ac09695a | |||
26c5653182 | |||
0f7a3875f7 | |||
0036ecf956 | |||
96554de17a | |||
fabd1e39ae | |||
f992bfa8ff | |||
f1a04674fb | |||
ec4c31e37d | |||
ac520cd872 | |||
50e493d692 | |||
d49d8bc559 | |||
3e94b58afb | |||
1b81973358 | |||
880ca9a57d | |||
4d8d12f917 | |||
e78e4165da | |||
e4c7c24ae4 | |||
3b8daf7cc9 | |||
104e70c383 | |||
82ac7d195d | |||
d19d075326 | |||
ae03e4679e | |||
05b0e2c164 | |||
ff5680fb0e | |||
88cf0efb81 | |||
7783b200a3 | |||
d13954970e | |||
743a781eba | |||
f53f3c77be | |||
61b61ce960 | |||
09e6b80fd6 | |||
4cad5f7b40 | |||
3f43ff22a8 | |||
cf6bbbae70 | |||
ac1ef5139c | |||
ce0775239d | |||
56f267146f | |||
d98af5a0b1 | |||
3b3c874175 | |||
1f19e5cb3e | |||
f2062e75a1 | |||
ff5df458af | |||
6a8c5ca650 | |||
01a3516478 | |||
868ce06f67 | |||
e5b6dc5508 | |||
ee86322ab4 | |||
52d19bf4a6 | |||
fdcc1dcb36 | |||
5805ac83f7 | |||
772048092b | |||
be1219a73f | |||
9ab057fafc | |||
f9b6c8cef9 | |||
f159973d8b | |||
4a2f97710e | |||
735a8e77e2 | |||
e50cc20f76 | |||
5c19c6ea7f | |||
4c0b6c71ac | |||
cfc065b41b | |||
d81381bda6 | |||
6613553c13 | |||
9a304cc198 | |||
ebaec17703 | |||
6fcc06bfe0 | |||
2ba66f4f91 | |||
f9fc32e89c | |||
ee275d36bf | |||
ed39123f4e | |||
68726b0921 | |||
74a91aafe8 | |||
a15853ed55 | |||
7c51657aa1 | |||
86e9639d0c | |||
1620131ed5 | |||
743ee53bd9 | |||
7a04d97bdf | |||
6c99194f42 | |||
df8321c282 | |||
9bfbf0ed07 | |||
8f5606edbd | |||
a0f921398f | |||
bf287ab8c4 | |||
cec11f3843 | |||
f66bad43db | |||
b36ed44ca2 | |||
f5aca42e95 | |||
6e9ae69593 | |||
3c0cb1dd12 | |||
de56d02230 | |||
c04e8869f7 | |||
9d60d0b4c5 | |||
a42b181b76 | |||
24657797ad | |||
3981b55b40 | |||
d98471dbea | |||
9cd94f639c | |||
afd950c671 | |||
9328db4c19 | |||
7b40e23840 | |||
8ca7bdcd36 | |||
d51491e1eb | |||
6e1807e51d | |||
785ff6b3df | |||
408016a34e | |||
fc77fa68d1 | |||
48b24e5c65 | |||
b2045fd034 | |||
782e9fadb5 | |||
e48ac56cc5 | |||
f110eda465 | |||
e830d5dc7a | |||
2b1f8ac050 | |||
e8d5d678bf | |||
6df5de861c | |||
c35ae4af3e | |||
ae123a3364 | |||
e155aa5f3e | |||
6fa2765f55 | |||
ecb84dda46 | |||
62e58f2fe9 | |||
0a4e34a142 | |||
1be50bcdb2 | |||
f0c33ef1bf | |||
b059754fe5 | |||
cd4d6483c5 | |||
8dcccb4444 | |||
693da3ee62 | |||
5a9e1a0c94 | |||
a539e4b362 | |||
d13fb1d53d | |||
d9cb82ca6c | |||
8f231e5678 | |||
604242a76c | |||
0d0d33f104 | |||
58907a2b3f | |||
83f6ec86d4 | |||
f832f702cb | |||
2eb55696ed | |||
2ef31322c4 | |||
0d088ae198 | |||
a184240855 | |||
fdd941c84d | |||
419e0adff9 | |||
60a16aafbd | |||
1b24168791 | |||
8909c1e338 | |||
ea7c822d37 | |||
8c183a348f | |||
835208d616 | |||
6ec8143beb | |||
0f57ddefff | |||
dd37e8bf49 | |||
1f733b04f7 | |||
99c03d3073 | |||
dd3b440f8d | |||
feef105acf | |||
ed4154e62d | |||
e6c204cdba | |||
0e83d485a3 | |||
0000f26fee | |||
a74ab9d2c1 | |||
0ca96adaaf | |||
4cea9bfa3f | |||
59b5c21cf6 | |||
05fb11b1f0 | |||
17f9a48252 | |||
4b55746f6c | |||
9836dfcfd4 | |||
6501626692 | |||
184d65cc62 | |||
c93bb4708b | |||
e9d6da6c28 | |||
d7ed1a5d30 | |||
d29c3abc7d | |||
448e0fe067 | |||
faa02afae0 | |||
342eb03731 | |||
58388935b7 | |||
2e451f40e5 | |||
868229a044 | |||
73590572b0 | |||
5b6b059b40 | |||
060cea219b | |||
af9d82c02d | |||
cc8fb66da2 | |||
f0edc7b931 | |||
b39632abb0 | |||
c59b859ec0 | |||
a46939b591 | |||
bfb4a25026 | |||
646276b37c | |||
58f9d86d0b | |||
cf0a268fb1 | |||
ec783ae587 | |||
f50d44792c | |||
b225b0200e | |||
507f9b7ae2 | |||
5991b82cde | |||
f38bc8d09e | |||
9824f283de | |||
341d866c00 | |||
965ddcb564 | |||
a0a1a101e8 | |||
277c922ec3 | |||
f372627d61 | |||
1be86325d5 | |||
6d71454aa0 | |||
75d6aab0bb | |||
496dce093a | |||
f740ba0ffe | |||
a82af054a4 | |||
c80e3da644 | |||
af9bb566f8 | |||
5ca929417b | |||
3c1c44bda1 | |||
c05977f144 | |||
55333ef1ac | |||
49ad6d2aa8 | |||
b7e4373d6e | |||
699c074816 | |||
c26855f953 | |||
1457b38e7e | |||
55d08c5be3 | |||
ffbfbd43cb | |||
cb24fe5c5d | |||
aa81d8f12d | |||
2ee1a0241b | |||
89bc7a037d | |||
a21683555a | |||
5a98235ee0 | |||
3ce836fd8b | |||
5a5f7814ab | |||
907d475897 | |||
41503fc0b2 | |||
cfc7646a5a | |||
7103336456 | |||
48db4af56d | |||
8285b5d9a7 | |||
43218bd027 | |||
042fae143d | |||
f6f997525f | |||
753fb5e1b2 | |||
06a42df732 | |||
66a2a62c7b | |||
41bbbde232 | |||
373c0ff7d0 | |||
30345d450c | |||
b9dc83466d | |||
f26175a99f | |||
c7881e6eb4 | |||
97b98a4192 | |||
fc65d3f43a | |||
aa87695f3c | |||
c3fb84397a | |||
8d78cd97d0 | |||
24d2c4089c | |||
38f47c65a1 | |||
896096374c | |||
0e2326ed06 | |||
a07db454be | |||
87a4a81798 | |||
f0ee743ea1 | |||
fbac1e9d95 | |||
d8536ed78e | |||
848dae52ab | |||
f62a470dfa | |||
16a8409014 | |||
dfa5b8aba5 | |||
54270e960f | |||
6541b7fcef | |||
19af49a49b | |||
99e189cae3 | |||
6f68563df2 | |||
df03b2a156 | |||
e1211ba01b | |||
24ea3f0ee8 | |||
79045ab283 | |||
e27189364e | |||
ba224e4eb9 | |||
336950628e | |||
6ede552292 | |||
07b6356b38 | |||
4c5730a222 | |||
8ab84c8d91 | |||
89ef82337d | |||
babaeb2d0c | |||
52b8f24b75 | |||
464addfc8d | |||
8df73c2f6f | |||
9ab3971e63 | |||
09888cb89f | |||
2abcc9ce8f | |||
5b0e92f034 | |||
a3bfb3d25c | |||
2c1df6702c | |||
b999e23d27 | |||
e0db9f3ea1 | |||
dcc3ca664a | |||
7d37e3f668 | |||
e48f6bbec4 | |||
d27caaabc3 | |||
0dee706a87 | |||
7d527beea8 | |||
4733778460 | |||
c048f4a356 | |||
65e245c003 | |||
600d59ff58 | |||
703628f354 | |||
693de081ef | |||
f367249bab | |||
2841db082c | |||
ce24f974aa | |||
1f93e6fd3f | |||
7dfde9029f | |||
f5d62b828b | |||
703eb682b7 | |||
5cae3192b1 | |||
83e143032d | |||
e0e7cc24da | |||
8bc746d577 | |||
a84f403e79 | |||
e4f4482d2a | |||
844b4e96cd | |||
f3b4e03243 | |||
4f5e2a438e | |||
32c980e29e | |||
bd29392825 | |||
9756432876 | |||
8b2d1a9b21 | |||
adbd97323c | |||
77a8b2d751 | |||
08c850938b | |||
7db598c04e | |||
1ef224f5fd | |||
b01c48698d | |||
1546fa276a | |||
f50bd74b46 | |||
414a5c36c8 | |||
c4455b6915 | |||
9013caeab4 | |||
40a1e5a9b2 | |||
4dadcc1dfd | |||
0b8678f7ee | |||
aa8dc94a97 | |||
20996e994e | |||
db17f04830 | |||
b99fca62d8 | |||
8818ce3306 | |||
25d3f2e06e | |||
1537682026 | |||
ebd05be2c4 | |||
c90792d876 | |||
b92630804f | |||
1afd5ef95a | |||
e5cc2c6d98 | |||
84fdd4d737 | |||
5fe2772567 | |||
d2f9b66424 | |||
c9b39f2eba | |||
2ecc2119fc | |||
49b7ebdc53 | |||
70f72c524d | |||
87e0ac743a | |||
b5b8b0e9cd | |||
d10b358767 | |||
0887fa8fde | |||
799dd48861 | |||
919d1f349f | |||
a36b6e8315 | |||
69f9dfc9f6 | |||
27efe68f1c | |||
e9223618ba | |||
e9672a5285 | |||
7d724d9931 | |||
edcc6b2031 | |||
a71948c9b7 | |||
a395e347df | |||
f4b336a974 | |||
087d5aa7e7 | |||
3dc42bf542 | |||
cef1d2d0f1 | |||
961d28331b | |||
2bd2114617 | |||
9327ae8997 | |||
11e55a1b91 | |||
1bc6477f58 | |||
f2d2d84832 | |||
709436aab4 | |||
a315d1d966 | |||
81a22aed38 | |||
de4e18bf47 | |||
a25c00039d | |||
99076a3460 | |||
cf7094d00c | |||
51c4e88258 | |||
2dd69398c7 | |||
4a98493970 | |||
fbf06069f3 | |||
87ed848e56 | |||
8949464294 | |||
0b83d1008e | |||
cb72efb4c8 | |||
f0e4b742f8 | |||
df907c3ddd | |||
ee2f9eaf05 | |||
8739562367 | |||
ce4f086fe3 | |||
aa7dafe910 | |||
e8399b5657 | |||
2500cb2621 | |||
d20a4dc2b7 | |||
85776b254b | |||
1c84a20c43 | |||
c332f6f0ec | |||
be6c29d907 | |||
07ed5e1cd9 | |||
0052e60643 | |||
25e72558eb | |||
0413afc2a8 | |||
c953f5993d | |||
8124de52b0 | |||
dbf306672d | |||
50e7336720 | |||
f79054fe8c | |||
1db322b42f | |||
dca8c837f5 | |||
544373efd7 | |||
0c4dee8f9f | |||
09873558ae | |||
7e30ab54d6 | |||
c053eaf14d | |||
741a88ffa6 | |||
0b8af252b2 | |||
07cfabb7e6 | |||
5056233f9d | |||
6e5e82f8d0 | |||
725641270f | |||
dfc104d8eb | |||
dc24db43ff | |||
d1da50a479 | |||
11ca358242 | |||
85a8768424 | |||
7af35b56c1 | |||
b95d333179 | |||
d7f82ba31e | |||
eecb12285c | |||
699b7ed5c3 | |||
9d757a7345 | |||
03f64b60ec | |||
5f19fa7c84 | |||
5f4fb5e4ce | |||
ad0f49e1d2 | |||
68c1997837 | |||
4eceebaad8 | |||
2900504f27 | |||
645f662e3e | |||
5f1ba45966 | |||
dcbfe73891 | |||
2d62bad5fb | |||
ce1b2f4dcc | |||
d7b0d16830 | |||
be2e302104 | |||
7eb2de5821 | |||
b43341cfef | |||
45a3310320 | |||
cd04cb01cc | |||
96345b1626 | |||
035795d468 | |||
faa7f3768e | |||
b5203baba8 | |||
41f58fce5b | |||
6c8349a306 | |||
63acc24047 | |||
96b2a1a9ba | |||
c0562bf860 | |||
9ed2b74661 | |||
4f4815251e | |||
4184f8a770 | |||
a351e5e824 | |||
a92ff3b658 | |||
f0b38ce00e | |||
e8e6aa6617 | |||
2d36978eab | |||
28e4874434 | |||
1a919155cb | |||
a62cca36ad | |||
ff5d15b5e5 | |||
6844da4628 | |||
bf701c4f7b | |||
c03b9fdfeb | |||
830689f1cb | |||
862aece9fc | |||
45f9e6758b | |||
80b8404578 | |||
09ef9515af | |||
6303698ba8 | |||
abc0c2d2a2 | |||
73ddaf48be | |||
f8d992dedb | |||
53cbc64f9d | |||
af322b6de4 | |||
65d1042e04 | |||
2d8be43085 | |||
b1c7c228c3 | |||
6dd2fc3c45 | |||
2646469478 | |||
b16a0f48bd | |||
727e23419e | |||
f3d7343f93 | |||
2a87039a18 | |||
4509163e54 | |||
97c421f34b | |||
2bc865b024 | |||
0467df190e | |||
f3c9f5b2a3 | |||
ffa5eae47f | |||
fc25356fdf | |||
baee2572ec | |||
a9685adda5 | |||
8c1890ff25 | |||
05b0d474e0 | |||
5be9d1fc6a | |||
02791e765f | |||
abf1f0e348 | |||
86bf75fe54 | |||
b23a6d5359 | |||
212904537b | |||
c46cd5e7e5 | |||
480a765f5f | |||
65b6ea416c | |||
2f56c3cecf | |||
5c1432c670 | |||
c49e83a926 | |||
0dc2c46d49 | |||
245d8b7b5c | |||
5884af8af1 | |||
08d52be20d | |||
941f05e7fa | |||
493cefaa6e | |||
b75233cbf1 | |||
7c964e7058 | |||
8fe89133ac | |||
3295803b5c | |||
be65746ddb | |||
6a762906dc | |||
7448c69213 | |||
8026fa70ac | |||
e99bfaf17f | |||
210f27f71e | |||
11a51e1983 | |||
eb086af5e2 | |||
6b8e826c75 | |||
d8fd09345e | |||
3a068e9813 | |||
fadfd65136 | |||
fb896260c0 | |||
e8278c6da7 | |||
7c8f94bfe1 | |||
eeb9716173 | |||
73fb0551b1 | |||
379436655e | |||
2b1bb67844 | |||
b081d309c2 | |||
9a43dc9ad0 | |||
7c0a8f6b74 | |||
fc97777192 | |||
1bd6b33598 | |||
73702c23f7 | |||
40f74fc62a | |||
85f1584844 | |||
7c116acf0f | |||
85ebbcfb82 | |||
e51f900cdb | |||
134cd2dc81 | |||
7f70a86289 | |||
f77c2e8254 | |||
be66ee52cd | |||
d9eb4c5248 | |||
24c87a47a6 | |||
f44529769c | |||
4f739758e5 | |||
ec9908b317 | |||
38e1ad5ade | |||
7ea3830e56 | |||
7d75943cc2 | |||
6afa494b57 | |||
b0e74d348a | |||
0064c1df74 | |||
8831e1d946 | |||
d10ed974cb | |||
27ea604467 | |||
9b9d08c3db | |||
a65fde6374 | |||
391d477e45 | |||
d4fdfb70c1 | |||
2ce74ff9f0 | |||
3715a222fd | |||
e0370947ca | |||
fdefe23d46 | |||
42552e74eb | |||
66001a3e88 | |||
9de452853e | |||
50d3264be5 | |||
c78bb979ec | |||
6649f7ab72 | |||
9ecabe4629 | |||
6e83b890bd | |||
db95b967fe | |||
9ea41595b7 | |||
74b4fe5cee | |||
1ef654f19b | |||
f030128686 | |||
0fb7d1237f | |||
2497101c32 | |||
9a0feb1c35 | |||
ab411a6a9b | |||
a2dce3fb63 | |||
d555c0db41 | |||
c9dc500a2b | |||
7dff303572 | |||
c2433689cb | |||
cd0adfcfaa | |||
e5815154f9 | |||
058dda5d0a | |||
3e44e9d3f6 | |||
c77ea41af0 | |||
c8b35b9b21 | |||
78396717fe | |||
cd61cb3847 | |||
259d5e6181 | |||
67c130302d | |||
ffb78484da | |||
018cda43b7 | |||
268fb840fd | |||
053062f606 | |||
827591d376 | |||
509b502d3c | |||
1b36cb8331 | |||
a3ec4e7948 | |||
2064395434 | |||
519062bc39 | |||
116ac30c72 | |||
b93ad8615c | |||
d54b410429 | |||
1a6077c074 | |||
aa1bb7b9c9 | |||
10a5466436 | |||
9120cf8642 | |||
d711713785 | |||
7337f48d0a | |||
4d2c85c3a3 | |||
3906c3fc90 | |||
45dccd30f9 | |||
bdb5abaab0 | |||
4cd9b99de7 | |||
23e8fc5a49 | |||
b84facb9fc | |||
b778c35396 | |||
5304bd65f5 | |||
240cf6dd94 | |||
a365ec81f3 | |||
1ea3dae5ac | |||
255f217c26 | |||
1c3cce1f89 | |||
0e1646ca1b | |||
4c24be60ae | |||
0cf4acf31e | |||
6cd4c206aa | |||
cca556f766 | |||
64ca5d42be | |||
4b115e18fb | |||
e171e50821 | |||
fade781d96 | |||
f30bdaad1e | |||
afc968437d | |||
582016a586 | |||
9f1f3dca34 | |||
cb8a91170d | |||
ee6ac3c2d5 | |||
b82a3fe252 | |||
6c7eb03102 | |||
cdff826016 | |||
636bf078a0 | |||
7447122546 | |||
d535a82372 | |||
4a8c0f7f80 | |||
02869d8173 | |||
9a261c52d1 | |||
23c03d454e | |||
06df705240 | |||
48e5823ad6 | |||
e4d3365a1c | |||
6ff965f697 | |||
2ccdfa433a | |||
7778a8fab2 | |||
e9f9692c18 | |||
2521073dba | |||
ec8f2d4bf9 | |||
218d61648b | |||
96bdb0ddac | |||
d5ed55d074 | |||
38997a0faf | |||
f55bc04d04 | |||
50860d7ffe | |||
4ff3915d59 | |||
256187ebc6 | |||
6628088e3b | |||
90d88deb81 | |||
3afb1a2f21 | |||
82956d275a | |||
8525b3db01 | |||
0bf84b77d8 | |||
2d3b4ad8e2 | |||
4ff8d2fbbd | |||
b7532740ef | |||
67b47f42c7 | |||
3e530cf1b5 | |||
e86640e930 | |||
0156249123 | |||
4b8fb139ca | |||
b560afd35e | |||
0781659dd6 | |||
15c1f9c434 | |||
ce1d1310f9 | |||
9f59221e3c | |||
2ec979d490 | |||
df6c78dfa1 | |||
729ef4d786 | |||
ba174d810b | |||
1a21af0361 | |||
d3cbe26106 | |||
4a8fb8e14d | |||
0fb0745401 | |||
66413f09d4 | |||
3bec7c905e | |||
36de629899 | |||
764389a78f | |||
21eafc09ee | |||
f9b998e814 | |||
1e3feca4b6 | |||
03534ee713 | |||
cb906e1913 | |||
14fb34f492 | |||
a0269acb16 | |||
a7ba6f3263 | |||
23fb4d436d | |||
6d6219811e | |||
2337f5a173 | |||
026e80bd10 | |||
b181c551a5 | |||
f1b6793145 | |||
dfe3b7c705 | |||
24e23ba0f5 | |||
8975664d09 | |||
f327dfed30 | |||
c6c6646fd5 | |||
f85ae175d8 | |||
f996dc1bc3 | |||
72ce909ed4 | |||
a5a380db7b | |||
38272e8a68 | |||
333d781f92 | |||
f2aa83a731 | |||
1fccbaa693 | |||
6b272f4f00 | |||
1ca0664b75 | |||
b3a9e008dd | |||
8a82a66a95 | |||
035e9ddd13 | |||
8c724b6ac1 | |||
7829fcb48b | |||
e05aec1a44 | |||
fff0733d30 | |||
2b07d4cac9 | |||
2a15edccdc | |||
6ba1dfd166 | |||
45fff3cbbf | |||
7c1ae72d43 | |||
5f6ba74f64 | |||
ef8a119c44 | |||
a9dba4eb5c | |||
fef1090a4e | |||
23d3d6ddc4 | |||
036bd1862d | |||
a9369af647 | |||
5e33869457 | |||
94d2748ec0 | |||
5645651c5b | |||
2a8e90a438 | |||
ccd588c060 | |||
3cea974dfb | |||
6642bf3f81 | |||
b2fe58ae3a | |||
e30c4e2e54 | |||
9447ad82dc | |||
45f707e972 | |||
a88c19b333 | |||
704f591fa0 | |||
875a9dbc26 | |||
170ada4a98 | |||
944368c4f2 | |||
1d003c1c0c | |||
9250f5d8e6 | |||
459449a82b | |||
7efc30ab8e | |||
afdc7d241f | |||
067ddb6936 | |||
56a3804ff7 | |||
696c6e0630 | |||
c18dc2c71d | |||
af499736cc | |||
167378b027 | |||
60c49c1692 | |||
893b8376cf | |||
8daff472a5 | |||
6df1b23fa7 | |||
fb6c8045d5 | |||
e247169766 | |||
1fa409eaa2 | |||
35a74cc649 | |||
ef801b7a61 | |||
5b244a04f9 | |||
6bdbdaff31 | |||
2bc4506f9e | |||
8db380b2af | |||
8898709a9a | |||
69f9e2e50f | |||
56857bfa9e | |||
357c0db20c | |||
05aa661f7b | |||
d0cc862343 | |||
e1359771fd | |||
06d7d8e6d0 | |||
0ed5ad3a52 | |||
46f223bf79 | |||
f559d2531f | |||
5af4d35513 | |||
32262bb7c5 | |||
20ee087b9e | |||
067b156336 | |||
35be819fe8 | |||
0981462aef | |||
cad99521c6 | |||
b499aba4ed | |||
c0bd0bd04e | |||
49df3cb3c4 | |||
52e4a008d5 | |||
a07fbf5c02 | |||
96f96ba35d | |||
5b986f3668 | |||
8c4a43617b | |||
12f2484d08 | |||
8889e0d39a | |||
0797dec46b | |||
4e1e74142e | |||
8db34fc65b | |||
255e1430ed | |||
30f9d6bf83 | |||
0e244c3eba | |||
0e810c5887 | |||
644882456b | |||
a3d0b95d89 | |||
483e2fdf31 | |||
882826fe0e | |||
1173bcea7b | |||
c695ba91f9 | |||
c2e6f83d97 | |||
a9daba621d | |||
b29f4fd06e | |||
9f6d8255bc | |||
00bcda4426 | |||
6f8d21620b | |||
68d266a480 | |||
b0c00d0e6c | |||
86518ea825 | |||
39458a1ca5 | |||
18b2f489c0 | |||
2814a8e951 | |||
b88e39411c | |||
6a43721524 | |||
b7055c5838 | |||
2c6ac73e0a | |||
78f47a8726 | |||
c9e3f1f4fe | |||
98597a321a | |||
4b35464acf | |||
3023da916e | |||
2ec904692a | |||
a3f318d7bf | |||
e5e09d816d | |||
bb04b51521 | |||
0902aa408e | |||
4b88528178 | |||
86adfcd8a8 | |||
c443997cfe | |||
fc1a736816 | |||
73751e5cd9 | |||
bb52765f51 | |||
c0b7d32b36 | |||
6b78e6e283 | |||
47a6912f8b | |||
c274d7127a | |||
248a978a14 | |||
c2614dcf43 | |||
c6c228b448 | |||
cd94110fee | |||
6328c92316 | |||
e042b8ec92 | |||
a3ddf45074 | |||
f2e10c7d11 | |||
fe0d206656 | |||
f283160942 | |||
0c2bccb858 | |||
f432dbeca0 | |||
d0ada59f57 | |||
6e5eb67851 | |||
44fc9ee80c | |||
98a07cd0ef | |||
aecd4b52ef | |||
ce86b20e6b | |||
a48ccbc8a8 | |||
732b471309 | |||
78520b864c | |||
1a68d24c3c | |||
22739403b6 | |||
0e43255da9 | |||
b5e059dfd9 | |||
60af4a2e37 | |||
1e850cf9ef | |||
31ef91900b | |||
51908f6060 | |||
51d3511f8b | |||
6a8eae6780 | |||
99cecdb3ca | |||
bac7e034f8 | |||
3d66923310 | |||
95c71016ae | |||
5cfae6e117 | |||
a2e5de1656 | |||
0d59d24989 | |||
01ffece9ff | |||
5b5fc42a0c | |||
8cd352a600 | |||
2a3fd88081 | |||
d39d8e6195 | |||
b3d86374aa | |||
76db5b69de | |||
1ac3d6ddcb | |||
f4c6a0af1f | |||
d0c392f311 | |||
af1fed3308 | |||
deb0cb236e | |||
31592712a4 | |||
627b3bc095 | |||
ce667c6457 | |||
9b89ba0659 | |||
c86d347034 | |||
5b3a15173a | |||
0430c16f8a | |||
554481f81f | |||
2c84e3d955 | |||
dc7ffba8fa | |||
695719540b | |||
0e019e18c9 | |||
f728bbb14b | |||
4080080acd | |||
0a0f87b9ca | |||
7699a119a3 | |||
73fbcde924 | |||
a1efcc4da9 | |||
d594574ffa | |||
dbbb5e75cf | |||
ddb73db287 | |||
143f092153 | |||
d89adef963 | |||
5f3cbf6f7f | |||
a9fdacc60b | |||
9db9ad3d66 | |||
11dcda77fa | |||
4ce5f0931b | |||
f8e2cd5639 | |||
8b4f66e457 | |||
939631c94e | |||
467a149c06 | |||
f62f720c55 | |||
ba8fd9fcb2 | |||
fdc323af62 | |||
44bac0d67b | |||
191514864e | |||
258a4d5283 | |||
62a85fb888 | |||
7685320466 | |||
c30a2406a9 | |||
9232042c55 | |||
d8b1a59dad | |||
1e05d38059 | |||
d5871fef4e | |||
7f4fa70a41 | |||
fa0c4d8410 | |||
aeb24889fd | |||
8ac9042501 | |||
2d821a07c6 | |||
9680106b45 | |||
709358615c | |||
0ad1b42706 | |||
2333e1f434 | |||
4444db9e6d | |||
c5d483a238 | |||
cc1c66aa13 | |||
67d6c0e8af | |||
b9afac5008 | |||
aadda1f314 | |||
293fa2e375 | |||
ddb1597501 | |||
96f8e961ea | |||
f699dba2ae | |||
250e8ee4a1 | |||
ce47755049 | |||
8125a790a9 | |||
b7e653db6a | |||
74958693a1 | |||
cadc311703 | |||
924f3c9075 | |||
a7933c84c1 | |||
fe1a06ebf2 | |||
823e7dbe1a | |||
90b8217eb2 | |||
c897271756 | |||
d1c9d41954 | |||
1906a10b1a | |||
a03cc57473 | |||
e00799b314 | |||
faa5ce3e83 | |||
937d025ef6 | |||
a748a61cd6 | |||
b24420598c | |||
b005ec7684 | |||
6f6ee29738 | |||
ff3fef6d09 | |||
515958157c | |||
dd4e9030b4 | |||
f94670cad7 | |||
b4dd74f2ff | |||
9a2b548bf6 | |||
d6e3de4f48 | |||
30ccaaf97c | |||
3d9f7ee27e | |||
211dcf3272 | |||
1d0b8a065b | |||
7f82b555c8 | |||
f7aec3cf28 | |||
c6c133f67d | |||
73db23f21f | |||
4744f5c6c6 | |||
e92bda2659 | |||
a10392efcc | |||
e52f13afae | |||
07c50a43ae | |||
0cd2f68bf3 | |||
4ef10f1cec | |||
43151c09e2 | |||
871b5f3246 | |||
ed66bdaec4 | |||
345022f1aa | |||
f296862d3c | |||
5aca310d10 | |||
7dab5dc03f | |||
2d6e0984d1 | |||
028c7af00f | |||
6df83e4259 | |||
afdca418e1 | |||
d8728c1749 | |||
e5afabb221 | |||
a0a6ee0769 | |||
a65bb0b29f | |||
3df7b5504e | |||
99f44ea805 | |||
97ccc84796 | |||
a43b2fb17c | |||
8e72fcab59 | |||
261879022d | |||
2a47ff2977 | |||
c3a81a1cce | |||
220d739fef | |||
4a57c6f230 | |||
4a93b97bec | |||
ac2bbd7e2f | |||
ad9f500ad1 | |||
15d7175750 | |||
41d372a340 | |||
83b84e8d26 | |||
f22daca091 | |||
ae4d5a30f2 | |||
9708481005 | |||
1c32c9e06d | |||
7a3d92ffdb | |||
a72b36d94d | |||
6b25f6f592 | |||
7d91842e8a | |||
2b4b1d2f76 | |||
2ce5c74f33 | |||
168fabfc70 | |||
eb53c28352 | |||
64c38909ff | |||
940492a5e1 | |||
134799c734 | |||
e086da68cd | |||
ed46fd629e | |||
12bb1554f6 | |||
263d9128c4 | |||
8c6aaf4a2d | |||
d9b3e307e3 | |||
709fd716d8 | |||
28053059ff | |||
94ad839437 | |||
84fdd3c750 | |||
3f5d5e0408 | |||
309c390154 | |||
32493c6102 | |||
7569314a24 | |||
e640eab229 | |||
848fe3e428 | |||
a52e4a3262 | |||
7f2d03dcd0 | |||
484cbc8c73 | |||
f6118ec876 | |||
488420eeb6 | |||
a8f066eb51 | |||
7673e5a297 | |||
add873ca9b | |||
f12a533e91 | |||
18766d6d41 | |||
3dd1094c50 | |||
9b2ed944dc | |||
a445463a92 | |||
cefdf98b22 | |||
f675c25b83 | |||
aa954c4807 | |||
87f2b37348 | |||
1d36d84229 | |||
0091690863 | |||
a289125108 | |||
e26019b14b | |||
60981228d4 | |||
03a0914553 | |||
959fce2198 | |||
1c171e4e55 | |||
50780b62a7 | |||
e60820a9d1 | |||
c0fe99714f | |||
6f67c1a277 | |||
04d27cbcb6 | |||
e3ac774a18 | |||
75aedb4d3f | |||
f3e02f9281 | |||
1e254b63a0 | |||
09ba0c470a | |||
549d6d7c8e | |||
616f0b8b4f | |||
52a452488c | |||
b13f7158c7 | |||
9582cd4599 | |||
7e213f3ca6 | |||
cc781cad00 | |||
cbbb638ca7 | |||
63426bc9a8 | |||
63c52fd936 | |||
5e5bc5cd49 | |||
6fb7586b00 | |||
e0da3bf2b4 | |||
db76446099 | |||
20e003656a | |||
ce4c654209 | |||
1dedfaeaac | |||
5b2a1f9fb4 | |||
2cf337f731 | |||
9dbc3777a0 | |||
8c277033cf | |||
7e536515c6 | |||
92d170d065 | |||
41464aec18 | |||
4686796543 | |||
f036820fd8 | |||
acad3c4d5c | |||
34367a7481 | |||
48e8c568e2 | |||
44a0df5cf8 | |||
a72b1f99ab | |||
d48342f90b | |||
3a7283c670 | |||
3beb421e50 | |||
d0fca8272c | |||
8aafa06259 | |||
3c734da86a | |||
a60f3b4b81 | |||
4262bd6ace | |||
6ba4f4df46 | |||
35147230d7 | |||
e649c2f6fc | |||
c7182bf513 | |||
4a8087311f | |||
29ab22b9f9 | |||
5bce00862a | |||
cd81e4bf70 | |||
cb915bc86c | |||
b5dd681f12 | |||
10beb4c144 | |||
b18896adf9 | |||
15be83c06c | |||
04f7710cd6 | |||
914e0d2b01 | |||
3db897d64b | |||
5b9a16826e | |||
b5f986c7d0 | |||
1cd5ced2ab | |||
514f466687 | |||
9e568e1e85 | |||
0697e3d5a4 | |||
6deb231e0e | |||
3cda19f61b | |||
e28babb0b8 | |||
dce913496e | |||
25d4905d6c | |||
00b2a773b4 | |||
07b8c5bc7a | |||
3c3f1678e5 | |||
5eb1c4e4bf | |||
1ffb76501e | |||
c0f7a75d5c | |||
ef7e4a8b57 | |||
cf977950fd | |||
84e0f7bc2d | |||
4a8e71e2c6 | |||
41bb1ca707 | |||
63a9cf2963 | |||
27ace66836 | |||
63ad844001 | |||
2c73552837 | |||
78af350610 | |||
9d18bc545f | |||
21e5441f92 | |||
b503379319 | |||
d1f2e7c0cd | |||
abab635a01 | |||
6d1eef039e | |||
ccd805ed3c | |||
5f2b9028b8 | |||
4272af584f | |||
52ce8ec145 | |||
58bd637c43 | |||
8f493c8e13 | |||
d596e65ae0 | |||
a0b0bb6cbf | |||
b795f8043c | |||
abd9d5919d | |||
52ab0be787 | |||
dd7d3bf738 | |||
bc595a9724 | |||
5865602fd0 | |||
f07a1edcfd | |||
4b03b3f4c5 | |||
a9997cf377 | |||
00cbfd2eb9 | |||
5c4216538b | |||
a22bc5a261 | |||
94fd22b448 | |||
f11bb8bfd4 | |||
a9011a641f | |||
15559974a8 | |||
73930d7e8b | |||
8ad7339cf1 | |||
ab1b3b09d6 | |||
026dfadb59 | |||
1b0024518b | |||
d1bef0ce48 | |||
4db365c947 | |||
f60b65c25f | |||
f192c665d9 | |||
30da53f3d7 | |||
962f9aad11 | |||
6f3fc22c9b | |||
b90ed6bab3 | |||
25ee6f8116 | |||
ccb3875e86 | |||
9860ac983c | |||
9d63456ed9 | |||
7ac25e221e | |||
090d2d8362 | |||
3b8b307c4d | |||
9050759f6b | |||
cb2f06b6c1 | |||
802d19729b | |||
58f52f92f8 | |||
1351bfc6e9 | |||
4b2359ffa8 | |||
f71dbf2df3 | |||
de50f45295 | |||
241a77040c | |||
279d54f800 | |||
205d3d10e3 | |||
36c10c74e4 | |||
87fa313d44 | |||
96c669803c | |||
f28f301865 | |||
83f9eae654 | |||
6792bf8876 | |||
fb1768270e | |||
94fa58cd6e | |||
b2327e7641 | |||
94a23f0d21 | |||
b20d6317fd | |||
3a64c7e101 | |||
6a4cb5eebf | |||
ecb614765a | |||
6bd1df2901 | |||
a2e5203b85 | |||
d22b9a4403 | |||
03287936e9 | |||
962eb8da3c | |||
a0f607b5ac | |||
a1353d567b | |||
0cf949f362 | |||
3b171a02b7 | |||
1c7d47da66 | |||
dba5905a4f | |||
6017ea07b8 | |||
672446ed9c | |||
c32179fcaa | |||
31acb560da | |||
866c348da7 | |||
f12aef2f4e | |||
47ccc57d81 | |||
648b5575fc | |||
f09e044be4 | |||
e8a408c15c | |||
a5ca86c5a0 | |||
20dde50ed3 | |||
86939937cf | |||
4a9b9a2d14 | |||
cb6dadbf94 | |||
e40a0b1f8b | |||
9e23a5edab | |||
9c3ff1d71b | |||
29de5d34d6 | |||
8c891b04f2 | |||
8efdbd54e8 | |||
a3cd3ed6b8 | |||
202b71537a | |||
fdf5c7161c | |||
47e226aecf | |||
bcb83c259f | |||
d55c0c1c2d | |||
17047da18b | |||
57d1dd44a8 | |||
efb2823391 | |||
8752148e6e | |||
469952c851 | |||
8a711e8bb4 | |||
9ccdeff6ca | |||
004f187cd4 | |||
6b5200fead | |||
ac3fcc4284 | |||
4a434d581d |
@@ -1,18 +1,30 @@
[bumpversion]
current_version = 2023.8.3
current_version = 2024.6.5
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
serialize = {major}.{minor}.{patch}
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
serialize =
    {major}.{minor}.{patch}-{rc_t}{rc_n}
    {major}.{minor}.{patch}
message = release: {new_version}
tag_name = version/{new_version}

[bumpversion:part:rc_t]
values =
    rc
    final
optional_value = final

[bumpversion:file:pyproject.toml]

[bumpversion:file:package.json]

[bumpversion:file:docker-compose.yml]

[bumpversion:file:schema.yml]

[bumpversion:file:blueprints/schema.json]

[bumpversion:file:authentik/__init__.py]

[bumpversion:file:internal/constants/constants.go]
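The updated `parse`/`serialize` pair above is what lets bumpversion understand both final releases and release candidates. A minimal sketch of how that pattern splits a version string; the regex is copied from the config (the doubled backslash in `[1-9]\\d*` is configparser escaping for a plain `\d*`), and the sample versions are purely illustrative:

```python
import re

# Same pattern as the new `parse` line above, unescaped for plain Python.
PARSE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

for version in ("2024.6.5", "2024.8.0-rc1"):  # illustrative values
    print(version, PARSE.fullmatch(version).groupdict())
# 2024.6.5     -> rc_t/rc_n stay None, so serialize falls back to {major}.{minor}.{patch}
# 2024.8.0-rc1 -> rc_t='rc', rc_n='1', serialized as {major}.{minor}.{patch}-{rc_t}{rc_n}
```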
@@ -9,3 +9,4 @@ blueprints/local
.git
!gen-ts-api/node_modules
!gen-ts-api/dist/**
!gen-go-api/
.github/FUNDING.yml (vendored, 2 changes)
@@ -1 +1 @@
github: [BeryJu]
custom: https://goauthentik.io/pricing/
.github/ISSUE_TEMPLATE/question.md (vendored, 2 changes)
@@ -9,7 +9,7 @@ assignees: ""
**Describe your question/**
A clear and concise description of what you're trying to do.

**Relevant infos**
**Relevant info**
i.e. Version of other software you're using, specifics of your setup

**Screenshots**
@@ -9,9 +9,6 @@ inputs:
runs:
  using: "composite"
  steps:
    - name: Generate config
      id: ev
      uses: ./.github/actions/docker-push-variables
    - name: Find Comment
      uses: peter-evans/find-comment@v2
      id: fc
.github/actions/docker-push-variables/action.yml (vendored, 73 changes)
@@ -1,64 +1,47 @@
---
name: "Prepare docker environment variables"
description: "Prepare docker environment variables"

inputs:
  image-name:
    required: true
    description: "Docker image prefix"
  image-arch:
    required: false
    description: "Docker image arch"

outputs:
  shouldBuild:
    description: "Whether to build image or not"
    value: ${{ steps.ev.outputs.shouldBuild }}
  branchName:
    description: "Branch name"
    value: ${{ steps.ev.outputs.branchName }}
  branchNameContainer:
    description: "Branch name (for containers)"
    value: ${{ steps.ev.outputs.branchNameContainer }}
  timestamp:
    description: "Timestamp"
    value: ${{ steps.ev.outputs.timestamp }}

  sha:
    description: "sha"
    value: ${{ steps.ev.outputs.sha }}
  shortHash:
    description: "shortHash"
    value: ${{ steps.ev.outputs.shortHash }}

  version:
    description: "version"
    description: "Version"
    value: ${{ steps.ev.outputs.version }}
  versionFamily:
    description: "versionFamily"
    value: ${{ steps.ev.outputs.versionFamily }}
  prerelease:
    description: "Prerelease"
    value: ${{ steps.ev.outputs.prerelease }}

  imageTags:
    description: "Docker image tags"
    value: ${{ steps.ev.outputs.imageTags }}
  imageMainTag:
    description: "Docker image main tag"
    value: ${{ steps.ev.outputs.imageMainTag }}

runs:
  using: "composite"
  steps:
    - name: Generate config
      id: ev
      shell: python
      shell: bash
      env:
        IMAGE_NAME: ${{ inputs.image-name }}
        IMAGE_ARCH: ${{ inputs.image-arch }}
        PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
      run: |
        """Helper script to get the actual branch name, docker safe"""
        import configparser
        import os
        from time import time

        parser = configparser.ConfigParser()
        parser.read(".bumpversion.cfg")

        branch_name = os.environ["GITHUB_REF"]
        if os.environ.get("GITHUB_HEAD_REF", "") != "":
            branch_name = os.environ["GITHUB_HEAD_REF"]

        should_build = str(os.environ.get("DOCKER_USERNAME", "") != "").lower()
        version = parser.get("bumpversion", "current_version")
        version_family = ".".join(version.split(".")[:-1])
        safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")

        sha = os.environ["GITHUB_SHA"] if not "${{ github.event.pull_request.head.sha }}" else "${{ github.event.pull_request.head.sha }}"

        with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
            print("branchName=%s" % branch_name, file=_output)
            print("branchNameContainer=%s" % safe_branch_name, file=_output)
            print("timestamp=%s" % int(time()), file=_output)
            print("sha=%s" % sha, file=_output)
            print("shortHash=%s" % sha[:7], file=_output)
            print("shouldBuild=%s" % should_build, file=_output)
            print("version=%s" % version, file=_output)
            print("versionFamily=%s" % version_family, file=_output)
        python3 ${{ github.action_path }}/push_vars.py
.github/actions/docker-push-variables/push_vars.py (vendored, new file, 62 lines)
@@ -0,0 +1,62 @@
"""Helper script to get the actual branch name, docker safe"""

import configparser
import os
from time import time

parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")

should_build = str(os.environ.get("DOCKER_USERNAME", None) is not None).lower()

branch_name = os.environ["GITHUB_REF"]
if os.environ.get("GITHUB_HEAD_REF", "") != "":
    branch_name = os.environ["GITHUB_HEAD_REF"]
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-").replace("'", "-")

image_names = os.getenv("IMAGE_NAME").split(",")
image_arch = os.getenv("IMAGE_ARCH") or None

is_pull_request = bool(os.getenv("PR_HEAD_SHA"))
is_release = "dev" not in image_names[0]

sha = os.environ["GITHUB_SHA"] if not is_pull_request else os.getenv("PR_HEAD_SHA")

# 2042.1.0 or 2042.1.0-rc1
version = parser.get("bumpversion", "current_version")
# 2042.1
version_family = ".".join(version.split("-", 1)[0].split(".")[:-1])
prerelease = "-" in version

image_tags = []
if is_release:
    for name in image_names:
        image_tags += [
            f"{name}:{version}",
        ]
        if not prerelease:
            image_tags += [
                f"{name}:latest",
                f"{name}:{version_family}",
            ]
else:
    suffix = ""
    if image_arch and image_arch != "amd64":
        suffix = f"-{image_arch}"
    for name in image_names:
        image_tags += [
            f"{name}:gh-{sha}{suffix}",  # Used for ArgoCD and PR comments
            f"{name}:gh-{safe_branch_name}{suffix}",  # For convenience
            f"{name}:gh-{safe_branch_name}-{int(time())}-{sha[:7]}{suffix}",  # Use by FluxCD
        ]

image_main_tag = image_tags[0]
image_tags_rendered = ",".join(image_tags)

with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
    print(f"shouldBuild={should_build}", file=_output)
    print(f"sha={sha}", file=_output)
    print(f"version={version}", file=_output)
    print(f"prerelease={prerelease}", file=_output)
    print(f"imageTags={image_tags_rendered}", file=_output)
    print(f"imageMainTag={image_main_tag}", file=_output)
.github/actions/docker-push-variables/test.sh (vendored, new executable file, 7 lines)
@@ -0,0 +1,7 @@
#!/bin/bash -x
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
GITHUB_OUTPUT=/dev/stdout \
    GITHUB_REF=ref \
    GITHUB_SHA=sha \
    IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
    python $SCRIPT_DIR/push_vars.py
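Run from the repository root, this smoke test writes the generated outputs to stdout instead of a real `GITHUB_OUTPUT` file. With the values hard-coded above and `DOCKER_USERNAME` unset, the script should print roughly the following (the exact tags depend on the `current_version` read from `.bumpversion.cfg`, here assumed to be 2024.6.5 as shown earlier):

```
shouldBuild=false
sha=sha
version=2024.6.5
prerelease=False
imageTags=ghcr.io/goauthentik/server:2024.6.5,ghcr.io/goauthentik/server:latest,ghcr.io/goauthentik/server:2024.6,beryju/authentik:2024.6.5,beryju/authentik:latest,beryju/authentik:2024.6
imageMainTag=ghcr.io/goauthentik/server:2024.6.5
```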
.github/actions/setup/action.yml (vendored, 27 changes)
@@ -2,36 +2,39 @@ name: "Setup authentik testing environment"
description: "Setup authentik testing environment"

inputs:
  postgresql_tag:
  postgresql_version:
    description: "Optional postgresql image tag"
    default: "12"
    default: "16"

runs:
  using: "composite"
  steps:
    - name: Install poetry
    - name: Install poetry & deps
      shell: bash
      run: |
        pipx install poetry || true
        sudo apt update
        sudo apt install -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
        sudo apt-get update
        sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
    - name: Setup python and restore poetry
      uses: actions/setup-python@v3
      uses: actions/setup-python@v5
      with:
        python-version: "3.11"
        python-version-file: "pyproject.toml"
        cache: "poetry"
    - name: Setup node
      uses: actions/setup-node@v3
      uses: actions/setup-node@v4
      with:
        node-version: "20"
        node-version-file: web/package.json
        cache: "npm"
        cache-dependency-path: web/package-lock.json
    - name: Setup go
      uses: actions/setup-go@v5
      with:
        go-version-file: "go.mod"
    - name: Setup dependencies
      shell: bash
      run: |
        export PSQL_TAG=${{ inputs.postgresql_tag }}
        docker-compose -f .github/actions/setup/docker-compose.yml up -d
        poetry env use python3.11
        export PSQL_TAG=${{ inputs.postgresql_version }}
        docker compose -f .github/actions/setup/docker-compose.yml up -d
        poetry install
        cd web && npm ci
    - name: Generate config
.github/actions/setup/docker-compose.yml (vendored, 4 changes)
@@ -1,8 +1,6 @@
version: "3.7"

services:
  postgresql:
    image: docker.io/library/postgres:${PSQL_TAG:-12}
    image: docker.io/library/postgres:${PSQL_TAG:-16}
    volumes:
      - db-data:/var/lib/postgresql/data
    environment:
.github/codecov.yml (vendored, 4 changes)
@@ -6,5 +6,5 @@ coverage:
      # adjust accordingly based on how flaky your tests are
      # this allows a 1% drop from the previous base commit coverage
      threshold: 1%
notify:
  after_n_builds: 3
comment:
  after_n_builds: 3
.github/codespell-words.txt (vendored, 3 changes)
@@ -2,3 +2,6 @@ keypair
keypairs
hass
warmup
ontext
singed
assertIn
.github/dependabot.yml (vendored, 15 changes)
@@ -21,7 +21,10 @@ updates:
    labels:
      - dependencies
  - package-ecosystem: npm
    directory: "/web"
    directories:
      - "/web"
      - "/tests/wdio"
      - "/web/sfe"
    schedule:
      interval: daily
      time: "04:00"
@@ -34,13 +37,14 @@ updates:
      sentry:
        patterns:
          - "@sentry/*"
          - "@spotlightjs/*"
      babel:
        patterns:
          - "@babel/*"
          - "babel-*"
      eslint:
        patterns:
          - "@typescript-eslint/eslint-*"
          - "@typescript-eslint/*"
          - "eslint"
          - "eslint-*"
      storybook:
@@ -50,6 +54,13 @@ updates:
      esbuild:
        patterns:
          - "@esbuild/*"
      rollup:
        patterns:
          - "@rollup/*"
          - "rollup-*"
      wdio:
        patterns:
          - "@wdio/*"
  - package-ecosystem: npm
    directory: "/website"
    schedule:
.github/pull_request_template.md (vendored, 1 change)
@@ -27,7 +27,6 @@ If an API change has been made
If changes to the frontend have been made

- [ ] The code has been formatted (`make web`)
- [ ] The translation files have been updated (`make i18n-extract`)

If applicable

.github/workflows/api-py-publish.yml (vendored, new file, 65 lines)
@@ -0,0 +1,65 @@
name: authentik-api-py-publish
on:
  push:
    branches: [main]
    paths:
      - "schema.yml"
  workflow_dispatch:
jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      id-token: write
    steps:
      - id: generate_token
        uses: tibdex/github-app-token@v2
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
      - uses: actions/checkout@v4
        with:
          token: ${{ steps.generate_token.outputs.token }}
      - name: Install poetry & deps
        shell: bash
        run: |
          pipx install poetry || true
          sudo apt-get update
          sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
      - name: Setup python and restore poetry
        uses: actions/setup-python@v5
        with:
          python-version-file: "pyproject.toml"
          cache: "poetry"
      - name: Generate API Client
        run: make gen-client-py
      - name: Publish package
        working-directory: gen-py-api/
        run: |
          poetry build
      - name: Publish package to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          packages-dir: gen-py-api/dist/
      # We can't easily upgrade the API client being used due to poetry being poetry
      # so we'll have to rely on dependabot
      # - name: Upgrade /
      #   run: |
      #     export VERSION=$(cd gen-py-api && poetry version -s)
      #     poetry add "authentik_client=$VERSION" --allow-prereleases --lock
      # - uses: peter-evans/create-pull-request@v6
      #   id: cpr
      #   with:
      #     token: ${{ steps.generate_token.outputs.token }}
      #     branch: update-root-api-client
      #     commit-message: "root: bump API Client version"
      #     title: "root: bump API Client version"
      #     body: "root: bump API Client version"
      #     delete-branch: true
      #     signoff: true
      #     # ID from https://api.github.com/users/authentik-automation[bot]
      #     author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
      # - uses: peter-evans/enable-pull-request-automerge@v3
      #   with:
      #     token: ${{ steps.generate_token.outputs.token }}
      #     pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
      #     merge-method: squash
@@ -1,4 +1,4 @@
name: authentik-web-api-publish
name: authentik-api-ts-publish
on:
  push:
    branches: [main]
@@ -17,9 +17,9 @@ jobs:
      - uses: actions/checkout@v4
        with:
          token: ${{ steps.generate_token.outputs.token }}
      - uses: actions/setup-node@v3
      - uses: actions/setup-node@v4
        with:
          node-version: "20"
          node-version-file: web/package.json
          registry-url: "https://registry.npmjs.org"
      - name: Generate API Client
        run: make gen-client-ts
@@ -31,11 +31,16 @@ jobs:
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}
      - name: Upgrade /web
        working-directory: web/
        working-directory: web
        run: |
          export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
          npm i @goauthentik/api@$VERSION
      - uses: peter-evans/create-pull-request@v5
      - name: Upgrade /web/sfe
        working-directory: web/sfe
        run: |
          export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
          npm i @goauthentik/api@$VERSION
      - uses: peter-evans/create-pull-request@v6
        id: cpr
        with:
          token: ${{ steps.generate_token.outputs.token }}
155
.github/workflows/ci-main.yml
vendored
155
.github/workflows/ci-main.yml
vendored
@ -1,3 +1,4 @@
|
||||
---
|
||||
name: authentik-ci-main
|
||||
|
||||
on:
|
||||
@ -6,11 +7,10 @@ on:
|
||||
- main
|
||||
- next
|
||||
- version-*
|
||||
paths-ignore:
|
||||
- website
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- version-*
|
||||
|
||||
env:
|
||||
POSTGRES_DB: authentik
|
||||
@ -26,10 +26,7 @@ jobs:
|
||||
- bandit
|
||||
- black
|
||||
- codespell
|
||||
- isort
|
||||
- pending-migrations
|
||||
- pylint
|
||||
- pyright
|
||||
- ruff
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
@ -47,25 +44,33 @@ jobs:
|
||||
- name: run migrations
|
||||
run: poetry run python -m lifecycle.migrate
|
||||
test-migrations-from-stable:
|
||||
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
|
||||
runs-on: ubuntu-latest
|
||||
continue-on-error: true
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
psql:
|
||||
- 15-alpine
|
||||
- 16-alpine
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup authentik env
|
||||
uses: ./.github/actions/setup
|
||||
- name: checkout stable
|
||||
run: |
|
||||
# Delete all poetry envs
|
||||
rm -rf /home/runner/.cache/pypoetry
|
||||
# Copy current, latest config to local
|
||||
cp authentik/lib/default.yml local.env.yml
|
||||
cp -R .github ..
|
||||
cp -R scripts ..
|
||||
git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
|
||||
git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1)
|
||||
rm -rf .github/ scripts/
|
||||
mv ../.github ../scripts .
|
||||
- name: Setup authentik env (ensure stable deps are installed)
|
||||
- name: Setup authentik env (stable)
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
postgresql_version: ${{ matrix.psql }}
|
||||
- name: run migrations to stable
|
||||
run: poetry run python -m lifecycle.migrate
|
||||
- name: checkout current code
|
||||
@ -75,11 +80,21 @@ jobs:
|
||||
git reset --hard HEAD
|
||||
git clean -d -fx .
|
||||
git checkout $GITHUB_SHA
|
||||
poetry install
|
||||
# Delete previous poetry env
|
||||
rm -rf /home/runner/.cache/pypoetry/virtualenvs/*
|
||||
- name: Setup authentik env (ensure latest deps are installed)
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
postgresql_version: ${{ matrix.psql }}
|
||||
- name: migrate to latest
|
||||
run: poetry run python -m lifecycle.migrate
|
||||
run: |
|
||||
poetry run python -m lifecycle.migrate
|
||||
- name: run tests
|
||||
env:
|
||||
# Test in the main database that we just migrated from the previous stable version
|
||||
AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
|
||||
run: |
|
||||
poetry run make test
|
||||
test-unittest:
|
||||
name: test-unittest - PostgreSQL ${{ matrix.psql }}
|
||||
runs-on: ubuntu-latest
|
||||
@ -88,22 +103,23 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
psql:
|
||||
- 12-alpine
|
||||
- 15-alpine
|
||||
- 16-alpine
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup authentik env
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
postgresql_tag: ${{ matrix.psql }}
|
||||
postgresql_version: ${{ matrix.psql }}
|
||||
- name: run unittest
|
||||
run: |
|
||||
poetry run make test
|
||||
poetry run coverage xml
|
||||
- if: ${{ always() }}
|
||||
uses: codecov/codecov-action@v3
|
||||
uses: codecov/codecov-action@v4
|
||||
with:
|
||||
flags: unit
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
test-integration:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
@ -112,15 +128,16 @@ jobs:
|
||||
- name: Setup authentik env
|
||||
uses: ./.github/actions/setup
|
||||
- name: Create k8s Kind Cluster
|
||||
uses: helm/kind-action@v1.8.0
|
||||
uses: helm/kind-action@v1.10.0
|
||||
- name: run integration
|
||||
run: |
|
||||
poetry run coverage run manage.py test tests/integration
|
||||
poetry run coverage xml
|
||||
- if: ${{ always() }}
|
||||
uses: codecov/codecov-action@v3
|
||||
uses: codecov/codecov-action@v4
|
||||
with:
|
||||
flags: integration
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
test-e2e:
|
||||
name: test-e2e (${{ matrix.job.name }})
|
||||
runs-on: ubuntu-latest
|
||||
@ -141,6 +158,8 @@ jobs:
|
||||
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
|
||||
- name: radius
|
||||
glob: tests/e2e/test_provider_radius*
|
||||
- name: scim
|
||||
glob: tests/e2e/test_source_scim*
|
||||
- name: flows
|
||||
glob: tests/e2e/test_flows*
|
||||
steps:
|
||||
@ -149,9 +168,9 @@ jobs:
|
||||
uses: ./.github/actions/setup
|
||||
- name: Setup e2e env (chrome, etc)
|
||||
run: |
|
||||
docker-compose -f tests/e2e/docker-compose.yml up -d
|
||||
docker compose -f tests/e2e/docker-compose.yml up -d
|
||||
- id: cache-web
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: web/dist
|
||||
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
|
||||
@ -167,9 +186,10 @@ jobs:
|
||||
poetry run coverage run manage.py test ${{ matrix.job.glob }}
|
||||
poetry run coverage xml
|
||||
- if: ${{ always() }}
|
||||
uses: codecov/codecov-action@v3
|
||||
uses: codecov/codecov-action@v4
|
||||
with:
|
||||
flags: e2e
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
ci-core-mark:
|
||||
needs:
|
||||
- lint
|
||||
@ -182,8 +202,17 @@ jobs:
|
||||
steps:
|
||||
- run: echo mark
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
arch:
|
||||
- amd64
|
||||
- arm64
|
||||
needs: ci-core-mark
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
# Needed to upload container images to ghcr.io
|
||||
packages: write
|
||||
timeout-minutes: 120
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@ -198,9 +227,12 @@ jobs:
|
||||
id: ev
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
with:
|
||||
image-name: ghcr.io/goauthentik/dev-server
|
||||
image-arch: ${{ matrix.arch }}
|
||||
- name: Login to Container Registry
|
||||
uses: docker/login-action@v3
|
||||
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
@ -214,65 +246,34 @@ jobs:
|
||||
secrets: |
|
||||
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
|
||||
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
|
||||
tags: ${{ steps.ev.outputs.imageTags }}
|
||||
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
||||
tags: |
|
||||
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}
|
||||
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}
|
||||
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
|
||||
build-args: |
|
||||
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
|
||||
VERSION=${{ steps.ev.outputs.version }}
|
||||
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
|
||||
cache-from: type=gha
cache-to: type=gha,mode=max
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache
cache-to: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max
platforms: linux/${{ matrix.arch }}
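The cache configuration moves from the GitHub Actions cache backend (type=gha) to a registry-backed BuildKit cache, which any builder with access to ghcr.io can reuse, including both legs of the amd64/arm64 matrix. A rough standalone sketch of the same setup; the :example tag is hypothetical and only illustrates the invocation:

# Build reusing (and refreshing) the shared registry layer cache
docker buildx build . \
  --platform linux/amd64 \
  --tag ghcr.io/goauthentik/dev-server:example \
  --cache-from type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache \
  --cache-to type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max \
  --push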
pr-comment:
|
||||
needs:
|
||||
- build
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
permissions:
|
||||
# Needed to write comments on PRs
|
||||
pull-requests: write
|
||||
timeout-minutes: 120
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
- name: prepare variables
|
||||
uses: ./.github/actions/docker-push-variables
|
||||
id: ev
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
with:
|
||||
image-name: ghcr.io/goauthentik/dev-server
|
||||
- name: Comment on PR
|
||||
if: github.event_name == 'pull_request'
|
||||
continue-on-error: true
|
||||
uses: ./.github/actions/comment-pr-instructions
|
||||
with:
|
||||
tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
|
||||
build-arm64:
|
||||
needs: ci-core-mark
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 120
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3.0.0
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
- name: prepare variables
|
||||
uses: ./.github/actions/docker-push-variables
|
||||
id: ev
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
- name: Login to Container Registry
|
||||
uses: docker/login-action@v3
|
||||
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: generate ts client
|
||||
run: make gen-client-ts
|
||||
- name: Build Docker Image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
secrets: |
|
||||
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
|
||||
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
|
||||
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
||||
tags: |
|
||||
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-arm64
|
||||
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}-arm64
|
||||
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64
|
||||
build-args: |
|
||||
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
|
||||
VERSION=${{ steps.ev.outputs.version }}
|
||||
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
|
||||
platforms: linux/arm64
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
tag: gh-${{ steps.ev.outputs.imageMainTag }}
|
||||
|
.github/workflows/ci-outpost.yml (37 changes, vendored)
@@ -1,3 +1,4 @@
|
||||
---
|
||||
name: authentik-ci-outpost
|
||||
|
||||
on:
|
||||
@ -9,13 +10,14 @@ on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- version-*
|
||||
|
||||
jobs:
|
||||
lint-golint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-go@v4
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: "go.mod"
|
||||
- name: Prepare and generate API
|
||||
@ -27,16 +29,16 @@ jobs:
|
||||
- name: Generate API
|
||||
run: make gen-client-go
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@v3
|
||||
uses: golangci/golangci-lint-action@v6
|
||||
with:
|
||||
version: v1.52.2
|
||||
version: v1.54.2
|
||||
args: --timeout 5000s --verbose
|
||||
skip-cache: true
|
||||
test-unittest:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-go@v4
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: "go.mod"
|
||||
- name: Setup authentik env
|
||||
@ -64,7 +66,11 @@ jobs:
|
||||
- proxy
|
||||
- ldap
|
||||
- radius
|
||||
- rac
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
# Needed to upload container images to ghcr.io
|
||||
packages: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
@ -78,9 +84,11 @@ jobs:
|
||||
id: ev
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
with:
|
||||
image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
|
||||
- name: Login to Container Registry
|
||||
uses: docker/login-action@v3
|
||||
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
@ -90,19 +98,15 @@ jobs:
|
||||
- name: Build Docker Image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
||||
tags: |
|
||||
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}
|
||||
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
|
||||
tags: ${{ steps.ev.outputs.imageTags }}
|
||||
file: ${{ matrix.type }}.Dockerfile
|
||||
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
||||
build-args: |
|
||||
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
|
||||
VERSION=${{ steps.ev.outputs.version }}
|
||||
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
context: .
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
|
||||
cache-to: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache,mode=max
|
||||
build-binary:
|
||||
timeout-minutes: 120
|
||||
needs:
|
||||
@ -115,18 +119,19 @@ jobs:
|
||||
- proxy
|
||||
- ldap
|
||||
- radius
|
||||
- rac
|
||||
goos: [linux]
|
||||
goarch: [amd64, arm64]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
- uses: actions/setup-go@v4
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: "go.mod"
|
||||
- uses: actions/setup-node@v3
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version-file: web/package.json
|
||||
cache: "npm"
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- name: Generate API
|
||||
|
.github/workflows/ci-web.yml (63 changes, vendored)
@@ -9,31 +9,55 @@ on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- version-*
|
||||
|
||||
jobs:
|
||||
lint-eslint:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
project:
|
||||
- web
|
||||
- tests/wdio
|
||||
include:
|
||||
- command: tsc
|
||||
project: web
|
||||
extra_setup: |
|
||||
cd sfe/ && npm ci
|
||||
exclude:
|
||||
- command: lint:lockfile
|
||||
project: tests/wdio
|
||||
- command: tsc
|
||||
project: tests/wdio
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v3
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version-file: ${{ matrix.project }}/package.json
|
||||
cache: "npm"
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- working-directory: web/
|
||||
cache-dependency-path: ${{ matrix.project }}/package-lock.json
|
||||
- working-directory: ${{ matrix.project }}/
|
||||
run: npm ci
|
||||
- name: Generate API
|
||||
run: make gen-client-ts
|
||||
- name: Eslint
|
||||
working-directory: web/
|
||||
working-directory: ${{ matrix.project }}/
|
||||
run: npm run lint
|
||||
lint-lockfile:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- working-directory: web/
run: |
[ -z "$(jq -r '.packages | to_entries[] | select((.key | startswith("node_modules")) and (.value | has("resolved") | not)) | .key' < package-lock.json)" ]
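The bracketed test above fails the job whenever package-lock.json contains a node_modules entry without a resolved URL, i.e. a dependency that is not pinned to a registry tarball. A minimal local sketch of the same check, assuming jq is installed and the command is run from the web/ directory:

unresolved="$(jq -r '.packages | to_entries[]
  | select((.key | startswith("node_modules")) and (.value | has("resolved") | not))
  | .key' < package-lock.json)"
# Empty output means every node_modules entry carries a "resolved" field.
[ -z "$unresolved" ] || { echo "entries missing resolved:"; echo "$unresolved"; exit 1; }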
lint-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v3
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version-file: web/package.json
|
||||
cache: "npm"
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- working-directory: web/
|
||||
@ -45,27 +69,33 @@ jobs:
|
||||
run: npm run tsc
|
||||
lint-prettier:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
project:
|
||||
- web
|
||||
- tests/wdio
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v3
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version-file: ${{ matrix.project }}/package.json
|
||||
cache: "npm"
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- working-directory: web/
|
||||
cache-dependency-path: ${{ matrix.project }}/package-lock.json
|
||||
- working-directory: ${{ matrix.project }}/
|
||||
run: npm ci
|
||||
- name: Generate API
|
||||
run: make gen-client-ts
|
||||
- name: prettier
|
||||
working-directory: web/
|
||||
working-directory: ${{ matrix.project }}/
|
||||
run: npm run prettier-check
|
||||
lint-lit-analyse:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v3
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version-file: web/package.json
|
||||
cache: "npm"
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- working-directory: web/
|
||||
@ -82,6 +112,7 @@ jobs:
|
||||
run: npm run lit-analyse
|
||||
ci-web-mark:
|
||||
needs:
|
||||
- lint-lockfile
|
||||
- lint-eslint
|
||||
- lint-prettier
|
||||
- lint-lit-analyse
|
||||
@ -95,9 +126,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v3
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version-file: web/package.json
|
||||
cache: "npm"
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- working-directory: web/
|
||||
|
.github/workflows/ci-website.yml (22 changes, vendored)
@@ -9,15 +9,23 @@ on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- version-*
|
||||
|
||||
jobs:
|
||||
lint-lockfile:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- working-directory: website/
|
||||
run: |
|
||||
[ -z "$(jq -r '.packages | to_entries[] | select((.key | startswith("node_modules")) and (.value | has("resolved") | not)) | .key' < package-lock.json)" ]
|
||||
lint-prettier:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v3
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version-file: website/package.json
|
||||
cache: "npm"
|
||||
cache-dependency-path: website/package-lock.json
|
||||
- working-directory: website/
|
||||
@ -29,9 +37,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v3
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version-file: website/package.json
|
||||
cache: "npm"
|
||||
cache-dependency-path: website/package-lock.json
|
||||
- working-directory: website/
|
||||
@ -47,12 +55,11 @@ jobs:
|
||||
matrix:
|
||||
job:
|
||||
- build
|
||||
- build-docs-only
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v3
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version-file: website/package.json
|
||||
cache: "npm"
|
||||
cache-dependency-path: website/package-lock.json
|
||||
- working-directory: website/
|
||||
@ -62,6 +69,7 @@ jobs:
|
||||
run: npm run ${{ matrix.job }}
|
||||
ci-website-mark:
|
||||
needs:
|
||||
- lint-lockfile
|
||||
- lint-prettier
|
||||
- test
|
||||
- build
|
||||
|
.github/workflows/codeql-analysis.yml (6 changes, vendored)
@@ -27,10 +27,10 @@ jobs:
|
||||
- name: Setup authentik env
|
||||
uses: ./.github/actions/setup
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
uses: github/codeql-action/autobuild@v3
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
||||
uses: github/codeql-action/analyze@v3
|
||||
|
.github/workflows/gen-update-webauthn-mds.yml (43 changes, vendored, new file)
@@ -0,0 +1,43 @@
|
||||
name: authentik-gen-update-webauthn-mds
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '30 1 1,15 * *'
|
||||
|
||||
env:
|
||||
POSTGRES_DB: authentik
|
||||
POSTGRES_USER: authentik
|
||||
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- id: generate_token
|
||||
uses: tibdex/github-app-token@v2
|
||||
with:
|
||||
app_id: ${{ secrets.GH_APP_ID }}
|
||||
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
- name: Setup authentik env
|
||||
uses: ./.github/actions/setup
|
||||
- run: poetry run ak update_webauthn_mds
|
||||
- uses: peter-evans/create-pull-request@v6
|
||||
id: cpr
|
||||
with:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
branch: update-fido-mds-client
|
||||
commit-message: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
|
||||
title: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
|
||||
body: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
|
||||
delete-branch: true
|
||||
signoff: true
|
||||
# ID from https://api.github.com/users/authentik-automation[bot]
|
||||
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
|
||||
- uses: peter-evans/enable-pull-request-automerge@v3
|
||||
with:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
|
||||
merge-method: squash
|
.github/workflows/gha-cache-cleanup.yml (4 changes, vendored)
@@ -6,6 +6,10 @@ on:
|
||||
types:
|
||||
- closed
|
||||
|
||||
permissions:
|
||||
# Permission to delete cache
|
||||
actions: write
|
||||
|
||||
jobs:
|
||||
cleanup:
|
||||
runs-on: ubuntu-latest
|
||||
|
.github/workflows/ghcr-retention.yml (4 changes, vendored)
@@ -1,8 +1,8 @@
|
||||
name: ghcr-retention
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 0 * * *" # every day at midnight
|
||||
# schedule:
|
||||
# - cron: "0 0 * * *" # every day at midnight
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
|
.github/workflows/image-compress.yml (2 changes, vendored)
@@ -42,7 +42,7 @@ jobs:
|
||||
with:
|
||||
githubToken: ${{ steps.generate_token.outputs.token }}
|
||||
compressOnly: ${{ github.event_name != 'pull_request' }}
|
||||
- uses: peter-evans/create-pull-request@v5
|
||||
- uses: peter-evans/create-pull-request@v6
|
||||
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
|
||||
id: cpr
|
||||
with:
|
||||
|
.github/workflows/release-next-branch.yml (1 change, vendored)
@@ -6,6 +6,7 @@ on:
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
# Needed to be able to push to the next branch
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
|
.github/workflows/release-publish.yml (85 changes, vendored)
@@ -1,3 +1,4 @@
|
||||
---
|
||||
name: authentik-on-release
|
||||
|
||||
on:
|
||||
@ -7,6 +8,9 @@ on:
|
||||
jobs:
|
||||
build-server:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
# Needed to upload container images to ghcr.io
|
||||
packages: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up QEMU
|
||||
@ -16,6 +20,10 @@ jobs:
|
||||
- name: prepare variables
|
||||
uses: ./.github/actions/docker-push-variables
|
||||
id: ev
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
with:
|
||||
image-name: ghcr.io/goauthentik/server,beryju/authentik
|
||||
- name: Docker Login Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
@ -27,29 +35,25 @@ jobs:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: make empty ts client
|
||||
run: mkdir -p ./gen-ts-client
|
||||
- name: make empty clients
|
||||
run: |
|
||||
mkdir -p ./gen-ts-api
|
||||
mkdir -p ./gen-go-api
|
||||
- name: Build Docker Image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: ${{ github.event_name == 'release' }}
|
||||
push: true
|
||||
secrets: |
|
||||
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
|
||||
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
|
||||
tags: |
|
||||
beryju/authentik:${{ steps.ev.outputs.version }},
|
||||
beryju/authentik:${{ steps.ev.outputs.versionFamily }},
|
||||
beryju/authentik:latest,
|
||||
ghcr.io/goauthentik/server:${{ steps.ev.outputs.version }},
|
||||
ghcr.io/goauthentik/server:${{ steps.ev.outputs.versionFamily }},
|
||||
ghcr.io/goauthentik/server:latest
|
||||
tags: ${{ steps.ev.outputs.imageTags }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
VERSION=${{ steps.ev.outputs.version }}
|
||||
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
|
||||
build-outpost:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
# Needed to upload container images to ghcr.io
|
||||
packages: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@ -57,9 +61,10 @@ jobs:
|
||||
- proxy
|
||||
- ldap
|
||||
- radius
|
||||
- rac
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-go@v4
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: "go.mod"
|
||||
- name: Set up QEMU
|
||||
@ -69,6 +74,14 @@ jobs:
|
||||
- name: prepare variables
|
||||
uses: ./.github/actions/docker-push-variables
|
||||
id: ev
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
with:
|
||||
image-name: ghcr.io/goauthentik/${{ matrix.type }},beryju/authentik-${{ matrix.type }}
|
||||
- name: make empty clients
|
||||
run: |
|
||||
mkdir -p ./gen-ts-api
|
||||
mkdir -p ./gen-go-api
|
||||
- name: Docker Login Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
@ -83,22 +96,17 @@ jobs:
|
||||
- name: Build Docker Image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
push: ${{ github.event_name == 'release' }}
|
||||
tags: |
|
||||
beryju/authentik-${{ matrix.type }}:${{ steps.ev.outputs.version }},
|
||||
beryju/authentik-${{ matrix.type }}:${{ steps.ev.outputs.versionFamily }},
|
||||
beryju/authentik-${{ matrix.type }}:latest,
|
||||
ghcr.io/goauthentik/${{ matrix.type }}:${{ steps.ev.outputs.version }},
|
||||
ghcr.io/goauthentik/${{ matrix.type }}:${{ steps.ev.outputs.versionFamily }},
|
||||
ghcr.io/goauthentik/${{ matrix.type }}:latest
|
||||
push: true
|
||||
tags: ${{ steps.ev.outputs.imageTags }}
|
||||
file: ${{ matrix.type }}.Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
VERSION=${{ steps.ev.outputs.version }}
|
||||
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
|
||||
context: .
|
||||
build-outpost-binary:
|
||||
timeout-minutes: 120
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
# Needed to upload binaries to the release
|
||||
contents: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@ -110,12 +118,12 @@ jobs:
|
||||
goarch: [amd64, arm64]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-go@v4
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: "go.mod"
|
||||
- uses: actions/setup-node@v3
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version-file: web/package.json
|
||||
cache: "npm"
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- name: Build web
|
||||
@ -147,12 +155,12 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Run test suite in final docker images
|
||||
run: |
|
||||
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
|
||||
docker-compose pull -q
|
||||
docker-compose up --no-start
|
||||
docker-compose start postgresql redis
|
||||
docker-compose run -u root server test-all
|
||||
echo "PG_PASS=$(openssl rand 32 | base64)" >> .env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64)" >> .env
|
||||
docker compose pull -q
|
||||
docker compose up --no-start
|
||||
docker compose start postgresql redis
|
||||
docker compose run -u root server test-all
|
||||
sentry-release:
|
||||
needs:
|
||||
- build-server
|
||||
@ -164,15 +172,18 @@ jobs:
|
||||
- name: prepare variables
|
||||
uses: ./.github/actions/docker-push-variables
|
||||
id: ev
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
with:
|
||||
image-name: ghcr.io/goauthentik/server
|
||||
- name: Get static files from docker image
run: |
docker pull ghcr.io/goauthentik/server:latest
container=$(docker container create ghcr.io/goauthentik/server:latest)
docker pull ${{ steps.ev.outputs.imageMainTag }}
container=$(docker container create ${{ steps.ev.outputs.imageMainTag }})
docker cp ${container}:web/ .
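The step extracts the prebuilt web assets without ever starting the image: docker container create instantiates a stopped container and docker cp copies the directory out of it. A standalone sketch of the pattern, with an explicit cleanup step that the workflow can omit because the runner is ephemeral:

image="ghcr.io/goauthentik/server:latest"      # the workflow substitutes imageMainTag here
docker pull "$image"
container=$(docker container create "$image")  # created, never started
docker cp "${container}:web/" .
docker rm "$container"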
- name: Create a Sentry.io release
|
||||
uses: getsentry/action-release@v1
|
||||
continue-on-error: true
|
||||
if: ${{ github.event_name == 'release' }}
|
||||
env:
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_ORG: authentik-security-inc
|
||||
|
.github/workflows/release-tag.yml (28 changes, vendored)
@@ -1,3 +1,4 @@
|
||||
---
|
||||
name: authentik-on-tag
|
||||
|
||||
on:
|
||||
@ -13,27 +14,28 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Pre-release test
|
||||
run: |
|
||||
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
|
||||
echo "PG_PASS=$(openssl rand 32 | base64)" >> .env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64)" >> .env
|
||||
docker buildx install
|
||||
mkdir -p ./gen-ts-api
|
||||
docker build -t testing:latest .
|
||||
echo "AUTHENTIK_IMAGE=testing" >> .env
|
||||
echo "AUTHENTIK_TAG=latest" >> .env
|
||||
docker-compose up --no-start
|
||||
docker-compose start postgresql redis
|
||||
docker-compose run -u root server test-all
|
||||
docker compose up --no-start
|
||||
docker compose start postgresql redis
|
||||
docker compose run -u root server test-all
|
||||
- id: generate_token
|
||||
uses: tibdex/github-app-token@v2
|
||||
with:
|
||||
app_id: ${{ secrets.GH_APP_ID }}
|
||||
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
- name: Extract version number
|
||||
id: get_version
|
||||
uses: actions/github-script@v6
|
||||
- name: prepare variables
|
||||
uses: ./.github/actions/docker-push-variables
|
||||
id: ev
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
with:
|
||||
github-token: ${{ steps.generate_token.outputs.token }}
|
||||
script: |
|
||||
return context.payload.ref.replace(/\/refs\/tags\/version\//, '');
|
||||
image-name: ghcr.io/goauthentik/server
|
||||
- name: Create Release
|
||||
id: create_release
|
||||
uses: actions/create-release@v1.1.4
|
||||
@ -41,6 +43,6 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
|
||||
with:
|
||||
tag_name: ${{ github.ref }}
|
||||
release_name: Release ${{ steps.get_version.outputs.result }}
|
||||
release_name: Release ${{ steps.ev.outputs.version }}
|
||||
draft: true
|
||||
prerelease: false
|
||||
prerelease: ${{ steps.ev.outputs.prerelease == 'true' }}
|
||||
|
.github/workflows/repo-stale.yml (6 changes, vendored)
@@ -6,8 +6,8 @@ on:
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
# Needed to update issues and PRs
|
||||
issues: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
@ -18,12 +18,12 @@ jobs:
|
||||
with:
|
||||
app_id: ${{ secrets.GH_APP_ID }}
|
||||
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
- uses: actions/stale@v8
|
||||
- uses: actions/stale@v9
|
||||
with:
|
||||
repo-token: ${{ steps.generate_token.outputs.token }}
|
||||
days-before-stale: 60
|
||||
days-before-close: 7
|
||||
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question
|
||||
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
|
||||
stale-issue-label: wontfix
|
||||
stale-issue-message: >
|
||||
This issue has been automatically marked as stale because it has not had
|
||||
|
.github/workflows/translation-advice.yml (11 changes, vendored)
@@ -7,21 +7,26 @@ on:
|
||||
paths:
|
||||
- "!**"
|
||||
- "locale/**"
|
||||
- "web/src/locales/**"
|
||||
- "!locale/en/**"
|
||||
- "web/xliff/**"
|
||||
|
||||
permissions:
|
||||
# Permission to write comment
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
post-comment:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Find Comment
|
||||
uses: peter-evans/find-comment@v2
|
||||
uses: peter-evans/find-comment@v3
|
||||
id: fc
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: "github-actions[bot]"
|
||||
body-includes: authentik translations instructions
|
||||
- name: Create or update comment
|
||||
uses: peter-evans/create-or-update-comment@v3
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
with:
|
||||
comment-id: ${{ steps.fc.outputs.comment-id }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
|
@@ -1,9 +1,8 @@
|
||||
name: authentik-backend-translate-compile
|
||||
---
|
||||
name: authentik-backend-translate-extract-compile
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "locale/**"
|
||||
schedule:
|
||||
- cron: "0 0 * * *" # every day at midnight
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
@ -25,16 +24,20 @@ jobs:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
- name: Setup authentik env
|
||||
uses: ./.github/actions/setup
|
||||
- name: run extract
|
||||
run: |
|
||||
poetry run make i18n-extract
|
||||
- name: run compile
|
||||
run: poetry run ak compilemessages
|
||||
run: |
|
||||
poetry run ak compilemessages
|
||||
make web-check-compile
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v5
|
||||
id: cpr
|
||||
uses: peter-evans/create-pull-request@v6
|
||||
with:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
branch: compile-backend-translation
|
||||
commit-message: "core: compile backend translations"
|
||||
title: "core: compile backend translations"
|
||||
body: "core: compile backend translations"
|
||||
branch: extract-compile-backend-translation
|
||||
commit-message: "core, web: update translations"
|
||||
title: "core, web: update translations"
|
||||
body: "core, web: update translations"
|
||||
delete-branch: true
|
||||
signoff: true
|
.github/workflows/translation-rename.yml (4 changes, vendored)
@@ -6,6 +6,10 @@ on:
|
||||
pull_request:
|
||||
types: [opened, reopened]
|
||||
|
||||
permissions:
|
||||
# Permission to rename PR
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
rename_pr:
|
||||
runs-on: ubuntu-latest
|
||||
|
.gitignore (3 changes, vendored)
@@ -206,3 +206,6 @@ data/
|
||||
.netlify
|
||||
.ruff_cache
|
||||
source_docs/
|
||||
|
||||
### Golang ###
|
||||
/vendor/
|
||||
|
.vscode/extensions.json (4 changes, vendored)
@@ -10,10 +10,10 @@
|
||||
"Gruntfuggly.todo-tree",
|
||||
"mechatroner.rainbow-csv",
|
||||
"ms-python.black-formatter",
|
||||
"ms-python.isort",
|
||||
"ms-python.pylint",
|
||||
"charliermarsh.ruff",
|
||||
"ms-python.python",
|
||||
"ms-python.vscode-pylance",
|
||||
"ms-python.black-formatter",
|
||||
"redhat.vscode-yaml",
|
||||
"Tobermory.es6-string-html",
|
||||
"unifiedjs.vscode-mdx",
|
||||
|
.vscode/settings.json (17 changes, vendored)
@@ -4,25 +4,24 @@
|
||||
"asgi",
|
||||
"authentik",
|
||||
"authn",
|
||||
"entra",
|
||||
"goauthentik",
|
||||
"jwks",
|
||||
"kubernetes",
|
||||
"oidc",
|
||||
"openid",
|
||||
"passwordless",
|
||||
"plex",
|
||||
"saml",
|
||||
"totp",
|
||||
"webauthn",
|
||||
"traefik",
|
||||
"passwordless",
|
||||
"kubernetes",
|
||||
"sso",
|
||||
"slo",
|
||||
"scim",
|
||||
"slo",
|
||||
"sso",
|
||||
"totp",
|
||||
"traefik",
|
||||
"webauthn",
|
||||
],
|
||||
"python.linting.pylintEnabled": true,
|
||||
"todo-tree.tree.showCountsInTree": true,
|
||||
"todo-tree.tree.showBadges": true,
|
||||
"python.formatting.provider": "black",
|
||||
"yaml.customTags": [
|
||||
"!Find sequence",
|
||||
"!KeyOf scalar",
|
||||
|
@@ -9,6 +9,10 @@ lifecycle/ @goauthentik/backend
|
||||
schemas/ @goauthentik/backend
|
||||
scripts/ @goauthentik/backend
|
||||
tests/ @goauthentik/backend
|
||||
pyproject.toml @goauthentik/backend
|
||||
poetry.lock @goauthentik/backend
|
||||
go.mod @goauthentik/backend
|
||||
go.sum @goauthentik/backend
|
||||
# Infrastructure
|
||||
.github/ @goauthentik/infrastructure
|
||||
Dockerfile @goauthentik/infrastructure
|
||||
@ -17,6 +21,7 @@ Dockerfile @goauthentik/infrastructure
|
||||
docker-compose.yml @goauthentik/infrastructure
|
||||
# Web
|
||||
web/ @goauthentik/frontend
|
||||
tests/wdio/ @goauthentik/frontend
|
||||
# Docs & Website
|
||||
website/ @goauthentik/docs
|
||||
# Security
|
||||
|
Dockerfile (79 changes)
@@ -1,5 +1,7 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
|
||||
# Stage 1: Build website
|
||||
FROM --platform=${BUILDPLATFORM} docker.io/node:20 as website-builder
|
||||
FROM --platform=${BUILDPLATFORM} docker.io/node:22 as website-builder
|
||||
|
||||
ENV NODE_ENV=production
|
||||
|
||||
@ -7,38 +9,61 @@ WORKDIR /work/website
|
||||
|
||||
RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \
|
||||
--mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \
|
||||
--mount=type=cache,target=/root/.npm \
|
||||
--mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \
|
||||
npm ci --include=dev
|
||||
|
||||
COPY ./website /work/website/
|
||||
COPY ./blueprints /work/blueprints/
|
||||
COPY ./schema.yml /work/
|
||||
COPY ./SECURITY.md /work/
|
||||
|
||||
RUN npm run build-docs-only
|
||||
RUN npm run build-bundled
|
||||
|
||||
# Stage 2: Build webui
|
||||
FROM --platform=${BUILDPLATFORM} docker.io/node:20 as web-builder
|
||||
FROM --platform=${BUILDPLATFORM} docker.io/node:22 as web-builder
|
||||
|
||||
ARG GIT_BUILD_HASH
|
||||
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
|
||||
ENV NODE_ENV=production
|
||||
|
||||
WORKDIR /work/web
|
||||
|
||||
RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
|
||||
--mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
|
||||
--mount=type=cache,target=/root/.npm \
|
||||
--mount=type=bind,target=/work/web/sfe/package.json,src=./web/sfe/package.json \
|
||||
--mount=type=bind,target=/work/web/sfe/package-lock.json,src=./web/sfe/package-lock.json \
|
||||
--mount=type=bind,target=/work/web/scripts,src=./web/scripts \
|
||||
--mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \
|
||||
npm ci --include=dev && \
|
||||
cd sfe && \
|
||||
npm ci --include=dev
|
||||
|
||||
COPY ./package.json /work
|
||||
COPY ./web /work/web/
|
||||
COPY ./website /work/website/
|
||||
COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
|
||||
|
||||
RUN npm run build
|
||||
RUN npm run build && \
|
||||
cd sfe && \
|
||||
npm run build
|
||||
|
||||
# Stage 3: Build go proxy
|
||||
FROM docker.io/golang:1.21.1-bookworm AS go-builder
|
||||
FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.22-fips-bookworm AS go-builder
|
||||
|
||||
ARG TARGETOS
|
||||
ARG TARGETARCH
|
||||
ARG TARGETVARIANT
|
||||
|
||||
ARG GOOS=$TARGETOS
|
||||
ARG GOARCH=$TARGETARCH
|
||||
|
||||
WORKDIR /go/src/goauthentik.io
|
||||
|
||||
RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
|
||||
dpkg --add-architecture arm64 && \
|
||||
apt-get update && \
|
||||
apt-get install -y --no-install-recommends crossbuild-essential-arm64 gcc-aarch64-linux-gnu
|
||||
|
||||
RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
|
||||
--mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \
|
||||
--mount=type=cache,target=/go/pkg/mod \
|
||||
@ -53,17 +78,17 @@ COPY ./internal /go/src/goauthentik.io/internal
|
||||
COPY ./go.mod /go/src/goauthentik.io/go.mod
|
||||
COPY ./go.sum /go/src/goauthentik.io/go.sum
|
||||
|
||||
ENV CGO_ENABLED=0
|
||||
|
||||
RUN --mount=type=cache,target=/go/pkg/mod \
--mount=type=cache,target=/root/.cache/go-build \
RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
--mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \
CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \
go build -o /go/authentik ./cmd/server
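The new builder stage compiles with cgo enabled, so producing the arm64 binary on an amd64 build host needs the aarch64 cross toolchain installed earlier in the stage; GOEXPERIMENT=systemcrypto and the requirefips build tag are features of the Microsoft Go toolchain that the new base image provides. A rough standalone sketch of the arm64 case outside Docker, assuming that toolchain and the cross compiler are installed:

export GOOS=linux GOARCH=arm64
export CC=aarch64-linux-gnu-gcc CGO_ENABLED=1
GOEXPERIMENT=systemcrypto GOFLAGS="-tags=requirefips" \
  go build -o ./authentik ./cmd/server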
|
||||
# Stage 4: MaxMind GeoIP
|
||||
FROM ghcr.io/maxmind/geoipupdate:v6.0 as geoip
|
||||
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 as geoip
|
||||
|
||||
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
|
||||
ENV GEOIPUPDATE_VERBOSE="true"
|
||||
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
|
||||
ENV GEOIPUPDATE_VERBOSE="1"
|
||||
ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
|
||||
ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"
|
||||
|
||||
@ -74,7 +99,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
|
||||
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
|
||||
|
||||
# Stage 5: Python dependencies
|
||||
FROM docker.io/python:3.11.5-bookworm AS python-deps
|
||||
FROM ghcr.io/goauthentik/fips-python:3.12.3-slim-bookworm-fips-full AS python-deps
|
||||
|
||||
WORKDIR /ak-root/poetry
|
||||
|
||||
@ -82,22 +107,26 @@ ENV VENV_PATH="/ak-root/venv" \
|
||||
POETRY_VIRTUALENVS_CREATE=false \
|
||||
PATH="/ak-root/venv/bin:$PATH"
|
||||
|
||||
RUN --mount=type=cache,target=/var/cache/apt \
|
||||
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
|
||||
|
||||
RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
|
||||
apt-get update && \
|
||||
# Required for installing pip packages
|
||||
apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev
|
||||
apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev
|
||||
|
||||
RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
|
||||
--mount=type=bind,target=./poetry.lock,src=./poetry.lock \
|
||||
--mount=type=cache,target=/root/.cache/pip \
|
||||
--mount=type=cache,target=/root/.cache/pypoetry \
|
||||
python -m venv /ak-root/venv/ && \
|
||||
bash -c "source ${VENV_PATH}/bin/activate && \
|
||||
pip3 install --upgrade pip && \
|
||||
pip3 install poetry && \
|
||||
poetry install --only=main --no-ansi --no-interaction
|
||||
poetry install --only=main --no-ansi --no-interaction --no-root && \
|
||||
pip install --force-reinstall /wheels/*"
|
||||
|
||||
# Stage 6: Run
|
||||
FROM docker.io/python:3.11.5-slim-bookworm AS final-image
|
||||
FROM ghcr.io/goauthentik/fips-python:3.12.3-slim-bookworm-fips-full AS final-image
|
||||
|
||||
ARG GIT_BUILD_HASH
|
||||
ARG VERSION
|
||||
@ -114,7 +143,7 @@ WORKDIR /
|
||||
# We cannot cache this layer otherwise we'll end up with a bigger image
|
||||
RUN apt-get update && \
|
||||
# Required for runtime
|
||||
apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 && \
|
||||
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates && \
|
||||
# Required for bootstrap & healthcheck
|
||||
apt-get install -y --no-install-recommends runit && \
|
||||
apt-get clean && \
|
||||
@ -138,7 +167,7 @@ COPY --from=go-builder /go/authentik /bin/authentik
|
||||
COPY --from=python-deps /ak-root/venv /ak-root/venv
|
||||
COPY --from=web-builder /work/web/dist/ /web/dist/
|
||||
COPY --from=web-builder /work/web/authentik/ /web/authentik/
|
||||
COPY --from=website-builder /work/website/help/ /website/help/
|
||||
COPY --from=website-builder /work/website/build/ /website/help/
|
||||
COPY --from=geoip /usr/share/GeoIP /geoip
|
||||
|
||||
USER 1000
|
||||
@ -146,10 +175,12 @@ USER 1000
|
||||
ENV TMPDIR=/dev/shm/ \
|
||||
PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
PATH="/ak-root/venv/bin:$PATH" \
|
||||
PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
|
||||
VENV_PATH="/ak-root/venv" \
|
||||
POETRY_VIRTUALENVS_CREATE=false
|
||||
|
||||
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "/lifecycle/ak", "healthcheck" ]
|
||||
ENV GOFIPS=1
|
||||
|
||||
ENTRYPOINT [ "dumb-init", "--", "/lifecycle/ak" ]
|
||||
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]
|
||||
|
||||
ENTRYPOINT [ "dumb-init", "--", "ak" ]
|
||||
|
Makefile (140 changes)
@@ -5,9 +5,13 @@ PWD = $(shell pwd)
|
||||
UID = $(shell id -u)
|
||||
GID = $(shell id -g)
|
||||
NPM_VERSION = $(shell python -m scripts.npm_version)
|
||||
PY_SOURCES = authentik tests scripts lifecycle
|
||||
PY_SOURCES = authentik tests scripts lifecycle .github
|
||||
DOCKER_IMAGE ?= "authentik:test"
|
||||
|
||||
GEN_API_TS = "gen-ts-api"
|
||||
GEN_API_PY = "gen-py-api"
|
||||
GEN_API_GO = "gen-go-api"
|
||||
|
||||
pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
|
||||
pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
|
||||
pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
|
||||
@ -15,6 +19,7 @@ pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
|
||||
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
|
||||
-I .github/codespell-words.txt \
|
||||
-S 'web/src/locales/**' \
|
||||
-S 'website/developer-docs/api/reference/**' \
|
||||
authentik \
|
||||
internal \
|
||||
cmd \
|
||||
@ -28,10 +33,13 @@ CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
|
||||
|
||||
all: lint-fix lint test gen web ## Lint, build, and test everything
|
||||
|
||||
HELP_WIDTH := $(shell grep -h '^[a-z][^ ]*:.*\#\#' $(MAKEFILE_LIST) 2>/dev/null | \
cut -d':' -f1 | awk '{printf "%d\n", length}' | sort -rn | head -1)

help: ## Show this help
@echo "\nSpecify a command. The choices are:\n"
@grep -E '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | \
awk 'BEGIN {FS = ":.*?## "}; {printf " \033[0;36m%-24s\033[m %s\n", $$1, $$2}' | \
@grep -Eh '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | \
awk 'BEGIN {FS = ":.*?## "}; {printf " \033[0;36m%-$(HELP_WIDTH)s \033[m %s\n", $$1, $$2}' | \
sort
@echo ""
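HELP_WIDTH is computed once so the %-$(HELP_WIDTH)s format can pad every target name to the width of the longest documented target, replacing the hard-coded %-24s. Run in isolation the pipeline looks like this; the \#\# escaping is only needed inside make, in a plain shell it is just ##:

# Width of the longest documented make target in the current Makefile
grep -h '^[a-z][^ ]*:.*##' Makefile \
  | cut -d':' -f1 \
  | awk '{printf "%d\n", length}' \
  | sort -rn | head -1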
|
||||
@ -39,12 +47,12 @@ test-go:
|
||||
go test -timeout 0 -v -race -cover ./...
|
||||
|
||||
test-docker: ## Run all tests in a docker-compose
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker-compose pull -q
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test-all
echo "PG_PASS=$(shell openssl rand 32 | base64)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64)" >> .env
docker compose pull -q
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
rm -f .env
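Two things change in this recipe besides the docker compose spelling: the secret generation now pipes raw random bytes through base64, and it is wrapped in $(shell ...), which matters because a bare $(...) in a Makefile recipe is expanded by make rather than the shell and would most likely have produced an empty value. A quick sketch of the two generators side by side, both derived from 32 random bytes:

openssl rand -base64 32    # 32 random bytes, base64-encoded by openssl itself
openssl rand 32 | base64   # 32 raw random bytes, encoded by the base64 utility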
|
||||
test: ## Run the server tests and produce a coverage report (locally)
|
||||
@ -53,32 +61,40 @@ test: ## Run the server tests and produce a coverage report (locally)
|
||||
coverage report
|
||||
|
||||
lint-fix: ## Lint and automatically fix errors in the python source code. Reports spelling errors.
|
||||
isort authentik $(PY_SOURCES)
|
||||
black authentik $(PY_SOURCES)
|
||||
ruff authentik $(PY_SOURCES)
|
||||
black $(PY_SOURCES)
|
||||
ruff check --fix $(PY_SOURCES)
|
||||
codespell -w $(CODESPELL_ARGS)
|
||||
|
||||
lint: ## Lint the python and golang sources
|
||||
pylint $(PY_SOURCES)
|
||||
bandit -r $(PY_SOURCES) -x node_modules
|
||||
bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules
|
||||
golangci-lint run -v
|
||||
|
||||
core-install:
|
||||
poetry install
|
||||
|
||||
migrate: ## Run the Authentik Django server's migrations
|
||||
python -m lifecycle.migrate
|
||||
|
||||
i18n-extract: i18n-extract-core web-i18n-extract ## Extract strings that require translation into files to send to a translation service
|
||||
i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service
|
||||
|
||||
i18n-extract-core:
|
||||
ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en
|
||||
core-i18n-extract:
|
||||
ak makemessages \
|
||||
--add-location file \
|
||||
--no-obsolete \
|
||||
--ignore web \
|
||||
--ignore internal \
|
||||
--ignore ${GEN_API_TS} \
|
||||
--ignore ${GEN_API_GO} \
|
||||
--ignore website \
|
||||
-l en
|
||||
|
||||
install: web-install website-install ## Install all required dependencies for `web`, `website` and `core`
|
||||
poetry install
|
||||
install: web-install website-install core-install ## Install all required dependencies for `web`, `website` and `core`
|
||||
|
||||
dev-drop-db:
|
||||
echo dropdb -U ${pg_user} -h ${pg_host} ${pg_name}
|
||||
dropdb -U ${pg_user} -h ${pg_host} ${pg_name}
|
||||
# Also remove the test-db if it exists
|
||||
dropdb -U ${pg_user} -h ${pg_host} test_${pg_name} || true
|
||||
echo redis-cli -n 0 flushall
|
||||
redis-cli -n 0 flushall
|
||||
|
||||
dev-create-db:
|
||||
createdb -U ${pg_user} -h ${pg_host} ${pg_name}
|
||||
@ -90,8 +106,14 @@ dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik
|
||||
#########################
|
||||
|
||||
gen-build: ## Extract the schema from the database
|
||||
AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
|
||||
AUTHENTIK_DEBUG=true ak spectacular --file schema.yml
|
||||
AUTHENTIK_DEBUG=true \
|
||||
AUTHENTIK_TENANTS__ENABLED=true \
|
||||
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
|
||||
ak make_blueprint_schema > blueprints/schema.json
|
||||
AUTHENTIK_DEBUG=true \
|
||||
AUTHENTIK_TENANTS__ENABLED=true \
|
||||
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
|
||||
ak spectacular --file schema.yml
|
||||
|
||||
gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
|
||||
git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
|
||||
@ -102,53 +124,77 @@ gen-diff: ## (Release) generate the changelog diff between the current schema a
|
||||
docker run \
|
||||
--rm -v ${PWD}:/local \
|
||||
--user ${UID}:${GID} \
|
||||
docker.io/openapitools/openapi-diff:2.1.0-beta.6 \
|
||||
docker.io/openapitools/openapi-diff:2.1.0-beta.8 \
|
||||
--markdown /local/diff.md \
|
||||
/local/old_schema.yml /local/schema.yml
|
||||
rm old_schema.yml
|
||||
sed -i 's/{/{/g' diff.md
|
||||
sed -i 's/}/}/g' diff.md
|
||||
npx prettier --write diff.md
|
||||
|
||||
gen-clean:
|
||||
rm -rf web/api/src/
|
||||
rm -rf api/
|
||||
gen-clean-ts: ## Remove generated API client for Typescript
|
||||
rm -rf ./${GEN_API_TS}/
|
||||
rm -rf ./web/node_modules/@goauthentik/api/
|
||||
|
||||
gen-client-ts: ## Build and install the authentik API for Typescript into the authentik UI Application
|
||||
gen-clean-go: ## Remove generated API client for Go
|
||||
rm -rf ./${GEN_API_GO}/
|
||||
|
||||
gen-clean-py: ## Remove generated API client for Python
|
||||
rm -rf ./${GEN_API_PY}/
|
||||
|
||||
gen-clean: gen-clean-ts gen-clean-go gen-clean-py ## Remove generated API clients
|
||||
|
||||
gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescript into the authentik UI Application
|
||||
docker run \
|
||||
--rm -v ${PWD}:/local \
|
||||
--user ${UID}:${GID} \
|
||||
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
|
||||
-i /local/schema.yml \
|
||||
-g typescript-fetch \
|
||||
-o /local/gen-ts-api \
|
||||
-o /local/${GEN_API_TS} \
|
||||
-c /local/scripts/api-ts-config.yaml \
|
||||
--additional-properties=npmVersion=${NPM_VERSION} \
|
||||
--git-repo-id authentik \
|
||||
--git-user-id goauthentik
|
||||
mkdir -p web/node_modules/@goauthentik/api
|
||||
cd gen-ts-api && npm i
|
||||
\cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api
|
||||
cd ./${GEN_API_TS} && npm i
|
||||
\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api
|
||||
|
||||
gen-client-go: ## Build and install the authentik API for Golang
|
||||
mkdir -p ./gen-go-api ./gen-go-api/templates
|
||||
wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./gen-go-api/config.yaml
|
||||
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./gen-go-api/templates/README.mustache
|
||||
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./gen-go-api/templates/go.mod.mustache
|
||||
cp schema.yml ./gen-go-api/
|
||||
gen-client-py: gen-clean-py ## Build and install the authentik API for Python
|
||||
docker run \
|
||||
--rm -v ${PWD}/gen-go-api:/local \
|
||||
--rm -v ${PWD}:/local \
|
||||
--user ${UID}:${GID} \
|
||||
docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \
|
||||
-i /local/schema.yml \
|
||||
-g python \
|
||||
-o /local/${GEN_API_PY} \
|
||||
-c /local/scripts/api-py-config.yaml \
|
||||
--additional-properties=packageVersion=${NPM_VERSION} \
|
||||
--git-repo-id authentik \
|
||||
--git-user-id goauthentik
|
||||
pip install ./${GEN_API_PY}
|
||||
|
||||
gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
|
||||
mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates
|
||||
wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml
|
||||
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache
|
||||
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache
|
||||
cp schema.yml ./${GEN_API_GO}/
|
||||
docker run \
|
||||
--rm -v ${PWD}/${GEN_API_GO}:/local \
|
||||
--user ${UID}:${GID} \
|
||||
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
|
||||
-i /local/schema.yml \
|
||||
-g go \
|
||||
-o /local/ \
|
||||
-c /local/config.yaml
|
||||
go mod edit -replace goauthentik.io/api/v3=./gen-go-api
|
||||
rm -rf ./gen-go-api/config.yaml ./gen-go-api/templates/
|
||||
go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO}
|
||||
rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/
|
||||
|
||||
gen-dev-config: ## Generate a local development config file
|
||||
python -m scripts.generate_config
|
||||
|
||||
gen: gen-build gen-clean gen-client-ts
|
||||
gen: gen-build gen-client-ts
|
||||
|
||||
#########################
|
||||
## Web
|
||||
@ -157,7 +203,7 @@ gen: gen-build gen-clean gen-client-ts
|
||||
web-build: web-install ## Build the Authentik UI
|
||||
cd web && npm run build
|
||||
|
||||
web: web-lint-fix web-lint web-check-compile web-i18n-extract ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
|
||||
web: web-lint-fix web-lint web-check-compile ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
|
||||
|
||||
web-install: ## Install the necessary libraries to build the Authentik UI
|
||||
cd web && npm ci
|
||||
@ -207,6 +253,7 @@ website-watch: ## Build and watch the documentation website, updating automatic
|
||||
#########################
|
||||
|
||||
docker: ## Build a docker image of the current source tree
|
||||
mkdir -p ${GEN_API_TS}
|
||||
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
|
||||
|
||||
#########################
|
||||
@ -219,9 +266,6 @@ ci--meta-debug:
|
||||
python -V
|
||||
node --version
|
||||
|
||||
ci-pylint: ci--meta-debug
|
||||
pylint $(PY_SOURCES)
|
||||
|
||||
ci-black: ci--meta-debug
|
||||
black --check $(PY_SOURCES)
|
||||
|
||||
@ -231,14 +275,8 @@ ci-ruff: ci--meta-debug
|
||||
ci-codespell: ci--meta-debug
|
||||
codespell $(CODESPELL_ARGS) -s
|
||||
|
||||
ci-isort: ci--meta-debug
|
||||
isort --check $(PY_SOURCES)
|
||||
|
||||
ci-bandit: ci--meta-debug
|
||||
bandit -r $(PY_SOURCES)
|
||||
|
||||
ci-pyright: ci--meta-debug
|
||||
./web/node_modules/.bin/pyright $(PY_SOURCES)
|
||||
|
||||
ci-pending-migrations: ci--meta-debug
|
||||
ak makemigrations --check
|
||||
|
@@ -25,10 +25,10 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h
|
||||
|
||||
## Screenshots
|
||||
|
||||
| Light | Dark |
|
||||
| ------------------------------------------------------ | ----------------------------------------------------- |
|
||||
|  |  |
|
||||
|  |  |
|
||||
| Light | Dark |
|
||||
| ----------------------------------------------------------- | ---------------------------------------------------------- |
|
||||
|  |  |
|
||||
|  |  |
|
||||
|
||||
## Development
|
||||
|
||||
|
SECURITY.md (24 changes)
@@ -1,5 +1,9 @@
|
||||
authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce its existence, and release the fixed version.
|
||||
|
||||
## Independent audits and pentests
|
||||
|
||||
In May/June of 2023 [Cure53](https://cure53.de) conducted an audit and pentest. The [results](https://cure53.de/pentest-report_authentik.pdf) are published on the [Cure53 website](https://cure53.de/#publications-2023). For more details about authentik's response to the findings of the audit refer to [2023-06 Cure53 Code audit](https://goauthentik.io/docs/security/2023-06-cure53).
|
||||
|
||||
## What authentik classifies as a CVE
|
||||
|
||||
CVE (Common Vulnerabilities and Exposures) is a system designed to aggregate all vulnerabilities. As such, a CVE will be issued when there is either a vulnerability or an exposure. Per NIST, a vulnerability is:
|
||||
@ -14,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
|
||||
|
||||
(.x being the latest patch release for each version)
|
||||
|
||||
| Version | Supported |
|
||||
| --- | --- |
|
||||
| 2023.6.x | ✅ |
|
||||
| 2023.8.x | ✅ |
|
||||
| Version | Supported |
|
||||
| -------- | --------- |
|
||||
| 2024.4.x | ✅ |
|
||||
| 2024.6.x | ✅ |
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
@ -27,12 +31,12 @@ To report a vulnerability, send an email to [security@goauthentik.io](mailto:se
|
||||
|
||||
authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories:
|
||||
|
||||
| Score | Severity |
|
||||
| --- | --- |
|
||||
| 0.0 | None |
|
||||
| 0.1 – 3.9 | Low |
|
||||
| 4.0 – 6.9 | Medium |
|
||||
| 7.0 – 8.9 | High |
|
||||
| Score | Severity |
|
||||
| ---------- | -------- |
|
||||
| 0.0 | None |
|
||||
| 0.1 – 3.9 | Low |
|
||||
| 4.0 – 6.9 | Medium |
|
||||
| 7.0 – 8.9 | High |
|
||||
| 9.0 – 10.0 | Critical |
|
||||
|
||||
## Disclosure process
|
||||
|
@@ -1,12 +1,12 @@
|
||||
"""authentik root module"""
|
||||
from os import environ
|
||||
from typing import Optional
|
||||
|
||||
__version__ = "2023.8.3"
|
||||
from os import environ
|
||||
|
||||
__version__ = "2024.6.5"
|
||||
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
|
||||
|
||||
|
||||
def get_build_hash(fallback: Optional[str] = None) -> str:
|
||||
def get_build_hash(fallback: str | None = None) -> str:
|
||||
"""Get build hash"""
|
||||
build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "")
|
||||
return fallback if build_hash == "" and fallback else build_hash
|
||||
|
@@ -1,7 +1,8 @@
|
||||
"""Meta API"""
|
||||
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework.fields import CharField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.viewsets import ViewSet
|
||||
@ -21,7 +22,7 @@ class AppSerializer(PassiveSerializer):
|
||||
class AppsViewSet(ViewSet):
|
||||
"""Read-only view list all installed apps"""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
@extend_schema(responses={200: AppSerializer(many=True)})
|
||||
def list(self, request: Request) -> Response:
|
||||
@ -35,7 +36,7 @@ class AppsViewSet(ViewSet):
|
||||
class ModelViewSet(ViewSet):
|
||||
"""Read-only view list all installed models"""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
@extend_schema(responses={200: AppSerializer(many=True)})
|
||||
def list(self, request: Request) -> Response:
|
||||
|
@ -1,11 +1,12 @@
|
||||
"""authentik administration metrics"""
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from django.db.models.functions import ExtractHour
|
||||
from drf_spectacular.utils import extend_schema, extend_schema_field
|
||||
from guardian.shortcuts import get_objects_for_user
|
||||
from rest_framework.fields import IntegerField, SerializerMethodField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
@ -68,7 +69,7 @@ class LoginMetricsSerializer(PassiveSerializer):
|
||||
class AdministrationMetricsViewSet(APIView):
|
||||
"""Login Metrics per 1h"""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
@extend_schema(responses={200: LoginMetricsSerializer(many=False)})
|
||||
def get(self, request: Request) -> Response:
|
||||
|
@ -1,44 +1,52 @@
|
||||
"""authentik administration overview"""
|
||||
|
||||
import platform
|
||||
from datetime import datetime
|
||||
from ssl import OPENSSL_VERSION
|
||||
from sys import version as python_version
|
||||
from typing import TypedDict
|
||||
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
from django.utils.timezone import now
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from gunicorn import version_info as gunicorn_version
|
||||
from rest_framework.fields import SerializerMethodField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from authentik import get_full_version
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.enterprise.license import LicenseKey
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.utils.reflection import get_env
|
||||
from authentik.outposts.apps import MANAGED_OUTPOST
|
||||
from authentik.outposts.models import Outpost
|
||||
from authentik.rbac.permissions import HasPermission
|
||||
|
||||
|
||||
class RuntimeDict(TypedDict):
|
||||
"""Runtime information"""
|
||||
|
||||
python_version: str
|
||||
gunicorn_version: str
|
||||
environment: str
|
||||
architecture: str
|
||||
platform: str
|
||||
uname: str
|
||||
openssl_version: str
|
||||
openssl_fips_enabled: bool | None
|
||||
authentik_version: str
|
||||
|
||||
|
||||
class SystemSerializer(PassiveSerializer):
|
||||
class SystemInfoSerializer(PassiveSerializer):
|
||||
"""Get system information."""
|
||||
|
||||
http_headers = SerializerMethodField()
|
||||
http_host = SerializerMethodField()
|
||||
http_is_secure = SerializerMethodField()
|
||||
runtime = SerializerMethodField()
|
||||
tenant = SerializerMethodField()
|
||||
brand = SerializerMethodField()
|
||||
server_time = SerializerMethodField()
|
||||
embedded_outpost_disabled = SerializerMethodField()
|
||||
embedded_outpost_host = SerializerMethodField()
|
||||
|
||||
def get_http_headers(self, request: Request) -> dict[str, str]:
|
||||
@ -61,22 +69,30 @@ class SystemSerializer(PassiveSerializer):
|
||||
def get_runtime(self, request: Request) -> RuntimeDict:
|
||||
"""Get versions"""
|
||||
return {
|
||||
"python_version": python_version,
|
||||
"gunicorn_version": ".".join(str(x) for x in gunicorn_version),
|
||||
"environment": get_env(),
|
||||
"architecture": platform.machine(),
|
||||
"authentik_version": get_full_version(),
|
||||
"environment": get_env(),
|
||||
"openssl_fips_enabled": (
|
||||
backend._fips_enabled if LicenseKey.get_total().is_valid() else None
|
||||
),
|
||||
"openssl_version": OPENSSL_VERSION,
|
||||
"platform": platform.platform(),
|
||||
"python_version": python_version,
|
||||
"uname": " ".join(platform.uname()),
|
||||
}
|
||||
|
||||
def get_tenant(self, request: Request) -> str:
|
||||
"""Currently active tenant"""
|
||||
return str(request._request.tenant)
|
||||
def get_brand(self, request: Request) -> str:
|
||||
"""Currently active brand"""
|
||||
return str(request._request.brand)
|
||||
|
||||
def get_server_time(self, request: Request) -> datetime:
|
||||
"""Current server time"""
|
||||
return now()
|
||||
|
||||
def get_embedded_outpost_disabled(self, request: Request) -> bool:
|
||||
"""Whether the embedded outpost is disabled"""
|
||||
return CONFIG.get_bool("outposts.disable_embedded_outpost", False)
|
||||
|
||||
def get_embedded_outpost_host(self, request: Request) -> str:
|
||||
"""Get the FQDN configured on the embedded outpost"""
|
||||
outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
|
||||
@ -88,17 +104,17 @@ class SystemSerializer(PassiveSerializer):
|
||||
class SystemView(APIView):
|
||||
"""Get system information."""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
permission_classes = [HasPermission("authentik_rbac.view_system_info")]
|
||||
pagination_class = None
|
||||
filter_backends = []
|
||||
serializer_class = SystemSerializer
|
||||
serializer_class = SystemInfoSerializer
|
||||
|
||||
@extend_schema(responses={200: SystemSerializer(many=False)})
|
||||
@extend_schema(responses={200: SystemInfoSerializer(many=False)})
|
||||
def get(self, request: Request) -> Response:
|
||||
"""Get system information."""
|
||||
return Response(SystemSerializer(request).data)
|
||||
return Response(SystemInfoSerializer(request).data)
|
||||
|
||||
@extend_schema(responses={200: SystemSerializer(many=False)})
|
||||
@extend_schema(responses={200: SystemInfoSerializer(many=False)})
|
||||
def post(self, request: Request) -> Response:
|
||||
"""Get system information."""
|
||||
return Response(SystemSerializer(request).data)
|
||||
return Response(SystemInfoSerializer(request).data)
|
||||
|
@ -1,132 +0,0 @@
|
||||
"""Tasks API"""
|
||||
from importlib import import_module
|
||||
|
||||
from django.contrib import messages
|
||||
from django.http.response import Http404
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import (
|
||||
CharField,
|
||||
ChoiceField,
|
||||
DateTimeField,
|
||||
ListField,
|
||||
SerializerMethodField,
|
||||
)
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.viewsets import ViewSet
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class TaskSerializer(PassiveSerializer):
|
||||
"""Serialize TaskInfo and TaskResult"""
|
||||
|
||||
task_name = CharField()
|
||||
task_description = CharField()
|
||||
task_finish_timestamp = DateTimeField(source="finish_time")
|
||||
task_duration = SerializerMethodField()
|
||||
|
||||
status = ChoiceField(
|
||||
source="result.status.name",
|
||||
choices=[(x.name, x.name) for x in TaskResultStatus],
|
||||
)
|
||||
messages = ListField(source="result.messages")
|
||||
|
||||
def get_task_duration(self, instance: TaskInfo) -> int:
|
||||
"""Get the duration a task took to run"""
|
||||
return max(instance.finish_timestamp - instance.start_timestamp, 0)
|
||||
|
||||
def to_representation(self, instance: TaskInfo):
|
||||
"""When a new version of authentik adds fields to TaskInfo,
|
||||
the API will fail with an AttributeError, as the classes
|
||||
are pickled in cache. In that case, just delete the info"""
|
||||
try:
|
||||
return super().to_representation(instance)
|
||||
# pylint: disable=broad-except
|
||||
except Exception: # pragma: no cover
|
||||
if isinstance(self.instance, list):
|
||||
for inst in self.instance:
|
||||
inst.delete()
|
||||
else:
|
||||
self.instance.delete()
|
||||
return {}
|
||||
|
||||
|
||||
class TaskViewSet(ViewSet):
|
||||
"""Read-only view set that returns all background tasks"""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
serializer_class = TaskSerializer
|
||||
|
||||
@extend_schema(
|
||||
responses={
|
||||
200: TaskSerializer(many=False),
|
||||
404: OpenApiResponse(description="Task not found"),
|
||||
},
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
"id",
|
||||
type=OpenApiTypes.STR,
|
||||
location=OpenApiParameter.PATH,
|
||||
required=True,
|
||||
),
|
||||
],
|
||||
)
|
||||
def retrieve(self, request: Request, pk=None) -> Response:
|
||||
"""Get a single system task"""
|
||||
task = TaskInfo.by_name(pk)
|
||||
if not task:
|
||||
raise Http404
|
||||
return Response(TaskSerializer(task, many=False).data)
|
||||
|
||||
@extend_schema(responses={200: TaskSerializer(many=True)})
|
||||
def list(self, request: Request) -> Response:
|
||||
"""List system tasks"""
|
||||
tasks = sorted(TaskInfo.all().values(), key=lambda task: task.task_name)
|
||||
return Response(TaskSerializer(tasks, many=True).data)
|
||||
|
||||
@extend_schema(
|
||||
request=OpenApiTypes.NONE,
|
||||
responses={
|
||||
204: OpenApiResponse(description="Task retried successfully"),
|
||||
404: OpenApiResponse(description="Task not found"),
|
||||
500: OpenApiResponse(description="Failed to retry task"),
|
||||
},
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
"id",
|
||||
type=OpenApiTypes.STR,
|
||||
location=OpenApiParameter.PATH,
|
||||
required=True,
|
||||
),
|
||||
],
|
||||
)
|
||||
@action(detail=True, methods=["post"])
|
||||
def retry(self, request: Request, pk=None) -> Response:
|
||||
"""Retry task"""
|
||||
task = TaskInfo.by_name(pk)
|
||||
if not task:
|
||||
raise Http404
|
||||
try:
|
||||
task_module = import_module(task.task_call_module)
|
||||
task_func = getattr(task_module, task.task_call_func)
|
||||
LOGGER.debug("Running task", task=task_func)
|
||||
task_func.delay(*task.task_call_args, **task.task_call_kwargs)
|
||||
messages.success(
|
||||
self.request,
|
||||
_("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}),
|
||||
)
|
||||
return Response(status=204)
|
||||
except (ImportError, AttributeError): # pragma: no cover
|
||||
LOGGER.warning("Failed to run task, remove state", task=task)
|
||||
# if we get an import error, the module path has probably changed
|
||||
task.delete()
|
||||
return Response(status=500)
|
@ -1,4 +1,5 @@
|
||||
"""authentik administration overview"""
|
||||
|
||||
from django.core.cache import cache
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from packaging.version import parse
|
||||
@ -9,7 +10,7 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from authentik import __version__, get_build_hash
|
||||
from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version
|
||||
from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
|
||||
|
||||
@ -18,6 +19,7 @@ class VersionSerializer(PassiveSerializer):
|
||||
|
||||
version_current = SerializerMethodField()
|
||||
version_latest = SerializerMethodField()
|
||||
version_latest_valid = SerializerMethodField()
|
||||
build_hash = SerializerMethodField()
|
||||
outdated = SerializerMethodField()
|
||||
|
||||
@ -37,6 +39,10 @@ class VersionSerializer(PassiveSerializer):
|
||||
return __version__
|
||||
return version_in_cache
|
||||
|
||||
def get_version_latest_valid(self, _) -> bool:
|
||||
"""Check if latest version is valid"""
|
||||
return cache.get(VERSION_CACHE_KEY) != VERSION_NULL
|
||||
|
||||
def get_outdated(self, instance) -> bool:
|
||||
"""Check if we're running the latest version"""
|
||||
return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance))
|
||||
|
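The `outdated` check in the hunk above relies on `packaging.version` ordering; a minimal standalone sketch (the version strings are only examples):

```python
from packaging.version import parse

# parse() returns Version objects that compare by release segment,
# so "2024.6.5" is correctly treated as newer than "2024.4.2".
current, latest = parse("2024.4.2"), parse("2024.6.5")
print(current < latest)  # True -> the running install is outdated
```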
@ -1,19 +1,20 @@
|
||||
"""authentik administration overview"""
|
||||
|
||||
from django.conf import settings
|
||||
from drf_spectacular.utils import extend_schema, inline_serializer
|
||||
from rest_framework.fields import IntegerField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from authentik.rbac.permissions import HasPermission
|
||||
from authentik.root.celery import CELERY_APP
|
||||
|
||||
|
||||
class WorkerView(APIView):
|
||||
"""Get currently connected worker count."""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
permission_classes = [HasPermission("authentik_rbac.view_system_info")]
|
||||
|
||||
@extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()}))
|
||||
def get(self, request: Request) -> Response:
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""authentik admin app config"""
|
||||
|
||||
from prometheus_client import Gauge, Info
|
||||
|
||||
from authentik.blueprints.apps import ManagedAppConfig
|
||||
@ -14,7 +15,3 @@ class AuthentikAdminConfig(ManagedAppConfig):
|
||||
label = "authentik_admin"
|
||||
verbose_name = "authentik Admin"
|
||||
default = True
|
||||
|
||||
def reconcile_load_admin_signals(self):
|
||||
"""Load admin signals"""
|
||||
self.import_module("authentik.admin.signals")
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""authentik admin settings"""
|
||||
|
||||
from celery.schedules import crontab
|
||||
|
||||
from authentik.lib.utils.time import fqdn_rand
|
||||
|
@ -1,7 +1,7 @@
|
||||
"""admin signals"""
|
||||
|
||||
from django.dispatch import receiver
|
||||
|
||||
from authentik.admin.api.tasks import TaskInfo
|
||||
from authentik.admin.apps import GAUGE_WORKERS
|
||||
from authentik.root.celery import CELERY_APP
|
||||
from authentik.root.monitoring import monitoring_set
|
||||
@ -12,10 +12,3 @@ def monitoring_set_workers(sender, **kwargs):
|
||||
"""Set worker gauge"""
|
||||
count = len(CELERY_APP.control.ping(timeout=0.5))
|
||||
GAUGE_WORKERS.set(count)
|
||||
|
||||
|
||||
@receiver(monitoring_set)
|
||||
def monitoring_set_tasks(sender, **kwargs):
|
||||
"""Set task gauges"""
|
||||
for task in TaskInfo.all().values():
|
||||
task.update_metrics()
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""authentik admin tasks"""
|
||||
|
||||
import re
|
||||
|
||||
from django.core.cache import cache
|
||||
@ -11,17 +12,13 @@ from structlog.stdlib import get_logger
|
||||
from authentik import __version__, get_build_hash
|
||||
from authentik.admin.apps import PROM_INFO
|
||||
from authentik.events.models import Event, EventAction, Notification
|
||||
from authentik.events.monitored_tasks import (
|
||||
MonitoredTask,
|
||||
TaskResult,
|
||||
TaskResultStatus,
|
||||
prefill_task,
|
||||
)
|
||||
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.utils.http import get_http_session
|
||||
from authentik.root.celery import CELERY_APP
|
||||
|
||||
LOGGER = get_logger()
|
||||
VERSION_NULL = "0.0.0"
|
||||
VERSION_CACHE_KEY = "authentik_latest_version"
|
||||
VERSION_CACHE_TIMEOUT = 8 * 60 * 60 # 8 hours
|
||||
# Chop off the first ^ because we want to search the entire string
|
||||
@ -54,13 +51,13 @@ def clear_update_notifications():
|
||||
notification.delete()
|
||||
|
||||
|
||||
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||
@CELERY_APP.task(bind=True, base=SystemTask)
|
||||
@prefill_task
|
||||
def update_latest_version(self: MonitoredTask):
|
||||
def update_latest_version(self: SystemTask):
|
||||
"""Update latest version info"""
|
||||
if CONFIG.get_bool("disable_update_check"):
|
||||
cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
|
||||
self.set_status(TaskResult(TaskResultStatus.WARNING, messages=["Version check disabled."]))
|
||||
cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
|
||||
self.set_status(TaskStatus.WARNING, "Version check disabled.")
|
||||
return
|
||||
try:
|
||||
response = get_http_session().get(
|
||||
@ -70,9 +67,7 @@ def update_latest_version(self: MonitoredTask):
|
||||
data = response.json()
|
||||
upstream_version = data.get("stable", {}).get("version")
|
||||
cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT)
|
||||
self.set_status(
|
||||
TaskResult(TaskResultStatus.SUCCESSFUL, ["Successfully updated latest Version"])
|
||||
)
|
||||
self.set_status(TaskStatus.SUCCESSFUL, "Successfully updated latest Version")
|
||||
_set_prom_info()
|
||||
# Check if upstream version is newer than what we're running,
|
||||
# and if no event exists yet, create one.
|
||||
@ -88,8 +83,8 @@ def update_latest_version(self: MonitoredTask):
|
||||
event_dict["message"] = f"Changelog: {match.group()}"
|
||||
Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
|
||||
except (RequestException, IndexError) as exc:
|
||||
cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
|
||||
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
|
||||
cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
|
||||
self.set_error(exc)
|
||||
|
||||
|
||||
_set_prom_info()
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""test admin api"""
|
||||
|
||||
from json import loads
|
||||
|
||||
from django.test import TestCase
|
||||
@ -7,8 +8,6 @@ from django.urls import reverse
|
||||
from authentik import __version__
|
||||
from authentik.blueprints.tests import reconcile_app
|
||||
from authentik.core.models import Group, User
|
||||
from authentik.core.tasks import clean_expired_models
|
||||
from authentik.events.monitored_tasks import TaskResultStatus
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
|
||||
@ -23,53 +22,6 @@ class TestAdminAPI(TestCase):
|
||||
self.group.save()
|
||||
self.client.force_login(self.user)
|
||||
|
||||
def test_tasks(self):
|
||||
"""Test Task API"""
|
||||
clean_expired_models.delay()
|
||||
response = self.client.get(reverse("authentik_api:admin_system_tasks-list"))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
body = loads(response.content)
|
||||
self.assertTrue(any(task["task_name"] == "clean_expired_models" for task in body))
|
||||
|
||||
def test_tasks_single(self):
|
||||
"""Test Task API (read single)"""
|
||||
clean_expired_models.delay()
|
||||
response = self.client.get(
|
||||
reverse(
|
||||
"authentik_api:admin_system_tasks-detail",
|
||||
kwargs={"pk": "clean_expired_models"},
|
||||
)
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
body = loads(response.content)
|
||||
self.assertEqual(body["status"], TaskResultStatus.SUCCESSFUL.name)
|
||||
self.assertEqual(body["task_name"], "clean_expired_models")
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:admin_system_tasks-detail", kwargs={"pk": "qwerqwer"})
|
||||
)
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_tasks_retry(self):
|
||||
"""Test Task API (retry)"""
|
||||
clean_expired_models.delay()
|
||||
response = self.client.post(
|
||||
reverse(
|
||||
"authentik_api:admin_system_tasks-retry",
|
||||
kwargs={"pk": "clean_expired_models"},
|
||||
)
|
||||
)
|
||||
self.assertEqual(response.status_code, 204)
|
||||
|
||||
def test_tasks_retry_404(self):
|
||||
"""Test Task API (retry, 404)"""
|
||||
response = self.client.post(
|
||||
reverse(
|
||||
"authentik_api:admin_system_tasks-retry",
|
||||
kwargs={"pk": "qwerqewrqrqewrqewr"},
|
||||
)
|
||||
)
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_version(self):
|
||||
"""Test Version API"""
|
||||
response = self.client.get(reverse("authentik_api:admin_version"))
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""test admin tasks"""
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.test import TestCase
|
||||
from requests_mock import Mocker
|
||||
|
@ -1,15 +1,14 @@
|
||||
"""API URLs"""
|
||||
|
||||
from django.urls import path
|
||||
|
||||
from authentik.admin.api.meta import AppsViewSet, ModelViewSet
|
||||
from authentik.admin.api.metrics import AdministrationMetricsViewSet
|
||||
from authentik.admin.api.system import SystemView
|
||||
from authentik.admin.api.tasks import TaskViewSet
|
||||
from authentik.admin.api.version import VersionView
|
||||
from authentik.admin.api.workers import WorkerView
|
||||
|
||||
api_urlpatterns = [
|
||||
("admin/system_tasks", TaskViewSet, "admin_system_tasks"),
|
||||
("admin/apps", AppsViewSet, "apps"),
|
||||
("admin/models", ModelViewSet, "models"),
|
||||
path(
|
||||
|
@ -10,26 +10,3 @@ class AuthentikAPIConfig(AppConfig):
|
||||
label = "authentik_api"
|
||||
mountpoint = "api/"
|
||||
verbose_name = "authentik API"
|
||||
|
||||
def ready(self) -> None:
|
||||
from drf_spectacular.extensions import OpenApiAuthenticationExtension
|
||||
|
||||
from authentik.api.authentication import TokenAuthentication
|
||||
|
||||
# Class is defined here as it needs to be created early enough that drf-spectacular will
|
||||
# find it, but also won't cause any import issues
|
||||
# pylint: disable=unused-variable
|
||||
class TokenSchema(OpenApiAuthenticationExtension):
|
||||
"""Auth schema"""
|
||||
|
||||
target_class = TokenAuthentication
|
||||
name = "authentik"
|
||||
|
||||
def get_security_definition(self, auto_schema):
|
||||
"""Auth schema"""
|
||||
return {
|
||||
"type": "apiKey",
|
||||
"in": "header",
|
||||
"name": "Authorization",
|
||||
"scheme": "bearer",
|
||||
}
|
||||
|
@ -1,8 +1,10 @@
|
||||
"""API Authentication"""
|
||||
|
||||
from hmac import compare_digest
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
from django.conf import settings
|
||||
from drf_spectacular.extensions import OpenApiAuthenticationExtension
|
||||
from rest_framework.authentication import BaseAuthentication, get_authorization_header
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
from rest_framework.request import Request
|
||||
@ -16,7 +18,7 @@ from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
def validate_auth(header: bytes) -> Optional[str]:
|
||||
def validate_auth(header: bytes) -> str | None:
|
||||
"""Validate that the header is in a correct format,
|
||||
returns type and credentials"""
|
||||
auth_credentials = header.decode().strip()
|
||||
@ -31,7 +33,7 @@ def validate_auth(header: bytes) -> Optional[str]:
|
||||
return auth_credentials
|
||||
|
||||
|
||||
def bearer_auth(raw_header: bytes) -> Optional[User]:
|
||||
def bearer_auth(raw_header: bytes) -> User | None:
|
||||
"""raw_header in the Format of `Bearer ....`"""
|
||||
user = auth_user_lookup(raw_header)
|
||||
if not user:
|
||||
@ -41,7 +43,7 @@ def bearer_auth(raw_header: bytes) -> Optional[User]:
|
||||
return user
|
||||
|
||||
|
||||
def auth_user_lookup(raw_header: bytes) -> Optional[User]:
|
||||
def auth_user_lookup(raw_header: bytes) -> User | None:
|
||||
"""raw_header in the Format of `Bearer ....`"""
|
||||
from authentik.providers.oauth2.models import AccessToken
|
||||
|
||||
@ -74,7 +76,7 @@ def auth_user_lookup(raw_header: bytes) -> Optional[User]:
|
||||
raise AuthenticationFailed("Token invalid/expired")
|
||||
|
||||
|
||||
def token_secret_key(value: str) -> Optional[User]:
|
||||
def token_secret_key(value: str) -> User | None:
|
||||
"""Check if the token is the secret key
|
||||
and return the service account for the managed outpost"""
|
||||
from authentik.outposts.apps import MANAGED_OUTPOST
|
||||
@ -101,3 +103,14 @@ class TokenAuthentication(BaseAuthentication):
|
||||
return None
|
||||
|
||||
return (user, None) # pragma: no cover
|
||||
|
||||
|
||||
class TokenSchema(OpenApiAuthenticationExtension):
|
||||
"""Auth schema"""
|
||||
|
||||
target_class = TokenAuthentication
|
||||
name = "authentik"
|
||||
|
||||
def get_security_definition(self, auto_schema):
|
||||
"""Auth schema"""
|
||||
return {"type": "http", "scheme": "bearer"}
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""API Authorization"""
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.models import Model
|
||||
from django.db.models.query import QuerySet
|
||||
@ -7,9 +8,9 @@ from rest_framework.authentication import get_authorization_header
|
||||
from rest_framework.filters import BaseFilterBackend
|
||||
from rest_framework.permissions import BasePermission
|
||||
from rest_framework.request import Request
|
||||
from rest_framework_guardian.filters import ObjectPermissionsFilter
|
||||
|
||||
from authentik.api.authentication import validate_auth
|
||||
from authentik.rbac.filters import ObjectFilter
|
||||
|
||||
|
||||
class OwnerFilter(BaseFilterBackend):
|
||||
@ -26,14 +27,14 @@ class OwnerFilter(BaseFilterBackend):
|
||||
class SecretKeyFilter(DjangoFilterBackend):
|
||||
"""Allow access to all objects when authenticated with secret key as token.
|
||||
|
||||
Replaces both DjangoFilterBackend and ObjectPermissionsFilter"""
|
||||
Replaces both DjangoFilterBackend and ObjectFilter"""
|
||||
|
||||
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
|
||||
auth_header = get_authorization_header(request)
|
||||
token = validate_auth(auth_header)
|
||||
if token and token == settings.SECRET_KEY:
|
||||
return queryset
|
||||
queryset = ObjectPermissionsFilter().filter_queryset(request, queryset, view)
|
||||
queryset = ObjectFilter().filter_queryset(request, queryset, view)
|
||||
return super().filter_queryset(request, queryset, view)
|
||||
|
||||
|
||||
|
@ -1,35 +0,0 @@
|
||||
"""API Decorators"""
|
||||
from functools import wraps
|
||||
from typing import Callable, Optional
|
||||
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
def permission_required(perm: Optional[str] = None, other_perms: Optional[list[str]] = None):
|
||||
"""Check permissions for a single custom action"""
|
||||
|
||||
def wrapper_outter(func: Callable):
|
||||
"""Check permissions for a single custom action"""
|
||||
|
||||
@wraps(func)
|
||||
def wrapper(self: ModelViewSet, request: Request, *args, **kwargs) -> Response:
|
||||
if perm:
|
||||
obj = self.get_object()
|
||||
if not request.user.has_perm(perm, obj):
|
||||
LOGGER.debug("denying access for object", user=request.user, perm=perm, obj=obj)
|
||||
return self.permission_denied(request)
|
||||
if other_perms:
|
||||
for other_perm in other_perms:
|
||||
if not request.user.has_perm(other_perm):
|
||||
LOGGER.debug("denying access for other", user=request.user, perm=perm)
|
||||
return self.permission_denied(request)
|
||||
return func(self, request, *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return wrapper_outter
|
@ -1,4 +1,5 @@
|
||||
"""Pagination which includes total pages and current page"""
|
||||
|
||||
from rest_framework import pagination
|
||||
from rest_framework.response import Response
|
||||
|
||||
@ -77,3 +78,10 @@ class Pagination(pagination.PageNumberPagination):
|
||||
},
|
||||
"required": ["pagination", "results"],
|
||||
}
|
||||
|
||||
|
||||
class SmallerPagination(Pagination):
|
||||
"""Smaller pagination for objects which might require a lot of queries
|
||||
to retrieve all data for."""
|
||||
|
||||
max_page_size = 10
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from drf_spectacular.generators import SchemaGenerator
|
||||
from drf_spectacular.plumbing import (
|
||||
@ -11,6 +12,7 @@ from drf_spectacular.settings import spectacular_settings
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from rest_framework.settings import api_settings
|
||||
|
||||
from authentik.api.apps import AuthentikAPIConfig
|
||||
from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA
|
||||
|
||||
|
||||
@ -100,3 +102,12 @@ def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
|
||||
comp = result["components"]["schemas"][component]
|
||||
comp["additionalProperties"] = {}
|
||||
return result
|
||||
|
||||
|
||||
def preprocess_schema_exclude_non_api(endpoints, **kwargs):
|
||||
"""Filter out all API Views which are not mounted under /api"""
|
||||
return [
|
||||
(path, path_regex, method, callback)
|
||||
for path, path_regex, method, callback in endpoints
|
||||
if path.startswith("/" + AuthentikAPIConfig.mountpoint)
|
||||
]
|
||||
|
@ -1,13 +1,13 @@
|
||||
{% extends "base/skeleton.html" %}
|
||||
|
||||
{% load static %}
|
||||
{% load authentik_core %}
|
||||
|
||||
{% block title %}
|
||||
API Browser - {{ tenant.branding_title }}
|
||||
API Browser - {{ brand.branding_title }}
|
||||
{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
<script src="{% static 'dist/standalone/api-browser/index.js' %}?version={{ version }}" type="module"></script>
|
||||
{% versioned_script "dist/standalone/api-browser/index-%v.js" %}
|
||||
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
|
||||
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
|
||||
{% endblock %}
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""Test API Authentication"""
|
||||
|
||||
import json
|
||||
from base64 import b64encode
|
||||
|
||||
@ -12,6 +13,8 @@ from authentik.blueprints.tests import reconcile_app
|
||||
from authentik.core.models import Token, TokenIntents, User, UserTypes
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.outposts.apps import MANAGED_OUTPOST
|
||||
from authentik.outposts.models import Outpost
|
||||
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
|
||||
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
|
||||
|
||||
@ -22,17 +25,17 @@ class TestAPIAuth(TestCase):
|
||||
def test_invalid_type(self):
|
||||
"""Test invalid type"""
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
bearer_auth("foo bar".encode())
|
||||
bearer_auth(b"foo bar")
|
||||
|
||||
def test_invalid_empty(self):
|
||||
"""Test invalid type"""
|
||||
self.assertIsNone(bearer_auth("Bearer ".encode()))
|
||||
self.assertIsNone(bearer_auth("".encode()))
|
||||
self.assertIsNone(bearer_auth(b"Bearer "))
|
||||
self.assertIsNone(bearer_auth(b""))
|
||||
|
||||
def test_invalid_no_token(self):
|
||||
"""Test invalid with no token"""
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
auth = b64encode(":abc".encode()).decode()
|
||||
auth = b64encode(b":abc").decode()
|
||||
self.assertIsNone(bearer_auth(f"Basic :{auth}".encode()))
|
||||
|
||||
def test_bearer_valid(self):
|
||||
@ -49,8 +52,12 @@ class TestAPIAuth(TestCase):
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
bearer_auth(f"Bearer {token.key}".encode())
|
||||
|
||||
def test_managed_outpost(self):
|
||||
@reconcile_app("authentik_outposts")
|
||||
def test_managed_outpost_fail(self):
|
||||
"""Test managed outpost"""
|
||||
outpost = Outpost.objects.filter(managed=MANAGED_OUTPOST).first()
|
||||
outpost.user.delete()
|
||||
outpost.delete()
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
|
||||
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""Test config API"""
|
||||
|
||||
from json import loads
|
||||
|
||||
from django.urls import reverse
|
||||
|
@ -1,34 +0,0 @@
|
||||
"""test decorators api"""
|
||||
from django.urls import reverse
|
||||
from guardian.shortcuts import assign_perm
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import Application, User
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
|
||||
class TestAPIDecorators(APITestCase):
|
||||
"""test decorators api"""
|
||||
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
self.user = User.objects.create(username="test-user")
|
||||
|
||||
def test_obj_perm_denied(self):
|
||||
"""Test object perm denied"""
|
||||
self.client.force_login(self.user)
|
||||
app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
|
||||
)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
def test_other_perm_denied(self):
|
||||
"""Test other perm denied"""
|
||||
self.client.force_login(self.user)
|
||||
app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
assign_perm("authentik_core.view_application", self.user, app)
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
|
||||
)
|
||||
self.assertEqual(response.status_code, 403)
|
@ -1,4 +1,5 @@
|
||||
"""Schema generation tests"""
|
||||
|
||||
from django.urls import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
from yaml import safe_load
|
||||
|
@ -1,5 +1,6 @@
|
||||
"""authentik API Modelviewset tests"""
|
||||
from typing import Callable
|
||||
|
||||
from collections.abc import Callable
|
||||
|
||||
from django.test import TestCase
|
||||
from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet
|
||||
@ -16,6 +17,7 @@ def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable:
|
||||
|
||||
def tester(self: TestModelViewSets):
|
||||
self.assertIsNotNone(getattr(test_viewset, "search_fields", None))
|
||||
self.assertIsNotNone(getattr(test_viewset, "ordering", None))
|
||||
filterset_class = getattr(test_viewset, "filterset_class", None)
|
||||
if not filterset_class:
|
||||
self.assertIsNotNone(getattr(test_viewset, "filterset_fields", None))
|
||||
@ -24,6 +26,6 @@ def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable:
|
||||
|
||||
|
||||
for _, viewset, _ in router.registry:
|
||||
if not issubclass(viewset, (ModelViewSet, ReadOnlyModelViewSet)):
|
||||
if not issubclass(viewset, ModelViewSet | ReadOnlyModelViewSet):
|
||||
continue
|
||||
setattr(TestModelViewSets, f"test_viewset_{viewset.__name__}", viewset_tester_factory(viewset))
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""authentik api urls"""
|
||||
|
||||
from django.urls import include, path
|
||||
|
||||
from authentik.api.v3.urls import urlpatterns as v3_urls
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""core Configs API"""
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from django.conf import settings
|
||||
@ -19,7 +20,7 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.events.geo import GEOIP_READER
|
||||
from authentik.events.context_processors.base import get_context_processors
|
||||
from authentik.lib.config import CONFIG
|
||||
|
||||
capabilities = Signal()
|
||||
@ -30,6 +31,7 @@ class Capabilities(models.TextChoices):
|
||||
|
||||
CAN_SAVE_MEDIA = "can_save_media"
|
||||
CAN_GEO_IP = "can_geo_ip"
|
||||
CAN_ASN = "can_asn"
|
||||
CAN_IMPERSONATE = "can_impersonate"
|
||||
CAN_DEBUG = "can_debug"
|
||||
IS_ENTERPRISE = "is_enterprise"
|
||||
@ -66,11 +68,16 @@ class ConfigView(APIView):
|
||||
"""Get all capabilities this server instance supports"""
|
||||
caps = []
|
||||
deb_test = settings.DEBUG or settings.TEST
|
||||
if Path(settings.MEDIA_ROOT).is_mount() or deb_test:
|
||||
if (
|
||||
CONFIG.get("storage.media.backend", "file") == "s3"
|
||||
or Path(settings.STORAGES["default"]["OPTIONS"]["location"]).is_mount()
|
||||
or deb_test
|
||||
):
|
||||
caps.append(Capabilities.CAN_SAVE_MEDIA)
|
||||
if GEOIP_READER.enabled:
|
||||
caps.append(Capabilities.CAN_GEO_IP)
|
||||
if CONFIG.get_bool("impersonation"):
|
||||
for processor in get_context_processors():
|
||||
if cap := processor.capability():
|
||||
caps.append(cap)
|
||||
if self.request.tenant.impersonation:
|
||||
caps.append(Capabilities.CAN_IMPERSONATE)
|
||||
if settings.DEBUG: # pragma: no cover
|
||||
caps.append(Capabilities.CAN_DEBUG)
|
||||
@ -93,10 +100,10 @@ class ConfigView(APIView):
|
||||
"traces_sample_rate": float(CONFIG.get("error_reporting.sample_rate", 0.4)),
|
||||
},
|
||||
"capabilities": self.get_capabilities(),
|
||||
"cache_timeout": CONFIG.get_int("redis.cache_timeout"),
|
||||
"cache_timeout_flows": CONFIG.get_int("redis.cache_timeout_flows"),
|
||||
"cache_timeout_policies": CONFIG.get_int("redis.cache_timeout_policies"),
|
||||
"cache_timeout_reputation": CONFIG.get_int("redis.cache_timeout_reputation"),
|
||||
"cache_timeout": CONFIG.get_int("cache.timeout"),
|
||||
"cache_timeout_flows": CONFIG.get_int("cache.timeout_flows"),
|
||||
"cache_timeout_policies": CONFIG.get_int("cache.timeout_policies"),
|
||||
"cache_timeout_reputation": CONFIG.get_int("cache.timeout_reputation"),
|
||||
}
|
||||
)
|
||||
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""api v3 urls"""
|
||||
|
||||
from importlib import import_module
|
||||
|
||||
from django.urls import path
|
||||
@ -21,7 +22,9 @@ _other_urls = []
|
||||
for _authentik_app in get_apps():
|
||||
try:
|
||||
api_urls = import_module(f"{_authentik_app.name}.urls")
|
||||
except (ModuleNotFoundError, ImportError) as exc:
|
||||
except ModuleNotFoundError:
|
||||
continue
|
||||
except ImportError as exc:
|
||||
LOGGER.warning("Could not import app's URLs", app_name=_authentik_app.name, exc=exc)
|
||||
continue
|
||||
if not hasattr(api_urls, "api_urlpatterns"):
|
||||
@ -30,7 +33,7 @@ for _authentik_app in get_apps():
|
||||
app_name=_authentik_app.name,
|
||||
)
|
||||
continue
|
||||
urls: list = getattr(api_urls, "api_urlpatterns")
|
||||
urls: list = api_urls.api_urlpatterns
|
||||
for url in urls:
|
||||
if isinstance(url, URLPattern):
|
||||
_other_urls.append(url)
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""General API Views"""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from django.urls import reverse
|
||||
|
@ -1,22 +1,22 @@
|
||||
"""Serializer mixin for managed models"""
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from drf_spectacular.utils import extend_schema, inline_serializer
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.fields import CharField, DateTimeField, JSONField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.fields import CharField, DateTimeField
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ListSerializer, ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.blueprints.models import BlueprintInstance
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.blueprints.v1.oci import OCI_PREFIX
|
||||
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.core.api.utils import JSONDictField, PassiveSerializer
|
||||
from authentik.rbac.decorators import permission_required
|
||||
|
||||
|
||||
class ManagedSerializer:
|
||||
@ -29,7 +29,7 @@ class MetadataSerializer(PassiveSerializer):
|
||||
"""Serializer for blueprint metadata"""
|
||||
|
||||
name = CharField()
|
||||
labels = JSONField()
|
||||
labels = JSONDictField()
|
||||
|
||||
|
||||
class BlueprintInstanceSerializer(ModelSerializer):
|
||||
@ -52,7 +52,9 @@ class BlueprintInstanceSerializer(ModelSerializer):
|
||||
valid, logs = Importer.from_string(content, context).validate()
|
||||
if not valid:
|
||||
text_logs = "\n".join([x["event"] for x in logs])
|
||||
raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs}))
|
||||
raise ValidationError(
|
||||
_("Failed to validate blueprint: {logs}".format_map({"logs": text_logs}))
|
||||
)
|
||||
return content
|
||||
|
||||
def validate(self, attrs: dict) -> dict:
|
||||
@ -87,11 +89,11 @@ class BlueprintInstanceSerializer(ModelSerializer):
|
||||
class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
|
||||
"""Blueprint instances"""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
serializer_class = BlueprintInstanceSerializer
|
||||
queryset = BlueprintInstance.objects.all()
|
||||
search_fields = ["name", "path"]
|
||||
filterset_fields = ["name", "path"]
|
||||
ordering = ["name"]
|
||||
|
||||
@extend_schema(
|
||||
responses={
|
||||
|
@ -1,5 +1,6 @@
|
||||
"""authentik Blueprints app"""
|
||||
|
||||
from collections.abc import Callable
|
||||
from importlib import import_module
|
||||
from inspect import ismethod
|
||||
|
||||
@ -7,40 +8,100 @@ from django.apps import AppConfig
|
||||
from django.db import DatabaseError, InternalError, ProgrammingError
|
||||
from structlog.stdlib import BoundLogger, get_logger
|
||||
|
||||
from authentik.root.signals import startup
|
||||
|
||||
|
||||
class ManagedAppConfig(AppConfig):
|
||||
"""Basic reconciliation logic for apps"""
|
||||
|
||||
_logger: BoundLogger
|
||||
logger: BoundLogger
|
||||
|
||||
RECONCILE_GLOBAL_CATEGORY: str = "global"
|
||||
RECONCILE_TENANT_CATEGORY: str = "tenant"
|
||||
|
||||
def __init__(self, app_name: str, *args, **kwargs) -> None:
|
||||
super().__init__(app_name, *args, **kwargs)
|
||||
self._logger = get_logger().bind(app_name=app_name)
|
||||
self.logger = get_logger().bind(app_name=app_name)
|
||||
|
||||
def ready(self) -> None:
|
||||
self.reconcile()
|
||||
self.import_related()
|
||||
startup.connect(self._on_startup_callback, dispatch_uid=self.label)
|
||||
return super().ready()
|
||||
|
||||
def _on_startup_callback(self, sender, **_):
|
||||
self._reconcile_global()
|
||||
self._reconcile_tenant()
|
||||
|
||||
def import_related(self):
|
||||
"""Automatically import related modules which rely on just being imported
|
||||
to register themselves (mainly django signals and celery tasks)"""
|
||||
|
||||
def import_relative(rel_module: str):
|
||||
try:
|
||||
module_name = f"{self.name}.{rel_module}"
|
||||
import_module(module_name)
|
||||
self.logger.info("Imported related module", module=module_name)
|
||||
except ModuleNotFoundError:
|
||||
pass
|
||||
|
||||
import_relative("checks")
|
||||
import_relative("tasks")
|
||||
import_relative("signals")
|
||||
|
||||
def import_module(self, path: str):
|
||||
"""Load module"""
|
||||
import_module(path)
|
||||
|
||||
def reconcile(self) -> None:
|
||||
"""reconcile ourselves"""
|
||||
prefix = "reconcile_"
|
||||
def _reconcile(self, prefix: str) -> None:
|
||||
for meth_name in dir(self):
|
||||
meth = getattr(self, meth_name)
|
||||
if not ismethod(meth):
|
||||
continue
|
||||
if not meth_name.startswith(prefix):
|
||||
category = getattr(meth, "_authentik_managed_reconcile", None)
|
||||
if category != prefix:
|
||||
continue
|
||||
name = meth_name.replace(prefix, "")
|
||||
try:
|
||||
self._logger.debug("Starting reconciler", name=name)
|
||||
self.logger.debug("Starting reconciler", name=name)
|
||||
meth()
|
||||
self._logger.debug("Successfully reconciled", name=name)
|
||||
self.logger.debug("Successfully reconciled", name=name)
|
||||
except (DatabaseError, ProgrammingError, InternalError) as exc:
|
||||
self._logger.debug("Failed to run reconcile", name=name, exc=exc)
|
||||
self.logger.warning("Failed to run reconcile", name=name, exc=exc)
|
||||
|
||||
@staticmethod
|
||||
def reconcile_tenant(func: Callable):
|
||||
"""Mark a function to be called on startup (for each tenant)"""
|
||||
func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_TENANT_CATEGORY
|
||||
return func
|
||||
|
||||
@staticmethod
|
||||
def reconcile_global(func: Callable):
|
||||
"""Mark a function to be called on startup (globally)"""
|
||||
func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_GLOBAL_CATEGORY
|
||||
return func
|
||||
|
||||
def _reconcile_tenant(self) -> None:
|
||||
"""reconcile ourselves for tenanted methods"""
|
||||
from authentik.tenants.models import Tenant
|
||||
|
||||
try:
|
||||
tenants = list(Tenant.objects.filter(ready=True))
|
||||
except (DatabaseError, ProgrammingError, InternalError) as exc:
|
||||
self.logger.debug("Failed to get tenants to run reconcile", exc=exc)
|
||||
return
|
||||
for tenant in tenants:
|
||||
with tenant:
|
||||
self._reconcile(self.RECONCILE_TENANT_CATEGORY)
|
||||
|
||||
def _reconcile_global(self) -> None:
|
||||
"""
|
||||
reconcile ourselves for global methods.
|
||||
Used for signals, tasks, etc. Database queries should not be made in here.
|
||||
"""
|
||||
from django_tenants.utils import get_public_schema_name, schema_context
|
||||
|
||||
with schema_context(get_public_schema_name()):
|
||||
self._reconcile(self.RECONCILE_GLOBAL_CATEGORY)
|
||||
|
||||
|
||||
class AuthentikBlueprintsConfig(ManagedAppConfig):
|
||||
@ -51,11 +112,13 @@ class AuthentikBlueprintsConfig(ManagedAppConfig):
|
||||
verbose_name = "authentik Blueprints"
|
||||
default = True
|
||||
|
||||
def reconcile_load_blueprints_v1_tasks(self):
|
||||
@ManagedAppConfig.reconcile_global
|
||||
def load_blueprints_v1_tasks(self):
|
||||
"""Load v1 tasks"""
|
||||
self.import_module("authentik.blueprints.v1.tasks")
|
||||
|
||||
def reconcile_blueprints_discovery(self):
|
||||
@ManagedAppConfig.reconcile_tenant
|
||||
def blueprints_discovery(self):
|
||||
"""Run blueprint discovery"""
|
||||
from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints
|
||||
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""Apply blueprint from commandline"""
|
||||
|
||||
from sys import exit as sys_exit
|
||||
|
||||
from django.core.management.base import BaseCommand, no_translations
|
||||
@ -6,6 +7,7 @@ from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.blueprints.models import BlueprintInstance
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.tenants.models import Tenant
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
@ -16,14 +18,16 @@ class Command(BaseCommand):
|
||||
@no_translations
|
||||
def handle(self, *args, **options):
|
||||
"""Apply all blueprints in order, abort when one fails to import"""
|
||||
for blueprint_path in options.get("blueprints", []):
|
||||
content = BlueprintInstance(path=blueprint_path).retrieve()
|
||||
importer = Importer.from_string(content)
|
||||
valid, _ = importer.validate()
|
||||
if not valid:
|
||||
self.stderr.write("blueprint invalid")
|
||||
sys_exit(1)
|
||||
importer.apply()
|
||||
for tenant in Tenant.objects.filter(ready=True):
|
||||
with tenant:
|
||||
for blueprint_path in options.get("blueprints", []):
|
||||
content = BlueprintInstance(path=blueprint_path).retrieve()
|
||||
importer = Importer.from_string(content)
|
||||
valid, _ = importer.validate()
|
||||
if not valid:
|
||||
self.stderr.write("blueprint invalid")
|
||||
sys_exit(1)
|
||||
importer.apply()
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument("blueprints", nargs="+", type=str)
|
||||
|
@ -1,17 +1,19 @@
|
||||
"""Export blueprint of current authentik install"""
|
||||
from django.core.management.base import BaseCommand, no_translations
|
||||
|
||||
from django.core.management.base import no_translations
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.blueprints.v1.exporter import Exporter
|
||||
from authentik.tenants.management import TenantCommand
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
class Command(TenantCommand):
|
||||
"""Export blueprint of current authentik install"""
|
||||
|
||||
@no_translations
|
||||
def handle(self, *args, **options):
|
||||
def handle_per_tenant(self, *args, **options):
|
||||
"""Export blueprint of current authentik install"""
|
||||
exporter = Exporter()
|
||||
self.stdout.write(exporter.export_to_string())
|
||||
|
@ -1,14 +1,17 @@
|
||||
"""Generate JSON Schema for blueprints"""
|
||||
|
||||
from json import dumps
|
||||
from typing import Any
|
||||
|
||||
from django.core.management.base import BaseCommand, no_translations
|
||||
from django.db.models import Model
|
||||
from drf_jsonschema_serializer.convert import field_to_converter
|
||||
from django.db.models import Model, fields
|
||||
from drf_jsonschema_serializer.convert import converter, field_to_converter
|
||||
from rest_framework.fields import Field, JSONField, UUIDField
|
||||
from rest_framework.relations import PrimaryKeyRelatedField
|
||||
from rest_framework.serializers import Serializer
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik import __version__
|
||||
from authentik.blueprints.v1.common import BlueprintEntryDesiredState
|
||||
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed
|
||||
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
|
||||
@ -17,6 +20,23 @@ from authentik.lib.models import SerializerModel
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
@converter
|
||||
class PrimaryKeyRelatedFieldConverter:
|
||||
"""Custom primary key field converter which is aware of non-integer based PKs
|
||||
|
||||
This is not an exhaustive fix for other non-int PKs, however in authentik we either
|
||||
use UUIDs or ints"""
|
||||
|
||||
field_class = PrimaryKeyRelatedField
|
||||
|
||||
def convert(self, field: PrimaryKeyRelatedField):
|
||||
model: Model = field.queryset.model
|
||||
pk_field = model._meta.pk
|
||||
if isinstance(pk_field, fields.UUIDField):
|
||||
return {"type": "string", "format": "uuid"}
|
||||
return {"type": "integer"}
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Generate JSON Schema for blueprints"""
|
||||
|
||||
@ -28,7 +48,7 @@ class Command(BaseCommand):
|
||||
"$schema": "http://json-schema.org/draft-07/schema",
|
||||
"$id": "https://goauthentik.io/blueprints/schema.json",
|
||||
"type": "object",
|
||||
"title": "authentik Blueprint schema",
|
||||
"title": f"authentik {__version__} Blueprint schema",
|
||||
"required": ["version", "entries"],
|
||||
"properties": {
|
||||
"version": {
|
||||
|
@ -14,7 +14,7 @@ from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM
|
||||
from authentik.lib.config import CONFIG
|
||||
|
||||
|
||||
def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
|
||||
def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
|
||||
"""Check if blueprint should be imported"""
|
||||
from authentik.blueprints.models import BlueprintInstanceStatus
|
||||
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
|
||||
@ -29,7 +29,9 @@ def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
|
||||
if version != 1:
|
||||
return
|
||||
blueprint_file.seek(0)
|
||||
instance: BlueprintInstance = BlueprintInstance.objects.filter(path=path).first()
|
||||
instance: BlueprintInstance = (
|
||||
BlueprintInstance.objects.using(db_alias).filter(path=path).first()
|
||||
)
|
||||
rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
|
||||
meta = None
|
||||
if metadata:
|
||||
@ -37,7 +39,7 @@ def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
|
||||
if meta.labels.get(LABEL_AUTHENTIK_INSTANTIATE, "").lower() == "false":
|
||||
return
|
||||
if not instance:
|
||||
instance = BlueprintInstance(
|
||||
BlueprintInstance.objects.using(db_alias).create(
|
||||
name=meta.name if meta else str(rel_path),
|
||||
path=str(rel_path),
|
||||
context={},
|
||||
@ -47,7 +49,6 @@ def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
|
||||
last_applied_hash="",
|
||||
metadata=metadata or {},
|
||||
)
|
||||
instance.save()
|
||||
|
||||
|
||||
def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
@ -56,7 +57,7 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEdit
|
||||
|
||||
db_alias = schema_editor.connection.alias
|
||||
for file in glob(f"{CONFIG.get('blueprints_dir')}/**/*.yaml", recursive=True):
|
||||
check_blueprint_v1_file(BlueprintInstance, Path(file))
|
||||
check_blueprint_v1_file(BlueprintInstance, db_alias, Path(file))
|
||||
|
||||
for blueprint in BlueprintInstance.objects.using(db_alias).all():
|
||||
# If we already have flows (and we should always run before flow migrations)
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""blueprint models"""
|
||||
|
||||
from pathlib import Path
|
||||
from uuid import uuid4
|
||||
|
||||
@ -70,6 +71,19 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
enabled = models.BooleanField(default=True)
|
||||
managed_models = ArrayField(models.TextField(), default=list)
|
||||
|
||||
class Meta:
|
||||
verbose_name = _("Blueprint Instance")
|
||||
verbose_name_plural = _("Blueprint Instances")
|
||||
unique_together = (
|
||||
(
|
||||
"name",
|
||||
"path",
|
||||
),
|
||||
)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Blueprint Instance {self.name}"
|
||||
|
||||
def retrieve_oci(self) -> str:
|
||||
"""Get blueprint from an OCI registry"""
|
||||
client = BlueprintOCIClient(self.path.replace(OCI_PREFIX, "https://"))
|
||||
@ -88,7 +102,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
raise BlueprintRetrievalFailed("Invalid blueprint path")
|
||||
with full_path.open("r", encoding="utf-8") as _file:
|
||||
return _file.read()
|
||||
except (IOError, OSError) as exc:
|
||||
except OSError as exc:
|
||||
raise BlueprintRetrievalFailed(exc) from exc
|
||||
|
||||
def retrieve(self) -> str:
|
||||
@ -104,16 +118,3 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
from authentik.blueprints.api import BlueprintInstanceSerializer
|
||||
|
||||
return BlueprintInstanceSerializer
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Blueprint Instance {self.name}"
|
||||
|
||||
class Meta:
|
||||
verbose_name = _("Blueprint Instance")
|
||||
verbose_name_plural = _("Blueprint Instances")
|
||||
unique_together = (
|
||||
(
|
||||
"name",
|
||||
"path",
|
||||
),
|
||||
)
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""blueprint Settings"""
|
||||
|
||||
from celery.schedules import crontab
|
||||
|
||||
from authentik.lib.utils.time import fqdn_rand
|
||||
|
@ -1,6 +1,7 @@
|
||||
"""Blueprint helpers"""
|
||||
|
||||
from collections.abc import Callable
|
||||
from functools import wraps
|
||||
from typing import Callable
|
||||
|
||||
from django.apps import apps
|
||||
|
||||
@ -38,7 +39,7 @@ def reconcile_app(app_name: str):
|
||||
def wrapper(*args, **kwargs):
|
||||
config = apps.get_app_config(app_name)
|
||||
if isinstance(config, ManagedAppConfig):
|
||||
config.reconcile()
|
||||
config._on_startup_callback(None)
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
@@ -1,4 +1,5 @@
"""authentik managed models tests"""
from django.test import TestCase
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed

@@ -1,4 +1,5 @@
"""Test blueprints OCI"""
from django.test import TransactionTestCase
from requests_mock import Mocker

@@ -1,22 +1,23 @@
"""test packaged blueprints"""
from collections.abc import Callable
from pathlib import Path
from typing import Callable
from django.test import TransactionTestCase
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.tests import apply_blueprint
from authentik.blueprints.v1.importer import Importer
from authentik.tenants.models import Tenant
from authentik.brands.models import Brand
class TestPackaged(TransactionTestCase):
"""Empty class, test methods are added dynamically"""
@apply_blueprint("default/default-tenant.yaml")
@apply_blueprint("default/default-brand.yaml")
def test_decorator_static(self):
"""Test @apply_blueprint decorator"""
self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())
self.assertTrue(Brand.objects.filter(domain="authentik-default").exists())
def blueprint_tester(file_name: Path) -> Callable:

@@ -1,18 +1,20 @@
"""authentik managed models tests"""
from typing import Callable, Type
from collections.abc import Callable
from django.apps import apps
from django.test import TestCase
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.lib.models import SerializerModel
from authentik.providers.oauth2.models import RefreshToken
class TestModels(TestCase):
"""Test Models"""
def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
def serializer_tester_factory(test_model: type[SerializerModel]) -> Callable:
"""Test serializer"""
def tester(self: TestModels):

@@ -21,6 +23,9 @@ def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
model_class = test_model()
self.assertTrue(isinstance(model_class, SerializerModel))
self.assertIsNotNone(model_class.serializer)
if model_class.serializer.Meta().model == RefreshToken:
return
self.assertEqual(model_class.serializer.Meta().model, test_model)
return tester
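Type[SerializerModel] becoming type[SerializerModel] in the hunks above is the PEP 585 builtin-generic spelling, available without any typing import on Python 3.9+. A standalone illustration with stand-in classes:

    class SerializerModel:  # stand-in for the real base class
        pass


    class Example(SerializerModel):
        pass


    def describe(model: type[SerializerModel]) -> str:
        # The parameter is the class object itself, not an instance of it
        return model.__name__


    print(describe(Example))  # prints "Example"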
@@ -1,4 +1,5 @@
"""Test blueprints v1"""
from os import environ
from django.test import TransactionTestCase

@@ -1,4 +1,5 @@
"""Test blueprints v1 api"""
from json import loads
from tempfile import NamedTemporaryFile, mkdtemp

@@ -1,4 +1,5 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase
from authentik.blueprints.v1.importer import Importer

@@ -1,4 +1,5 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase
from authentik.blueprints.v1.importer import Importer

@@ -1,4 +1,5 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase
from authentik.blueprints.v1.importer import Importer

@@ -1,4 +1,5 @@
"""Test blueprints v1 tasks"""
from hashlib import sha512
from tempfile import NamedTemporaryFile, mkdtemp

@@ -53,7 +54,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
file.seek(0)
file_hash = sha512(file.read().encode()).hexdigest()
file.flush()
blueprints_discovery() # pylint: disable=no-value-for-parameter
blueprints_discovery()
instance = BlueprintInstance.objects.filter(name=blueprint_id).first()
self.assertEqual(instance.last_applied_hash, file_hash)
self.assertEqual(

@@ -81,7 +82,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
)
)
file.flush()
blueprints_discovery() # pylint: disable=no-value-for-parameter
blueprints_discovery()
blueprint = BlueprintInstance.objects.filter(name="foo").first()
self.assertEqual(
blueprint.last_applied_hash,

@@ -106,7 +107,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
)
)
file.flush()
blueprints_discovery() # pylint: disable=no-value-for-parameter
blueprints_discovery()
blueprint.refresh_from_db()
self.assertEqual(
blueprint.last_applied_hash,

@@ -148,7 +149,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
instance.status,
BlueprintInstanceStatus.UNKNOWN,
)
apply_blueprint(instance.pk) # pylint: disable=no-value-for-parameter
apply_blueprint(instance.pk)
instance.refresh_from_db()
self.assertEqual(instance.last_applied_hash, "")
self.assertEqual(

@@ -1,4 +1,5 @@
"""API URLs"""
from authentik.blueprints.api import BlueprintInstanceViewSet
api_urlpatterns = [

@@ -1,12 +1,14 @@
"""transfer common classes"""
from collections import OrderedDict
from collections.abc import Iterable, Mapping
from copy import copy
from dataclasses import asdict, dataclass, field, is_dataclass
from enum import Enum
from functools import reduce
from operator import ixor
from os import getenv
from typing import Any, Iterable, Literal, Mapping, Optional, Union
from typing import Any, Literal, Union
from uuid import UUID
from deepmerge import always_merger

@@ -44,7 +46,7 @@ def get_attrs(obj: SerializerModel) -> dict[str, Any]:
class BlueprintEntryState:
"""State of a single instance"""
instance: Optional[Model] = None
instance: Model | None = None
class BlueprintEntryDesiredState(Enum):
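The recurring Optional[X] to X | None rewrites in this file use the PEP 604 union syntax (Python 3.10+), which drops the typing.Optional import. In a dataclass the new spelling looks like this (field names are illustrative, mirroring the BlueprintEntryState change above):

    from dataclasses import dataclass, field


    @dataclass
    class EntryStateExample:
        """Sketch of PEP 604 annotations on dataclass fields."""

        instance: object | None = None
        attrs: dict[str, object] | None = field(default_factory=dict)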
@@ -66,14 +68,14 @@ class BlueprintEntry:
)
conditions: list[Any] = field(default_factory=list)
identifiers: dict[str, Any] = field(default_factory=dict)
attrs: Optional[dict[str, Any]] = field(default_factory=dict)
attrs: dict[str, Any] | None = field(default_factory=dict)
id: Optional[str] = None
id: str | None = None
_state: BlueprintEntryState = field(default_factory=BlueprintEntryState)
def __post_init__(self, *args, **kwargs) -> None:
self.__tag_contexts: list["YAMLTagContext"] = []
self.__tag_contexts: list[YAMLTagContext] = []
@staticmethod
def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry":

@@ -91,10 +93,10 @@ class BlueprintEntry:
attrs=all_attrs,
)
def _get_tag_context(
def get_tag_context(
self,
depth: int = 0,
context_tag_type: Optional[type["YAMLTagContext"] | tuple["YAMLTagContext", ...]] = None,
context_tag_type: type["YAMLTagContext"] | tuple["YAMLTagContext", ...] | None = None,
) -> "YAMLTagContext":
"""Get a YAMLTagContext object located at a certain depth in the tag tree"""
if depth < 0:

@@ -107,8 +109,8 @@ class BlueprintEntry:
try:
return contexts[-(depth + 1)]
except IndexError:
raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}")
except IndexError as exc:
raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}") from exc
def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any:
"""Check if we have any special tags that need handling"""
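Re-raising with "from exc", as in the IndexError branch above, records the original exception as __cause__ instead of reporting it as a second error "during handling of the above exception". The same pattern in isolation:

    def pick(contexts: list, depth: int):
        try:
            return contexts[-(depth + 1)]
        except IndexError as exc:
            # Chain the ValueError to the IndexError that triggered it
            raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}") from exc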
@@ -169,7 +171,7 @@ class Blueprint:
entries: list[BlueprintEntry] = field(default_factory=list)
context: dict = field(default_factory=dict)
metadata: Optional[BlueprintMetadata] = field(default=None)
metadata: BlueprintMetadata | None = field(default=None)
class YAMLTag:

@@ -217,7 +219,7 @@ class Env(YAMLTag):
"""Lookup environment variable with optional default"""
key: str
default: Optional[Any]
default: Any | None
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
super().__init__()

@@ -236,7 +238,7 @@ class Context(YAMLTag):
"""Lookup key from instance context"""
key: str
default: Optional[Any]
default: Any | None
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
super().__init__()

@@ -280,7 +282,7 @@ class Format(YAMLTag):
try:
return self.format_string % tuple(args)
except TypeError as exc:
raise EntryInvalidError.from_entry(exc, entry)
raise EntryInvalidError.from_entry(exc, entry) from exc
class Find(YAMLTag):

@@ -365,7 +367,7 @@ class Condition(YAMLTag):
comparator = self._COMPARATORS[self.mode.upper()]
return comparator(tuple(bool(x) for x in args))
except (TypeError, KeyError) as exc:
raise EntryInvalidError.from_entry(exc, entry)
raise EntryInvalidError.from_entry(exc, entry) from exc
class If(YAMLTag):

@@ -397,7 +399,7 @@ class If(YAMLTag):
blueprint,
)
except TypeError as exc:
raise EntryInvalidError.from_entry(exc, entry)
raise EntryInvalidError.from_entry(exc, entry) from exc
class Enumerate(YAMLTag, YAMLTagContext):

@@ -411,9 +413,7 @@ class Enumerate(YAMLTag, YAMLTagContext):
"SEQ": (list, lambda a, b: [*a, b]),
"MAP": (
dict,
lambda a, b: always_merger.merge(
a, {b[0]: b[1]} if isinstance(b, (tuple, list)) else b
),
lambda a, b: always_merger.merge(a, {b[0]: b[1]} if isinstance(b, tuple | list) else b),
),
}
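isinstance(b, tuple | list) in the hunk above relies on isinstance accepting PEP 604 unions on Python 3.10+, replacing the older tuple-of-types form; both spellings are equivalent:

    value = ("key", "value")

    print(isinstance(value, (tuple, list)))  # pre-3.10 spelling, True
    print(isinstance(value, tuple | list))   # PEP 604 spelling, also True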
@@ -455,7 +455,7 @@ class Enumerate(YAMLTag, YAMLTagContext):
try:
output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()]
except KeyError as exc:
raise EntryInvalidError.from_entry(exc, entry)
raise EntryInvalidError.from_entry(exc, entry) from exc
result = output_class()

@@ -483,13 +483,13 @@ class EnumeratedItem(YAMLTag):
_SUPPORTED_CONTEXT_TAGS = (Enumerate,)
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
def __init__(self, _loader: "BlueprintLoader", node: ScalarNode) -> None:
super().__init__()
self.depth = int(node.value)
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
try:
context_tag: Enumerate = entry._get_tag_context(
context_tag: Enumerate = entry.get_tag_context(
depth=self.depth,
context_tag_type=EnumeratedItem._SUPPORTED_CONTEXT_TAGS,
)

@@ -499,9 +499,11 @@ class EnumeratedItem(YAMLTag):
f"{self.__class__.__name__} tags are only usable "
f"inside an {Enumerate.__name__} tag",
entry,
)
) from exc
raise EntryInvalidError.from_entry(f"{self.__class__.__name__} tag: {exc}", entry)
raise EntryInvalidError.from_entry(
f"{self.__class__.__name__} tag: {exc}", entry
) from exc
return context_tag.get_context(entry, blueprint)

@@ -514,8 +516,8 @@ class Index(EnumeratedItem):
try:
return context[0]
except IndexError: # pragma: no cover
raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry)
except IndexError as exc: # pragma: no cover
raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc
class Value(EnumeratedItem):

@@ -526,8 +528,8 @@ class Value(EnumeratedItem):
try:
return context[1]
except IndexError: # pragma: no cover
raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry)
except IndexError as exc: # pragma: no cover
raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc
class BlueprintDumper(SafeDumper):

@@ -554,7 +556,11 @@ class BlueprintDumper(SafeDumper):
def factory(items):
final_dict = dict(items)
# Remove internal state variables
final_dict.pop("_state", None)
# Future-proof to only remove the ID if we don't set a value
if "id" in final_dict and final_dict.get("id") is None:
final_dict.pop("id")
return final_dict
data = asdict(data, dict_factory=factory)

@@ -581,15 +587,20 @@ class BlueprintLoader(SafeLoader):
class EntryInvalidError(SentryIgnoredException):
"""Error raised when an entry is invalid"""
entry_model: Optional[str]
entry_id: Optional[str]
validation_error: Optional[ValidationError]
entry_model: str | None
entry_id: str | None
validation_error: ValidationError | None
serializer: Serializer | None = None
def __init__(self, *args: object, validation_error: Optional[ValidationError] = None) -> None:
def __init__(
self, *args: object, validation_error: ValidationError | None = None, **kwargs
) -> None:
super().__init__(*args)
self.entry_model = None
self.entry_id = None
self.validation_error = validation_error
for key, value in kwargs.items():
setattr(self, key, value)
@staticmethod
def from_entry(
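The widened EntryInvalidError.__init__ above stores any extra keyword arguments on the exception via setattr, which is how later hunks attach entry and serializer to it. A reduced sketch of just that mechanism (illustration only, not the full class):

    class EntryInvalidErrorSketch(Exception):
        """Extra keyword arguments become attributes on the exception instance."""

        def __init__(self, *args: object, **kwargs) -> None:
            super().__init__(*args)
            for key, value in kwargs.items():
                setattr(self, key, value)


    err = EntryInvalidErrorSketch("Serializer errors", entry="my-entry", serializer=None)
    print(err.entry)  # prints "my-entry"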
@@ -1,5 +1,6 @@
"""Blueprint exporter"""
from typing import Iterable
from collections.abc import Iterable
from uuid import UUID
from django.apps import apps

@@ -7,7 +8,6 @@ from django.contrib.auth import get_user_model
from django.db.models import Model, Q, QuerySet
from django.utils.timezone import now
from django.utils.translation import gettext as _
from guardian.shortcuts import get_anonymous_user
from yaml import dump
from authentik.blueprints.v1.common import (

@@ -48,7 +48,7 @@ class Exporter:
"""Return a queryset for `model`. Can be used to filter some
objects on some models"""
if model == get_user_model():
return model.objects.exclude(pk=get_anonymous_user().pk)
return model.objects.exclude_anonymous()
return model.objects.all()
def _pre_export(self, blueprint: Blueprint):

@@ -59,7 +59,7 @@ class Exporter:
blueprint = Blueprint()
self._pre_export(blueprint)
blueprint.metadata = BlueprintMetadata(
name=_("authentik Export - %(date)s" % {"date": str(now())}),
name=_("authentik Export - {date}".format_map({"date": str(now())})),
labels={
LABEL_AUTHENTIK_GENERATED: "true",
},

@@ -74,7 +74,7 @@ class Exporter:
class FlowExporter(Exporter):
"""Exporter customised to only return objects related to `flow`"""
"""Exporter customized to only return objects related to `flow`"""
flow: Flow
with_policies: bool

@@ -1,22 +1,24 @@
"""Blueprint importer"""
from contextlib import contextmanager
from copy import deepcopy
from typing import Any, Optional
from typing import Any
from dacite.config import Config
from dacite.core import from_dict
from dacite.exceptions import DaciteError
from deepmerge import always_merger
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import FieldError
from django.db.models import Model
from django.db.models.query_utils import Q
from django.db.transaction import atomic
from django.db.utils import IntegrityError
from guardian.models import UserObjectPermission
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import BaseSerializer, Serializer
from structlog.stdlib import BoundLogger, get_logger
from structlog.testing import capture_logs
from structlog.types import EventDict
from yaml import load
from authentik.blueprints.v1.common import (

@@ -35,27 +37,51 @@ from authentik.core.models import (
Source,
UserSourceConnection,
)
from authentik.enterprise.license import LicenseKey
from authentik.enterprise.models import LicenseUsage
from authentik.enterprise.providers.google_workspace.models import (
GoogleWorkspaceProviderGroup,
GoogleWorkspaceProviderUser,
)
from authentik.enterprise.providers.microsoft_entra.models import (
MicrosoftEntraProviderGroup,
MicrosoftEntraProviderUser,
)
from authentik.enterprise.providers.rac.models import ConnectionToken
from authentik.events.logs import LogEvent, capture_logs
from authentik.events.models import SystemTask
from authentik.events.utils import cleanse_dict
from authentik.flows.models import FlowToken, Stage
from authentik.lib.models import SerializerModel
from authentik.lib.sentry import SentryIgnoredException
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel
from authentik.policies.reputation.models import Reputation
from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
from authentik.tenants.models import Tenant
# Context set when the serializer is created in a blueprint context
# Update website/developer-docs/blueprints/v1/models.md when used
SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"
def is_model_allowed(model: type[Model]) -> bool:
"""Check if model is allowed"""
# pylint: disable=imported-auth-user
def excluded_models() -> list[type[Model]]:
"""Return a list of all excluded models that shouldn't be exposed via API
or other means (internal only, base classes, non-used objects, etc)"""
from django.contrib.auth.models import Group as DjangoGroup
from django.contrib.auth.models import User as DjangoUser
excluded_models = (
return (
# Django only classes
DjangoUser,
DjangoGroup,
ContentType,
Permission,
UserObjectPermission,
# Base classes
Provider,
Source,

@@ -68,9 +94,31 @@ def is_model_allowed(model: type[Model]) -> bool:
# Classes that have other dependencies
AuthenticatedSession,
# Classes which are only internally managed
# FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin
FlowToken,
LicenseUsage,
SCIMProviderGroup,
SCIMProviderUser,
Tenant,
SystemTask,
ConnectionToken,
AuthorizationCode,
AccessToken,
RefreshToken,
Reputation,
WebAuthnDeviceType,
SCIMSourceUser,
SCIMSourceGroup,
GoogleWorkspaceProviderUser,
GoogleWorkspaceProviderGroup,
MicrosoftEntraProviderUser,
MicrosoftEntraProviderGroup,
)
return model not in excluded_models and issubclass(model, (SerializerModel, BaseMetaModel))
def is_model_allowed(model: type[Model]) -> bool:
"""Check if model is allowed"""
return model not in excluded_models() and issubclass(model, SerializerModel | BaseMetaModel)
class DoRollback(SentryIgnoredException):
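Splitting the exclusion list into excluded_models() leaves is_model_allowed as a thin predicate over Django's model registry. A hedged sketch of how such a predicate is typically driven (the helper below is illustrative and not part of this diff):

    from collections.abc import Callable

    from django.apps import apps
    from django.db.models import Model


    def allowed_model_names(predicate: Callable[[type[Model]], bool]) -> list[str]:
        """List registered models that pass the given predicate, e.g. is_model_allowed."""
        return sorted(
            f"{model._meta.app_label}.{model._meta.model_name}"
            for model in apps.get_models()
            if predicate(model)
        )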
@@ -94,16 +142,20 @@ class Importer:
logger: BoundLogger
_import: Blueprint
def __init__(self, blueprint: Blueprint, context: Optional[dict] = None):
def __init__(self, blueprint: Blueprint, context: dict | None = None):
self.__pk_map: dict[Any, Model] = {}
self._import = blueprint
self.logger = get_logger()
ctx = {}
ctx = self.default_context()
always_merger.merge(ctx, self._import.context)
if context:
always_merger.merge(ctx, context)
self._import.context = ctx
def default_context(self):
"""Default context"""
return {"goauthentik.io/enterprise/licensed": LicenseKey.get_total().is_valid()}
@staticmethod
def from_string(yaml_input: str, context: dict | None = None) -> "Importer":
"""Parse YAML string and create blueprint importer from it"""
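The constructor above now layers three contexts with deepmerge's always_merger, so later merges win: built-in defaults first, then the blueprint's own context, then the caller-supplied context. The precedence can be seen directly:

    from deepmerge import always_merger

    ctx = {"goauthentik.io/enterprise/licensed": False, "color": "red"}  # defaults
    always_merger.merge(ctx, {"color": "blue"})   # blueprint context overrides defaults
    always_merger.merge(ctx, {"color": "green"})  # caller context overrides both
    print(ctx["color"])  # prints "green"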
@@ -126,14 +178,14 @@ class Importer:
def updater(value) -> Any:
if value in self.__pk_map:
self.logger.debug("updating reference in entry", value=value)
self.logger.debug("Updating reference in entry", value=value)
return self.__pk_map[value]
return value
for key, value in attrs.items():
try:
if isinstance(value, dict):
for idx, _inner_key in enumerate(value):
for _, _inner_key in enumerate(value):
value[_inner_key] = updater(value[_inner_key])
elif isinstance(value, list):
for idx, _inner_value in enumerate(value):

@@ -162,8 +214,7 @@ class Importer:
return main_query | sub_query
# pylint: disable-msg=too-many-locals
def _validate_single(self, entry: BlueprintEntry) -> Optional[BaseSerializer]:
def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer | None:
"""Validate a single entry"""
if not entry.check_all_conditions_match(self._import):
self.logger.debug("One or more conditions of this entry are not fulfilled, skipping")

@@ -216,7 +267,7 @@ class Importer:
model_instance = existing_models.first()
if not isinstance(model(), BaseMetaModel) and model_instance:
self.logger.debug(
"initialise serializer with instance",
"Initialise serializer with instance",
model=model,
instance=model_instance,
pk=model_instance.pk,

@@ -226,14 +277,14 @@ class Importer:
elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED:
raise EntryInvalidError.from_entry(
(
f"state is set to {BlueprintEntryDesiredState.MUST_CREATED} "
f"State is set to {BlueprintEntryDesiredState.MUST_CREATED} "
"and object exists already",
),
entry,
)
else:
self.logger.debug(
"initialised new serializer instance",
"Initialised new serializer instance",
model=model,
**cleanse_dict(updated_identifiers),
)

@@ -245,7 +296,10 @@ class Importer:
try:
full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import))
except ValueError as exc:
raise EntryInvalidError.from_entry(exc, entry) from exc
raise EntryInvalidError.from_entry(
exc,
entry,
) from exc
always_merger.merge(full_data, updated_identifiers)
serializer_kwargs["data"] = full_data

@@ -262,6 +316,7 @@ class Importer:
f"Serializer errors {serializer.errors}",
validation_error=exc,
entry=entry,
serializer=serializer,
) from exc
return serializer

@@ -286,20 +341,22 @@ class Importer:
model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
except LookupError:
self.logger.warning(
"app or model does not exist", app=model_app_label, model=model_name
"App or Model does not exist", app=model_app_label, model=model_name
)
return False
# Validate each single entry
serializer = None
try:
serializer = self._validate_single(entry)
except EntryInvalidError as exc:
# For deleting objects we don't need the serializer to be valid
if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT:
continue
self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
if raise_errors:
raise exc
return False
serializer = exc.serializer
else:
self.logger.warning(f"Entry invalid: {exc}", entry=entry, error=exc)
if raise_errors:
raise exc
return False
if not serializer:
continue

@@ -316,27 +373,27 @@ class Importer:
and state == BlueprintEntryDesiredState.CREATED
):
self.logger.debug(
"instance exists, skipping",
"Instance exists, skipping",
model=model,
instance=instance,
pk=instance.pk,
)
else:
instance = serializer.save()
self.logger.debug("updated model", model=instance)
self.logger.debug("Updated model", model=instance)
if "pk" in entry.identifiers:
self.__pk_map[entry.identifiers["pk"]] = instance.pk
entry._state = BlueprintEntryState(instance)
elif state == BlueprintEntryDesiredState.ABSENT:
instance: Optional[Model] = serializer.instance
instance: Model | None = serializer.instance
if instance.pk:
instance.delete()
self.logger.debug("deleted model", mode=instance)
self.logger.debug("Deleted model", mode=instance)
continue
self.logger.debug("entry to delete with no instance, skipping")
self.logger.debug("Entry to delete with no instance, skipping")
return True
def validate(self, raise_validation_errors=False) -> tuple[bool, list[EventDict]]:
def validate(self, raise_validation_errors=False) -> tuple[bool, list[LogEvent]]:
"""Validate loaded blueprint export, ensure all models are allowed
and serializers have no errors"""
self.logger.debug("Starting blueprint import validation")

@@ -350,9 +407,7 @@ class Importer:
):
successful = self._apply_models(raise_errors=raise_validation_errors)
if not successful:
self.logger.debug("Blueprint validation failed")
for log in logs:
getattr(self.logger, log.get("log_level"))(**log)
self.logger.warning("Blueprint validation failed")
self.logger.debug("Finished blueprint import validation")
self._import = orig_import
return successful, logs
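validate() now returns the captured log events alongside the success flag. The underlying idea is structlog's log capturing; a minimal sketch with the library's testing helper (authentik's own capture_logs in authentik.events.logs wraps a similar mechanism and yields LogEvent objects instead of plain dicts):

    import structlog
    from structlog.testing import capture_logs

    logger = structlog.get_logger()

    with capture_logs() as logs:
        logger.debug("Starting blueprint import validation")
        logger.warning("Blueprint validation failed")

    # Each captured entry is a dict carrying at least "event" and "log_level"
    print([entry["event"] for entry in logs])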
@@ -1,12 +1,13 @@
"""Apply Blueprint meta model"""
from typing import TYPE_CHECKING
from rest_framework.exceptions import ValidationError
from rest_framework.fields import BooleanField, JSONField
from rest_framework.fields import BooleanField
from structlog.stdlib import get_logger
from authentik.blueprints.v1.meta.registry import BaseMetaModel, MetaResult, registry
from authentik.core.api.utils import PassiveSerializer, is_dict
from authentik.core.api.utils import JSONDictField, PassiveSerializer
if TYPE_CHECKING:
from authentik.blueprints.models import BlueprintInstance

@@ -17,7 +18,7 @@ LOGGER = get_logger()
class ApplyBlueprintMetaSerializer(PassiveSerializer):
"""Serializer for meta apply blueprint model"""
identifiers = JSONField(validators=[is_dict])
identifiers = JSONDictField()
required = BooleanField(default=True)
# We cannot override `instance` as that will confuse rest_framework

@@ -42,7 +43,7 @@ class ApplyBlueprintMetaSerializer(PassiveSerializer):
LOGGER.info("Blueprint does not exist, but not required")
return MetaResult()
LOGGER.debug("Applying blueprint from meta model", blueprint=self.blueprint_instance)
# pylint: disable=no-value-for-parameter
apply_blueprint(str(self.blueprint_instance.pk))
return MetaResult()
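JSONField(validators=[is_dict]) collapsing into JSONDictField() suggests a small DRF field that only accepts JSON objects. The real field lives in authentik.core.api.utils; a hedged sketch of what such a field can look like:

    from rest_framework.exceptions import ValidationError
    from rest_framework.fields import JSONField


    class JSONDictFieldSketch(JSONField):
        """JSON field that rejects anything that is not a mapping (JSON object)."""

        def to_internal_value(self, data):
            value = super().to_internal_value(data)
            if not isinstance(value, dict):
                raise ValidationError("Value must be a JSON object")
            return value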
@@ -1,4 +1,5 @@
"""Base models"""
from django.apps import apps
from django.db.models import Model
from rest_framework.serializers import Serializer

@@ -7,15 +8,15 @@ from rest_framework.serializers import Serializer
class BaseMetaModel(Model):
"""Base models"""
class Meta:
abstract = True
@staticmethod
def serializer() -> Serializer:
"""Serializer similar to SerializerModel, but as a static method since
this is an abstract model"""
raise NotImplementedError
class Meta:
abstract = True
class MetaResult:
"""Result returned by Meta Models' serializers. Empty class but we can't return none as

@@ -1,4 +1,5 @@
"""OCI Client"""
from typing import Any
from urllib.parse import ParseResult, urlparse

@@ -1,8 +1,9 @@
"""v1 blueprints tasks"""
from dataclasses import asdict, dataclass, field
from hashlib import sha512
from pathlib import Path
from typing import Optional
from sys import platform
from dacite.core import from_dict
from django.db import DatabaseError, InternalError, ProgrammingError

@@ -29,15 +30,13 @@ from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, E
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.events.monitored_tasks import (
MonitoredTask,
TaskResult,
TaskResultStatus,
prefill_task,
)
from authentik.events.logs import capture_logs
from authentik.events.models import TaskStatus
from authentik.events.system_tasks import SystemTask, prefill_task
from authentik.events.utils import sanitize_dict
from authentik.lib.config import CONFIG
from authentik.root.celery import CELERY_APP
from authentik.tenants.models import Tenant
LOGGER = get_logger()
_file_watcher_started = False

@@ -51,18 +50,23 @@ class BlueprintFile:
version: int
hash: str
last_m: int
meta: Optional[BlueprintMetadata] = field(default=None)
meta: BlueprintMetadata | None = field(default=None)
def start_blueprint_watcher():
"""Start blueprint watcher, if it's not running already."""
# This function might be called twice since it's called on celery startup
# pylint: disable=global-statement
global _file_watcher_started
global _file_watcher_started # noqa: PLW0603
if _file_watcher_started:
return
observer = Observer()
observer.schedule(BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True)
kwargs = {}
if platform.startswith("linux"):
kwargs["event_filter"] = (FileCreatedEvent, FileModifiedEvent)
observer.schedule(
BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True, **kwargs
)
observer.start()
_file_watcher_started = True
@@ -70,21 +74,36 @@ def start_blueprint_watcher():
class BlueprintEventHandler(FileSystemEventHandler):
"""Event handler for blueprint events"""
def on_any_event(self, event: FileSystemEvent):
if not isinstance(event, (FileCreatedEvent, FileModifiedEvent)):
return
# We only ever get creation and modification events.
# See the creation of the Observer instance above for the event filtering.
# Even though we filter to only get file events, we might still get
# directory events as some implementations such as inotify do not support
# filtering on file/directory.
def dispatch(self, event: FileSystemEvent) -> None:
"""Call specific event handler method. Ignores directory changes."""
if event.is_directory:
return
if isinstance(event, FileCreatedEvent):
LOGGER.debug("new blueprint file created, starting discovery")
blueprints_discovery.delay()
if isinstance(event, FileModifiedEvent):
path = Path(event.src_path)
root = Path(CONFIG.get("blueprints_dir")).absolute()
rel_path = str(path.relative_to(root))
for instance in BlueprintInstance.objects.filter(path=rel_path):
LOGGER.debug("modified blueprint file, starting apply", instance=instance)
apply_blueprint.delay(instance.pk.hex)
return None
return super().dispatch(event)
def on_created(self, event: FileSystemEvent):
"""Process file creation"""
LOGGER.debug("new blueprint file created, starting discovery")
for tenant in Tenant.objects.filter(ready=True):
with tenant:
blueprints_discovery.delay()
def on_modified(self, event: FileSystemEvent):
"""Process file modification"""
path = Path(event.src_path)
root = Path(CONFIG.get("blueprints_dir")).absolute()
rel_path = str(path.relative_to(root))
for tenant in Tenant.objects.filter(ready=True):
with tenant:
for instance in BlueprintInstance.objects.filter(path=rel_path, enabled=True):
LOGGER.debug("modified blueprint file, starting apply", instance=instance)
apply_blueprint.delay(instance.pk.hex)
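The handler above now overrides watchdog's dispatch() to drop directory events once, instead of re-checking the event type in every callback. The bare pattern, outside of authentik (paths and prints are illustrative):

    from watchdog.events import FileSystemEvent, FileSystemEventHandler
    from watchdog.observers import Observer


    class YamlHandler(FileSystemEventHandler):
        """Skip directory events early, then let watchdog route to on_created/on_modified."""

        def dispatch(self, event: FileSystemEvent) -> None:
            if event.is_directory:
                return
            super().dispatch(event)

        def on_created(self, event: FileSystemEvent) -> None:
            print(f"created: {event.src_path}")

        def on_modified(self, event: FileSystemEvent) -> None:
            print(f"modified: {event.src_path}")


    observer = Observer()
    observer.schedule(YamlHandler(), "/tmp", recursive=True)
    observer.start()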
@CELERY_APP.task(

@@ -98,57 +117,49 @@ def blueprints_find_dict():
return blueprints
def blueprints_find():
def blueprints_find() -> list[BlueprintFile]:
"""Find blueprints and return valid ones"""
blueprints = []
root = Path(CONFIG.get("blueprints_dir"))
for path in root.rglob("**/*.yaml"):
rel_path = path.relative_to(root)
# Check if any part in the path starts with a dot and assume a hidden file
if any(part for part in path.parts if part.startswith(".")):
continue
LOGGER.debug("found blueprint", path=str(path))
with open(path, "r", encoding="utf-8") as blueprint_file:
with open(path, encoding="utf-8") as blueprint_file:
try:
raw_blueprint = load(blueprint_file.read(), BlueprintLoader)
except YAMLError as exc:
raw_blueprint = None
LOGGER.warning("failed to parse blueprint", exc=exc, path=str(path))
LOGGER.warning("failed to parse blueprint", exc=exc, path=str(rel_path))
if not raw_blueprint:
continue
metadata = raw_blueprint.get("metadata", None)
version = raw_blueprint.get("version", 1)
if version != 1:
LOGGER.warning("invalid blueprint version", version=version, path=str(path))
LOGGER.warning("invalid blueprint version", version=version, path=str(rel_path))
continue
file_hash = sha512(path.read_bytes()).hexdigest()
blueprint = BlueprintFile(
str(path.relative_to(root)), version, file_hash, int(path.stat().st_mtime)
)
blueprint = BlueprintFile(str(rel_path), version, file_hash, int(path.stat().st_mtime))
blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
blueprints.append(blueprint)
LOGGER.debug(
"parsed & loaded blueprint",
hash=file_hash,
path=str(path),
)
return blueprints

@CELERY_APP.task(
throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
throws=(DatabaseError, ProgrammingError, InternalError), base=SystemTask, bind=True
)
@prefill_task
def blueprints_discovery(self: MonitoredTask):
def blueprints_discovery(self: SystemTask, path: str | None = None):
"""Find blueprints and check if they need to be created in the database"""
count = 0
for blueprint in blueprints_find():
if path and blueprint.path != path:
continue
check_blueprint_v1_file(blueprint)
count += 1
self.set_status(
TaskResult(
TaskResultStatus.SUCCESSFUL,
messages=[_("Successfully imported %(count)d files." % {"count": count})],
)
TaskStatus.SUCCESSFUL, _("Successfully imported %(count)d files." % {"count": count})
)

@@ -171,18 +182,22 @@ def check_blueprint_v1_file(blueprint: BlueprintFile):
metadata={},
)
instance.save()
LOGGER.info(
"Creating new blueprint instance from file", instance=instance, path=instance.path
)
if instance.last_applied_hash != blueprint.hash:
LOGGER.info("Applying blueprint due to changed file", instance=instance, path=instance.path)
apply_blueprint.delay(str(instance.pk))
@CELERY_APP.task(
bind=True,
base=MonitoredTask,
base=SystemTask,
)
def apply_blueprint(self: MonitoredTask, instance_pk: str):
def apply_blueprint(self: SystemTask, instance_pk: str):
"""Apply single blueprint"""
self.save_on_success = False
instance: Optional[BlueprintInstance] = None
instance: BlueprintInstance | None = None
try:
instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first()
if not instance or not instance.enabled:

@@ -197,29 +212,30 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
if not valid:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskResult(TaskResultStatus.ERROR, [x["event"] for x in logs]))
return
applied = importer.apply()
if not applied:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskResult(TaskResultStatus.ERROR, "Failed to apply"))
self.set_status(TaskStatus.ERROR, *logs)
return
with capture_logs() as logs:
applied = importer.apply()
if not applied:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskStatus.ERROR, *logs)
return
instance.status = BlueprintInstanceStatus.SUCCESSFUL
instance.last_applied_hash = file_hash
instance.last_applied = now()
self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL))
self.set_status(TaskStatus.SUCCESSFUL)
except (
OSError,
DatabaseError,
ProgrammingError,
InternalError,
IOError,
BlueprintRetrievalFailed,
EntryInvalidError,
) as exc:
if instance:
instance.status = BlueprintInstanceStatus.ERROR
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
self.set_error(exc)
finally:
if instance:
instance.save()
@@ -1,4 +1,5 @@
"""Serializer for tenant models"""
"""Serializer for brands models"""
from typing import Any
from django.db import models

@@ -10,39 +11,39 @@ from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.permissions import AllowAny
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer
from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet
from authentik.api.authorization import SecretKeyFilter
from authentik.brands.models import Brand
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer
from authentik.lib.config import CONFIG
from authentik.tenants.models import Tenant
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.tenants.utils import get_current_tenant
class FooterLinkSerializer(PassiveSerializer):
"""Links returned in Config API"""
href = CharField(read_only=True)
href = CharField(read_only=True, allow_null=True)
name = CharField(read_only=True)
class TenantSerializer(ModelSerializer):
"""Tenant Serializer"""
class BrandSerializer(ModelSerializer):
"""Brand Serializer"""
def validate(self, attrs: dict[str, Any]) -> dict[str, Any]:
if attrs.get("default", False):
tenants = Tenant.objects.filter(default=True)
brands = Brand.objects.filter(default=True)
if self.instance:
tenants = tenants.exclude(pk=self.instance.pk)
if tenants.exists():
raise ValidationError({"default": "Only a single Tenant can be set as default."})
brands = brands.exclude(pk=self.instance.pk)
if brands.exists():
raise ValidationError({"default": "Only a single brand can be set as default."})
return super().validate(attrs)
class Meta:
model = Tenant
model = Brand
fields = [
"tenant_uuid",
"brand_uuid",
"domain",
"default",
"branding_title",

@@ -54,10 +55,14 @@ class TenantSerializer(ModelSerializer):
"flow_unenrollment",
"flow_user_settings",
"flow_device_code",
"event_retention",
"web_certificate",
"attributes",
]
extra_kwargs = {
# TODO: This field isn't unique on the database which is hard to backport
# hence we just validate the uniqueness here
"domain": {"validators": [UniqueValidator(Brand.objects.all())]},
}
class Themes(models.TextChoices):

@@ -68,8 +73,13 @@ class Themes(models.TextChoices):
DARK = "dark"
class CurrentTenantSerializer(PassiveSerializer):
"""Partial tenant information for styling"""
def get_default_ui_footer_links():
"""Get default UI footer links based on current tenant settings"""
return get_current_tenant().footer_links
class CurrentBrandSerializer(PassiveSerializer):
"""Partial brand information for styling"""
matched_domain = CharField(source="domain")
branding_title = CharField()

@@ -78,7 +88,7 @@ class CurrentTenantSerializer(PassiveSerializer):
ui_footer_links = ListField(
child=FooterLinkSerializer(),
read_only=True,
default=CONFIG.get("footer_links", []),
default=get_default_ui_footer_links,
)
ui_theme = ChoiceField(
choices=Themes.choices,

@@ -97,18 +107,18 @@ class CurrentTenantSerializer(PassiveSerializer):
default_locale = CharField(read_only=True)
class TenantViewSet(UsedByMixin, ModelViewSet):
"""Tenant Viewset"""
class BrandViewSet(UsedByMixin, ModelViewSet):
"""Brand Viewset"""
queryset = Tenant.objects.all()
serializer_class = TenantSerializer
queryset = Brand.objects.all()
serializer_class = BrandSerializer
search_fields = [
"domain",
"branding_title",
"web_certificate__name",
]
filterset_fields = [
"tenant_uuid",
"brand_uuid",
"domain",
"default",
"branding_title",

@@ -120,7 +130,6 @@ class TenantViewSet(UsedByMixin, ModelViewSet):
"flow_unenrollment",
"flow_user_settings",
"flow_device_code",
"event_retention",
"web_certificate",
]
ordering = ["domain"]

@@ -128,10 +137,10 @@ class TenantViewSet(UsedByMixin, ModelViewSet):
filter_backends = [SecretKeyFilter, OrderingFilter, SearchFilter]
@extend_schema(
responses=CurrentTenantSerializer(many=False),
responses=CurrentBrandSerializer(many=False),
)
@action(methods=["GET"], detail=False, permission_classes=[AllowAny])
def current(self, request: Request) -> Response:
"""Get current tenant"""
tenant: Tenant = request._request.tenant
return Response(CurrentTenantSerializer(tenant).data)
"""Get current brand"""
brand: Brand = request._request.brand
return Response(CurrentBrandSerializer(brand).data)
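In the BrandSerializer hunks above, extra_kwargs bolts a DRF UniqueValidator onto domain because, per the TODO comment, the column lacks a database-level unique constraint. In isolation the validator is wired like this (serializer name is illustrative):

    from rest_framework import serializers
    from rest_framework.validators import UniqueValidator

    from authentik.brands.models import Brand


    class BrandDomainSerializer(serializers.ModelSerializer):
        class Meta:
            model = Brand
            fields = ["domain"]
            extra_kwargs = {
                "domain": {"validators": [UniqueValidator(queryset=Brand.objects.all())]},
            }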
authentik/brands/apps.py (new file, 11 lines)
@@ -0,0 +1,11 @@
"""authentik brands app"""

from django.apps import AppConfig


class AuthentikBrandsConfig(AppConfig):
    """authentik Brand app"""

    name = "authentik.brands"
    label = "authentik_brands"
    verbose_name = "authentik Brands"
Some files were not shown because too many files have changed in this diff.