Compare commits
1926 Commits
version-20...version/20
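The same comparison can be viewed locally with plain git. A minimal sketch, assuming a clone of the repository in which the two refs shown above (`version-20` and `version/20`) exist under exactly those names:

```shell
# Commits reachable from version/20 but not from version-20
git log --oneline version-20..version/20

# File changes for the range, as rendered in the diff below
git diff version-20...version/20
```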
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 2022.12.2
current_version = 2023.8.5
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
@@ -6,3 +6,5 @@ dist/**
build/**
build_docs/**
Dockerfile
authentik/enterprise
blueprints/local
@@ -7,8 +7,14 @@ charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[html]
[*.html]
indent_size = 2

[yaml]
[*.{yaml,yml}]
indent_size = 2

[*.go]
indent_style = tab

[Makefile]
indent_style = tab
.github/ISSUE_TEMPLATE/bug_report.md (11 changes, vendored)
@@ -1,10 +1,9 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
title: ""
labels: bug
assignees: ''

assignees: ""
---

**Describe the bug**
@@ -12,6 +11,7 @@ A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:

1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
@@ -27,8 +27,9 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively

**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]

- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]

**Additional context**
Add any other context about the problem here.
.github/ISSUE_TEMPLATE/feature_request.md (5 changes, vendored)
@@ -1,10 +1,9 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
title: ""
labels: enhancement
assignees: ''

assignees: ""
---

**Is your feature request related to a problem? Please describe.**
.github/ISSUE_TEMPLATE/hackathon_idea.md (17 changes, vendored, new file)
@@ -0,0 +1,17 @@
---
name: Hackathon Idea
about: Propose an idea for the hackathon
title: ""
labels: hackathon
assignees: ""
---

**Describe the idea**

A clear concise description of the idea you want to implement

You're also free to work on existing GitHub issues, whether they be feature requests or bugs, just link the existing GitHub issue here.

<!-- Don't modify below here -->

If you want to help working on this idea or want to contribute in any other way, react to this issue with a :rocket:
.github/ISSUE_TEMPLATE/question.md (10 changes, vendored)
@@ -1,10 +1,9 @@
---
name: Question
about: Ask a question about a feature or specific configuration
title: ''
title: ""
labels: question
assignees: ''

assignees: ""
---

**Describe your question/**
@@ -20,8 +19,9 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively

**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]

- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]

**Additional context**
Add any other context about the problem here.
@@ -1,5 +1,5 @@
name: 'Comment usage instructions on PRs'
description: 'Comment usage instructions on PRs'
name: "Comment usage instructions on PRs"
description: "Comment usage instructions on PRs"

inputs:
tag:
@@ -17,7 +17,7 @@ runs:
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
comment-author: "github-actions[bot]"
body-includes: authentik PR Installation instructions
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v2
@@ -38,6 +38,14 @@ runs:
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```

For arm64, use these values:

```shell
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
AUTHENTIK_TAG=${{ inputs.tag }}-arm64
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```

Afterwards, run the upgrade commands from the latest release notes.
</details>
<details>
@@ -54,6 +62,17 @@ runs:
tag: ${{ inputs.tag }}
```

For arm64, use these values:

```yaml
authentik:
outposts:
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
image:
repository: ghcr.io/goauthentik/dev-server
tag: ${{ inputs.tag }}-arm64
```

Afterwards, run the upgrade commands from the latest release notes.
</details>
edit-mode: replace
.github/actions/docker-push-variables/action.yml (12 changes, vendored)
@@ -1,5 +1,5 @@
name: 'Prepare docker environment variables'
description: 'Prepare docker environment variables'
name: "Prepare docker environment variables"
description: "Prepare docker environment variables"

outputs:
shouldBuild:
@@ -17,6 +17,9 @@ outputs:
sha:
description: "sha"
value: ${{ steps.ev.outputs.sha }}
shortHash:
description: "shortHash"
value: ${{ steps.ev.outputs.shortHash }}
version:
description: "version"
value: ${{ steps.ev.outputs.version }}
@@ -48,11 +51,14 @@ runs:
version_family = ".".join(version.split(".")[:-1])
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")

sha = os.environ["GITHUB_SHA"] if not "${{ github.event.pull_request.head.sha }}" else "${{ github.event.pull_request.head.sha }}"

with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print("branchName=%s" % branch_name, file=_output)
print("branchNameContainer=%s" % safe_branch_name, file=_output)
print("timestamp=%s" % int(time()), file=_output)
print("sha=%s" % os.environ["GITHUB_SHA"], file=_output)
print("sha=%s" % sha, file=_output)
print("shortHash=%s" % sha[:7], file=_output)
print("shouldBuild=%s" % should_build, file=_output)
print("version=%s" % version, file=_output)
print("versionFamily=%s" % version_family, file=_output)
.github/actions/setup/action.yml (22 changes, vendored)
@@ -1,5 +1,10 @@
name: 'Setup authentik testing environment'
description: 'Setup authentik testing environment'
name: "Setup authentik testing environment"
description: "Setup authentik testing environment"

inputs:
postgresql_tag:
description: "Optional postgresql image tag"
default: "12"

runs:
using: "composite"
@@ -9,21 +14,22 @@ runs:
run: |
pipx install poetry || true
sudo apt update
sudo apt install -y libxmlsec1-dev pkg-config gettext
sudo apt install -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry
uses: actions/setup-python@v3
with:
python-version: '3.11'
cache: 'poetry'
python-version: "3.11"
cache: "poetry"
- name: Setup node
uses: actions/setup-node@v3.1.0
uses: actions/setup-node@v3
with:
node-version: '16'
cache: 'npm'
node-version: "20.5"
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Setup dependencies
shell: bash
run: |
export PSQL_TAG=${{ inputs.postgresql_tag }}
docker-compose -f .github/actions/setup/docker-compose.yml up -d
poetry env use python3.11
poetry install
.github/actions/setup/docker-compose.yml (14 changes, vendored)
@@ -1,23 +1,21 @@
version: '3.7'
version: "3.7"

services:
postgresql:
container_name: postgres
image: library/postgres:12
image: docker.io/library/postgres:${PSQL_TAG:-12}
volumes:
- db-data:/var/lib/postgresql/data
- db-data:/var/lib/postgresql/data
environment:
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
POSTGRES_DB: authentik
ports:
- 5432:5432
- 5432:5432
restart: always
redis:
container_name: redis
image: library/redis
image: docker.io/library/redis
ports:
- 6379:6379
- 6379:6379
restart: always

volumes:
.github/cherry-pick-bot.yml (2 changes, vendored, new file)
@@ -0,0 +1,2 @@
enabled: true
preservePullRequestTitle: true
.github/codecov.yml (11 changes, vendored)
@@ -1,3 +1,10 @@
coverage:
precision: 2
round: up
status:
project:
default:
target: auto
# adjust accordingly based on how flaky your tests are
# this allows a 1% drop from the previous base commit coverage
threshold: 1%
notify:
after_n_builds: 3
.github/codespell-dictionary.txt (1 change, vendored, new file)
@@ -0,0 +1 @@
authentic->authentik
.github/dependabot.yml (144 changes, vendored)
@@ -1,62 +1,86 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "ci:"
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: npm
directory: "/web"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "web:"
- package-ecosystem: npm
directory: "/website"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "website:"
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: docker
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "ci:"
labels:
- dependencies
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies
- package-ecosystem: npm
directory: "/web"
schedule:
interval: daily
time: "04:00"
labels:
- dependencies
open-pull-requests-limit: 10
commit-message:
prefix: "web:"
groups:
sentry:
patterns:
- "@sentry/*"
babel:
patterns:
- "@babel/*"
- "babel-*"
eslint:
patterns:
- "@typescript-eslint/eslint-*"
- "eslint"
- "eslint-*"
storybook:
patterns:
- "@storybook/*"
- "*storybook*"
esbuild:
patterns:
- "@esbuild/*"
- package-ecosystem: npm
directory: "/website"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "website:"
labels:
- dependencies
groups:
docusaurus:
patterns:
- "@docusaurus/*"
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies
- package-ecosystem: docker
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies
.github/pull_request_template.md (40 changes, vendored)
@@ -1,19 +1,35 @@
<!--
👋 Hello there! Welcome.
👋 Hi there! Welcome.

Please check the [Contributing guidelines](https://github.com/goauthentik/authentik/blob/main/CONTRIBUTING.md#how-can-i-contribute).
Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute
-->

# Details
* **Does this resolve an issue?**
Resolves #
## Details

## Changes
### New Features
* Adds feature which does x, y, and z.
<!--
Explain what this PR changes, what the rationale behind the change is, if any new requirements are introduced or any breaking changes caused by this PR.

### Breaking Changes
* Adds breaking change which causes \<issue\>.
Ideally also link an Issue for context that this PR will close using `closes #`
-->
REPLACE ME

## Additional
Any further notes or comments you want to make.
---

## Checklist

- [ ] Local tests pass (`ak test authentik/`)
- [ ] The code has been formatted (`make lint-fix`)

If an API change has been made

- [ ] The API schema has been updated (`make gen-build`)

If changes to the frontend have been made

- [ ] The code has been formatted (`make web`)
- [ ] The translation files have been updated (`make i18n-extract`)

If applicable

- [ ] The documentation has been updated
- [ ] The documentation has been formatted (`make website`)
.github/stale.yml (18 changes, vendored)
@@ -1,18 +0,0 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 60
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
- pinned
- security
- pr_wanted
- enhancement
- bug/confirmed
- enhancement/confirmed
- question
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
.github/transifex.yml (8 changes, vendored)
@@ -2,15 +2,15 @@ git:
filters:
- filter_type: file
# all supported i18n types: https://docs.transifex.com/formats
file_format: PO
file_format: XLIFF
source_language: en
source_file: web/src/locales/en.po
source_file: web/xliff/en.xlf
# path expression to translation files, must contain <lang> placeholder
translation_files_expression: 'web/src/locales/<lang>.po'
translation_files_expression: "web/xliff/<lang>.xlf"
- filter_type: file
# all supported i18n types: https://docs.transifex.com/formats
file_format: PO
source_language: en
source_file: locale/en/LC_MESSAGES/django.po
# path expression to translation files, must contain <lang> placeholder
translation_files_expression: 'locale/<lang>/LC_MESSAGES/django.po'
translation_files_expression: "locale/<lang>/LC_MESSAGES/django.po"
.github/workflows/ci-main.yml (150 changes, vendored)
@@ -11,6 +11,7 @@ on:
pull_request:
branches:
- main
- version-*

env:
POSTGRES_DB: authentik
@@ -23,12 +24,14 @@ jobs:
fail-fast: false
matrix:
job:
- pylint
- black
- isort
- bandit
- pyright
- black
- codespell
- isort
- pending-migrations
- pylint
- pyright
- ruff
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@@ -59,7 +62,7 @@ jobs:
cp authentik/lib/default.yml local.env.yml
cp -R .github ..
cp -R scripts ..
git checkout $(git describe --abbrev=0 --match 'version/*')
git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
rm -rf .github/ scripts/
mv ../.github ../scripts .
- name: Setup authentik env (ensure stable deps are installed)
@@ -79,11 +82,21 @@ jobs:
- name: migrate to latest
run: poetry run python -m lifecycle.migrate
test-unittest:
name: test-unittest - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
psql:
- 12-alpine
- 15-alpine
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
with:
postgresql_tag: ${{ matrix.psql }}
- name: run unittest
run: |
poetry run make test
@@ -94,22 +107,43 @@ jobs:
flags: unit
test-integration:
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Create k8s Kind Cluster
uses: helm/kind-action@v1.5.0
uses: helm/kind-action@v1.8.0
- name: run integration
run: |
poetry run make test-integration
poetry run coverage run manage.py test tests/integration
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v3
with:
flags: integration
test-e2e-provider:
test-e2e:
name: test-e2e (${{ matrix.job.name }})
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
job:
- name: proxy
glob: tests/e2e/test_provider_proxy*
- name: oauth
glob: tests/e2e/test_provider_oauth2* tests/e2e/test_source_oauth*
- name: oauth-oidc
glob: tests/e2e/test_provider_oidc*
- name: saml
glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml*
- name: ldap
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
- name: radius
glob: tests/e2e/test_provider_radius*
- name: flows
glob: tests/e2e/test_flows*
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
@@ -131,36 +165,7 @@ jobs:
npm run build
- name: run e2e
run: |
poetry run make test-e2e-provider
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v3
with:
flags: e2e
test-e2e-rest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
run: |
docker-compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v3
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web/
run: |
npm ci
make -C .. gen-client-ts
npm run build
- name: run e2e
run: |
poetry run make test-e2e-rest
poetry run coverage run manage.py test ${{ matrix.job.glob }}
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v3
@@ -173,24 +178,23 @@ jobs:
- test-migrations-from-stable
- test-unittest
- test-integration
- test-e2e-rest
- test-e2e-provider
- test-e2e
runs-on: ubuntu-latest
steps:
- run: echo mark
build:
needs: ci-core-mark
runs-on: ubuntu-latest
permissions:
# Needed to upload contianer images to ghcr.io
packages: write
timeout-minutes: 120
strategy:
fail-fast: false
matrix:
arch:
- 'linux/amd64'
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
uses: docker/setup-qemu-action@v2.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables
@@ -205,8 +209,8 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Building Docker Image
uses: docker/build-push-action@v3
- name: Build Docker Image
uses: docker/build-push-action@v4
with:
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
@@ -214,14 +218,58 @@ jobs:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION=${{ steps.ev.outputs.version }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: ${{ matrix.arch }}
- name: Comment on PR
if: github.event_name == 'pull_request'
continue-on-error: true
uses: ./.github/actions/comment-pr-instructions
with:
tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}
tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
build-arm64:
needs: ci-core-mark
runs-on: ubuntu-latest
permissions:
# Needed to upload contianer images to ghcr.io
packages: write
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
- name: Login to Container Registry
uses: docker/login-action@v2
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v4
with:
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-arm64
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}-arm64
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION=${{ steps.ev.outputs.version }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: linux/arm64

.github/workflows/ci-outpost.yml (54 changes)
@@ -9,15 +9,16 @@ on:
pull_request:
branches:
- main
- version-*

jobs:
lint-golint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
- uses: actions/setup-go@v4
with:
go-version: "^1.17"
go-version-file: "go.mod"
- name: Prepare and generate API
run: |
# Create folder structure for go embeds
@@ -28,13 +29,17 @@ jobs:
run: make gen-client-go
- name: golangci-lint
uses: golangci/golangci-lint-action@v3
with:
version: v1.52.2
args: --timeout 5000s --verbose
skip-pkg-cache: true
test-unittest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
- uses: actions/setup-go@v4
with:
go-version: "^1.17"
go-version-file: "go.mod"
- name: Generate API
run: make gen-client-go
- name: Go unittests
@@ -47,7 +52,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- run: echo mark
build:
build-container:
timeout-minutes: 120
needs:
- ci-outpost-mark
@@ -57,13 +62,17 @@ jobs:
type:
- proxy
- ldap
arch:
- 'linux/amd64'
- radius
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
uses: docker/setup-qemu-action@v2.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables
@@ -80,20 +89,21 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate API
run: make gen-client-go
- name: Building Docker Image
uses: docker/build-push-action@v3
- name: Build Docker Image
uses: docker/build-push-action@v4
with:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
file: ${{ matrix.type }}.Dockerfile
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION=${{ steps.ev.outputs.version }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: ${{ matrix.arch }}
build-outpost-binary:
platforms: linux/amd64,linux/arm64
context: .
build-binary:
timeout-minutes: 120
needs:
- ci-outpost-mark
@@ -104,17 +114,20 @@ jobs:
type:
- proxy
- ldap
- radius
goos: [linux]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version: "^1.17"
- uses: actions/setup-node@v3.5.1
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-go@v4
with:
node-version: '16'
cache: 'npm'
go-version-file: "go.mod"
- uses: actions/setup-node@v3
with:
node-version: "20.5"
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Generate API
run: make gen-client-go
@@ -128,8 +141,5 @@ jobs:
set -x
export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }}
export CGO_ENABLED=0
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- uses: actions/upload-artifact@v3
with:
name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
path: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}

.github/workflows/ci-web.yml (41 changes)
@@ -9,16 +9,17 @@ on:
pull_request:
branches:
- main
- version-*

jobs:
lint-eslint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
with:
node-version: '16'
cache: 'npm'
node-version: "20.5"
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@@ -30,11 +31,11 @@ jobs:
lint-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
with:
node-version: '16'
cache: 'npm'
node-version: "20.5"
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@@ -46,11 +47,11 @@ jobs:
lint-prettier:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
with:
node-version: '16'
cache: 'npm'
node-version: "20.5"
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@@ -62,11 +63,11 @@ jobs:
lint-lit-analyse:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
with:
node-version: '16'
cache: 'npm'
node-version: "20.5"
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: |
@@ -94,11 +95,11 @@ jobs:
- ci-web-mark
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
with:
node-version: '16'
cache: 'npm'
node-version: "20.5"
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci

.github/workflows/ci-website.yml (46 changes)
@@ -9,25 +9,63 @@ on:
pull_request:
branches:
- main
- version-*

jobs:
lint-prettier:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
with:
node-version: '16'
cache: 'npm'
node-version: "20.5"
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: prettier
working-directory: website/
run: npm run prettier-check
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
with:
node-version: "20.5"
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: test
working-directory: website/
run: npm test
build:
runs-on: ubuntu-latest
name: ${{ matrix.job }}
strategy:
fail-fast: false
matrix:
job:
- build
- build-docs-only
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
with:
node-version: "20.5"
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: build
working-directory: website/
run: npm run ${{ matrix.job }}
ci-website-mark:
needs:
- lint-prettier
- test
- build
runs-on: ubuntu-latest
steps:
- run: echo mark

.github/workflows/codeql-analysis.yml (56 changes)
@@ -2,12 +2,11 @@ name: "CodeQL"

on:
push:
branches: [ main, '*', next, version* ]
branches: [main, "*", next, version*]
pull_request:
# The branches below must be a subset of the branches above
branches: [ main ]
branches: [main]
schedule:
- cron: '30 6 * * 5'
- cron: "30 6 * * 5"

jobs:
analyze:
@@ -21,40 +20,17 @@ jobs:
strategy:
fail-fast: false
matrix:
language: [ 'go', 'javascript', 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more:
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed

language: ["go", "javascript", "python"]
steps:
- name: Checkout repository
uses: actions/checkout@v3

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main

# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2

# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl

# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language

#- run: |
#   make bootstrap
#   make release

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
- name: Checkout repository
uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
- name: Autobuild
uses: github/codeql-action/autobuild@v2
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2

.github/workflows/gha-cache-cleanup.yml (new file, 34 lines)
@@ -0,0 +1,34 @@
---
# See https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#force-deleting-cache-entries
name: Cleanup cache after PR is closed
on:
pull_request:
types:
- closed

jobs:
cleanup:
runs-on: ubuntu-latest
steps:
- name: Check out code
uses: actions/checkout@v3

- name: Cleanup
run: |
gh extension install actions/gh-actions-cache

REPO=${{ github.repository }}
BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge"

echo "Fetching list of cache key"
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )

# Setting this to not fail the workflow while deleting cache keys.
set +e
echo "Deleting caches..."
for cacheKey in $cacheKeysForPR; do
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
done
echo "Done"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/ghcr-retention.yml (11 changes)
@@ -2,7 +2,7 @@ name: ghcr-retention

on:
schedule:
- cron: '0 0 * * *' # every day at midnight
- cron: "0 0 * * *" # every day at midnight
workflow_dispatch:

jobs:
@@ -10,13 +10,18 @@ jobs:
name: Delete old unused container images
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Delete 'dev' containers older than a week
uses: sondrelg/container-retention-policy@v1
uses: snok/container-retention-policy@v2
with:
image-names: dev-server,dev-ldap,dev-proxy
cut-off: One week ago UTC
account-type: org
org-name: goauthentik
untagged-only: false
token: ${{ secrets.GHCR_CLEANUP_TOKEN }}
token: ${{ steps.generate_token.outputs.token }}
skip-tags: gh-next,gh-main

.github/workflows/image-compress.yml (new file, 61 lines)
@@ -0,0 +1,61 @@
---
name: authentik-compress-images

on:
push:
branches:
- main
paths:
- "**.jpg"
- "**.jpeg"
- "**.png"
- "**.webp"
pull_request:
paths:
- "**.jpg"
- "**.jpeg"
- "**.png"
- "**.webp"
workflow_dispatch:

jobs:
compress:
name: compress
runs-on: ubuntu-latest
# Don't run on forks. Token will not be available. Will run on main and open a PR anyway
if: |
github.repository == 'goauthentik/authentik' &&
(github.event_name != 'pull_request' ||
github.event.pull_request.head.repo.full_name == github.repository)
steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v3
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Compress images
id: compress
uses: calibreapp/image-actions@main
with:
githubToken: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }}
- uses: peter-evans/create-pull-request@v5
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
title: "*: Auto compress images"
branch-suffix: timestamp
commit-message: "*: compress images"
body: ${{ steps.compress.outputs.markdown }}
delete-branch: true
signoff: true
- uses: peter-evans/enable-pull-request-automerge@v3
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash

.github/workflows/publish-source-docs.yml (new file, 31 lines)
@@ -0,0 +1,31 @@
name: authentik-publish-source-docs

on:
push:
branches:
- main

env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"

jobs:
publish-source-docs:
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: generate docs
run: |
poetry run make migrate
poetry run ak build_source_docs
- name: Publish
uses: netlify/actions/cli@master
with:
args: deploy --dir=source_docs --prod
env:
NETLIFY_SITE_ID: eb246b7b-1d83-4f69-89f7-01a936b4ca59
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}

.github/workflows/release-next-branch.yml (new file, 21 lines)
@@ -0,0 +1,21 @@
name: authentik-on-release-next-branch

on:
schedule:
- cron: "0 12 * * *" # every day at noon
workflow_dispatch:

permissions:
# Needed to be able to push to the next branch
contents: write

jobs:
update-next:
runs-on: ubuntu-latest
environment: internal-production
steps:
- uses: actions/checkout@v3
with:
ref: main
- run: |
git push origin --force main:next

.github/workflows/release-publish.yml (49 changes)
@@ -7,10 +7,13 @@ on:
jobs:
build-server:
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
steps:
- uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
uses: docker/setup-qemu-action@v2.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables
@@ -27,11 +30,11 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Building Docker Image
uses: docker/build-push-action@v3
- name: Build Docker Image
uses: docker/build-push-action@v4
with:
push: ${{ github.event_name == 'release' }}
secrets:
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
tags: |
@@ -43,22 +46,27 @@ jobs:
ghcr.io/goauthentik/server:latest
platforms: linux/amd64,linux/arm64
build-args: |
VERSION=${{ steps.ev.outputs.version }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
build-outpost:
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
strategy:
fail-fast: false
matrix:
type:
- proxy
- ldap
- radius
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
- uses: actions/setup-go@v4
with:
go-version: "^1.17"
go-version-file: "go.mod"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
uses: docker/setup-qemu-action@v2.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables
@@ -75,8 +83,8 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Building Docker Image
uses: docker/build-push-action@v3
- name: Build Docker Image
uses: docker/build-push-action@v4
with:
push: ${{ github.event_name == 'release' }}
tags: |
@@ -88,31 +96,33 @@ jobs:
ghcr.io/goauthentik/${{ matrix.type }}:latest
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
build-args: |
VERSION=${{ steps.ev.outputs.version }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
build-outpost-binary:
timeout-minutes: 120
runs-on: ubuntu-latest
permissions:
# Needed to upload binaries to the release
contents: write
strategy:
fail-fast: false
matrix:
type:
- proxy
- ldap
- radius
goos: [linux, darwin]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
- uses: actions/setup-go@v4
with:
go-version: "^1.17"
- uses: actions/setup-node@v3.5.1
go-version-file: "go.mod"
- uses: actions/setup-node@v3
with:
node-version: '16'
cache: 'npm'
node-version: "20.5"
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Build web
working-directory: web/
@@ -124,6 +134,7 @@ jobs:
set -x
export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }}
export CGO_ENABLED=0
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- name: Upload binaries to release
uses: svenstaro/upload-release-action@v2
@@ -174,5 +185,5 @@ jobs:
SENTRY_PROJECT: authentik
with:
version: authentik@${{ steps.ev.outputs.version }}
sourcemaps: './web/dist'
url_prefix: '~/static/dist'
sourcemaps: "./web/dist"
url_prefix: "~/static/dist"

.github/workflows/release-tag.yml (11 changes)
@@ -3,7 +3,7 @@ name: authentik-on-tag
on:
push:
tags:
- 'version/*'
- "version/*"

jobs:
build:
@@ -22,18 +22,23 @@ jobs:
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test-all
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Extract version number
id: get_version
uses: actions/github-script@v6
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
github-token: ${{ steps.generate_token.outputs.token }}
script: |
return context.payload.ref.replace(/\/refs\/tags\/version\//, '');
- name: Create Release
id: create_release
uses: actions/create-release@v1.1.4
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
with:
tag_name: ${{ github.ref }}
release_name: Release ${{ steps.get_version.outputs.result }}

.github/workflows/repo-stale.yml (new file, 33 lines)
@@ -0,0 +1,33 @@
name: 'authentik-repo-stale'

on:
schedule:
- cron: '30 1 * * *'
workflow_dispatch:

permissions:
# Needed to update issues and PRs
issues: write

jobs:
stale:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/stale@v8
with:
repo-token: ${{ steps.generate_token.outputs.token }}
days-before-stale: 60
days-before-close: 7
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question
stale-issue-label: wontfix
stale-issue-message: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Don't stale PRs, so only apply to PRs with a non-existent label
only-pr-labels: foo

.github/workflows/translation-advice.yml (new file, 34 lines)
@@ -0,0 +1,34 @@
name: authentik-translation-advice

on:
pull_request:
branches:
- main
paths:
- "!**"
- "locale/**"
- "web/src/locales/**"

jobs:
post-comment:
runs-on: ubuntu-latest
steps:
- name: Find Comment
uses: peter-evans/find-comment@v2
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: "github-actions[bot]"
body-includes: authentik translations instructions
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v3
with:
comment-id: ${{ steps.fc.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
### authentik translations instructions

Thanks for your pull request!

authentik translations are handled using [Transifex](https://explore.transifex.com/authentik/authentik/). Please edit translations over there and they'll be included automatically.

.github/workflows/translation-compile.yml (20 changes)
@@ -1,12 +1,9 @@
name: authentik-backend-translate-compile
on:
push:
branches: [ main ]
branches: [main]
paths:
- '/locale/'
pull_request:
paths:
- '/locale/'
- "locale/**"
workflow_dispatch:

env:
@@ -18,16 +15,23 @@ jobs:
compile:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v3
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run compile
run: poetry run ./manage.py compilemessages
run: poetry run ak compilemessages
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
uses: peter-evans/create-pull-request@v5
id: cpr
with:
token: ${{ secrets.GITHUB_TOKEN }}
token: ${{ steps.generate_token.outputs.token }}
branch: compile-backend-translation
commit-message: "core: compile backend translations"
title: "core: compile backend translations"

.github/workflows/translation-rename.yml (new file, 45 lines)
@@ -0,0 +1,45 @@
# Rename transifex pull requests to have a correct naming
# Also enables auto squash-merge
name: authentik-translation-transifex-rename

on:
pull_request:
types: [opened, reopened]

jobs:
rename_pr:
runs-on: ubuntu-latest
if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Get current title
id: title
env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: |
title=$(curl -q -L \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title)
echo "title=${title}" >> "$GITHUB_OUTPUT"
- name: Rename
env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: |
curl -L \
-X PATCH \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \
-d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}"
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ github.event.pull_request.number }}
merge-method: squash

.github/workflows/web-api-publish.yml (31 changes)
@@ -1,19 +1,26 @@
name: authentik-web-api-publish
on:
push:
branches: [ main ]
branches: [main]
paths:
- 'schema.yml'
- "schema.yml"
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
- id: generate_token
uses: tibdex/github-app-token@v1
with:
node-version: '16'
registry-url: 'https://registry.npmjs.org'
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v3
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/setup-node@v3
with:
node-version: "20.5"
registry-url: "https://registry.npmjs.org"
- name: Generate API Client
run: make gen-client-ts
- name: Publish package
@@ -28,14 +35,20 @@ jobs:
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
- uses: peter-evans/create-pull-request@v5
id: cpr
with:
token: ${{ secrets.GITHUB_TOKEN }}
token: ${{ steps.generate_token.outputs.token }}
branch: update-web-api-client
commit-message: "web: bump API Client version"
title: "web: bump API Client version"
body: "web: bump API Client version"
delete-branch: true
signoff: true
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash

.gitignore (6 changes)
@@ -166,6 +166,7 @@ dmypy.json
# SageMath parsed files

# Environments
**/.DS_Store

# Spyder project settings

@@ -200,3 +201,8 @@ media/
.idea/
/gen-*/
data/

# Local Netlify folder
.netlify
.ruff_cache
source_docs/

.vscode/extensions.json (new file, 21 lines)
@@ -0,0 +1,21 @@
{
"recommendations": [
"bashmish.es6-string-css",
"bpruitt-goddard.mermaid-markdown-syntax-highlighting",
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"esbenp.prettier-vscode",
"github.vscode-github-actions",
"golang.go",
"Gruntfuggly.todo-tree",
"mechatroner.rainbow-csv",
"ms-python.black-formatter",
"ms-python.isort",
"ms-python.pylint",
"ms-python.python",
"ms-python.vscode-pylance",
"redhat.vscode-yaml",
"Tobermory.es6-string-html",
"unifiedjs.vscode-mdx",
]
}

.vscode/launch.json (new file, 27 lines)
@@ -0,0 +1,27 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Python: PDB attach Server",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 6800
},
"justMyCode": true,
"django": true
},
{
"name": "Python: PDB attach Worker",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 6900
},
"justMyCode": true,
"django": true
},
]
}

.vscode/settings.json (14 changes)
@@ -14,7 +14,10 @@
"webauthn",
"traefik",
"passwordless",
"kubernetes"
"kubernetes",
"sso",
"slo",
"scim",
],
"python.linting.pylintEnabled": true,
"todo-tree.tree.showCountsInTree": true,
@@ -28,7 +31,8 @@
"!Format sequence",
"!Condition sequence",
"!Env sequence",
"!Env scalar"
"!Env scalar",
"!If sequence"
],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.importModuleSpecifierEnding": "index",
@@ -44,5 +48,11 @@
"url": "https://github.com/goauthentik/authentik/issues/<num>",
"ignoreCase": false
}
],
"go.testFlags": [
"-count=1"
],
"github-actions.workflows.pinned.workflows": [
".github/workflows/ci-main.yml"
]
}

CODEOWNERS (new file, 2 lines)
@@ -0,0 +1,2 @@
* @goauthentik/core
website/docs/security/** @goauthentik/security

CONTRIBUTING.md (178 changes)
@@ -1,178 +0,0 @@
# Contributing to authentik

:+1::tada: Thanks for taking the time to contribute! :tada::+1:

The following is a set of guidelines for contributing to authentik and its components, which are hosted in the [goauthentik Organization](https://github.com/goauthentik) on GitHub. These are mostly guidelines, not rules. Use your best judgment, and feel free to propose changes to this document in a pull request.

#### Table Of Contents

[Code of Conduct](#code-of-conduct)

[I don't want to read this whole thing, I just have a question!!!](#i-dont-want-to-read-this-whole-thing-i-just-have-a-question)

[What should I know before I get started?](#what-should-i-know-before-i-get-started)

- [The components](#the-components)
- [authentik's structure](#authentiks-structure)

[How Can I Contribute?](#how-can-i-contribute)

- [Reporting Bugs](#reporting-bugs)
- [Suggesting Enhancements](#suggesting-enhancements)
- [Your First Code Contribution](#your-first-code-contribution)
- [Pull Requests](#pull-requests)

[Styleguides](#styleguides)

- [Git Commit Messages](#git-commit-messages)
- [Python Styleguide](#python-styleguide)
- [Documentation Styleguide](#documentation-styleguide)

## Code of Conduct

Basically, don't be a dickhead. This is an open-source, non-profit project that is made in the free time of volunteers. If there's something you dislike or think can be done better, tell us! We'd love to hear any suggestions for improvement.

## I don't want to read this whole thing I just have a question!!!

Either [create a question on GitHub](https://github.com/goauthentik/authentik/issues/new?assignees=&labels=question&template=question.md&title=) or join [the Discord server](https://goauthentik.io/discord)

## What should I know before I get started?

### The components

authentik consists of a few larger components:

- _authentik_, the actual application server, is described below.
- _outpost-proxy_ is a Go application based on a forked version of oauth2_proxy, which does identity-aware reverse proxying.
- _outpost-ldap_ is a Go LDAP server that uses the _authentik_ application server as its backend.
- _web_ is the web frontend, both for administrating and using authentik. It is written in TypeScript using lit-html and the PatternFly CSS Library.
- _website_ is the Website/documentation, which uses docusaurus.

### authentik's structure

authentik is at its very core a Django project. It consists of many individual Django applications. These applications are intended to separate concerns, and they may share code between each other.

These are the current packages:
<a id="authentik-packages"/>

```
authentik
├── admin - Administrative tasks and APIs, no models (Version updates, Metrics, system tasks)
├── api - General API Configuration (Routes, Schema and general API utilities)
├── core - Core authentik functionality, central routes, core Models
├── crypto - Cryptography, currently used to generate and hold Certificates and Private Keys
├── events - Event Log, middleware and signals to generate signals
├── flows - Flows, the FlowPlanner and the FlowExecutor, used for all flows for authentication, authorization, etc
├── lib - Generic library of functions, few dependencies on other packages.
├── managed - Handle managed models and their state.
├── outposts - Configure and deploy outposts on kubernetes and docker.
├── policies - General PolicyEngine
│   ├── dummy - A Dummy policy used for testing
│   ├── event_matcher - Match events based on different criteria
│   ├── expiry - Check when a user's password was last set
│   ├── expression - Execute any arbitrary python code
│   ├── hibp - Check a password against HaveIBeenPwned
│   ├── password - Check a password against several rules
│   └── reputation - Check the user's/client's reputation
├── providers
│   ├── ldap - Provide LDAP access to authentik users/groups using an outpost
│   ├── oauth2 - OIDC-compliant OAuth2 provider
│   ├── proxy - Provides an identity-aware proxy using an outpost
│   └── saml - SAML2 Provider
├── recovery - Generate keys to use in case you lock yourself out
├── root - Root django application, contains global settings and routes
├── sources
│   ├── ldap - Sync LDAP users from OpenLDAP or Active Directory into authentik
│   ├── oauth - OAuth1 and OAuth2 Source
│   ├── plex - Plex source
│   └── saml - SAML2 Source
├── stages
│   ├── authenticator_duo - Configure a DUO authenticator
│   ├── authenticator_static - Configure TOTP backup keys
│   ├── authenticator_totp - Configure a TOTP authenticator
│   ├── authenticator_validate - Validate any authenticator
│   ├── authenticator_webauthn - Configure a WebAuthn authenticator
│   ├── captcha - Make the user pass a captcha
│   ├── consent - Let the user decide if they want to consent to an action
│   ├── deny - Static deny, can be used with policies
│   ├── dummy - Dummy stage to test
│   ├── email - Send the user an email and block execution until they click the link
│   ├── identification - Identify a user with any combination of fields
│   ├── invitation - Invitation system to limit flows to certain users
│   ├── password - Password authentication
│   ├── prompt - Arbitrary prompts
│   ├── user_delete - Delete the currently pending user
│   ├── user_login - Login the currently pending user
│   ├── user_logout - Logout the currently pending user
│   └── user_write - Write any currently pending data to the user.
└── tenants - Soft tenancy, configure defaults and branding per domain
```

This Django project runs in gunicorn, which spawns multiple workers and threads. Gunicorn is run from a lightweight Go application that reverse-proxies it, handles static files, and will eventually gain more functionality as more code is migrated to Go.

There are also several background tasks which run in Celery; the root Celery application is defined in `authentik.root.celery`.
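
A minimal, hypothetical sketch of such a background task follows (it is not taken from the repository; it assumes `authentik.root.celery` exposes a Celery instance named `CELERY_APP`, and the task name and body are made up purely for illustration):

```python
# Hypothetical sketch of an authentik-style Celery background task.
# Assumes authentik.root.celery exposes a Celery() instance called CELERY_APP.
from django.contrib.sessions.models import Session
from django.utils import timezone

from authentik.root.celery import CELERY_APP


@CELERY_APP.task()
def cleanup_expired_sessions():
    """Illustrative task: delete sessions that have already expired."""
    Session.objects.filter(expire_date__lt=timezone.now()).delete()
```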

## How Can I Contribute?

### Reporting Bugs

This section guides you through submitting a bug report for authentik. Following these guidelines helps maintainers and the community understand your report, reproduce the behavior, and find related reports.

Whenever authentik encounters an error, it will be logged as an Event with the type `system_exception`. This event type has a button to directly open a pre-filled GitHub issue form.

This form will have the full stack trace of the error that occurred and shouldn't contain any sensitive data.

### Suggesting Enhancements

This section guides you through submitting an enhancement suggestion for authentik, including completely new features and minor improvements to existing functionality. Following these guidelines helps maintainers and the community understand your suggestion and find related suggestions.

When you are creating an enhancement suggestion, please fill in [the template](https://github.com/goauthentik/authentik/issues/new?assignees=&labels=enhancement&template=feature_request.md&title=), including the steps that you imagine you would take if the feature you're requesting existed.

### Your First Code Contribution

#### Local development

authentik can be run locally, although depending on which part you want to work on, different prerequisites are required.

This is documented in the [developer docs](https://goauthentik.io/developer-docs/?utm_source=github)

### Pull Requests

The process described here has several goals:

- Maintain authentik's quality
- Fix problems that are important to users
- Engage the community in working toward the best possible authentik
- Enable a sustainable system for authentik's maintainers to review contributions

Please follow these steps to have your contribution considered by the maintainers:

1. Follow the [styleguides](#styleguides)
2. After you submit your pull request, verify that all [status checks](https://help.github.com/articles/about-status-checks/) are passing <details><summary>What if the status checks are failing?</summary>If a status check is failing, and you believe that the failure is unrelated to your change, please leave a comment on the pull request explaining why you believe the failure is unrelated. A maintainer will re-run the status check for you. If we conclude that the failure was a false positive, then we will open an issue to track that problem with our status check suite.</details>
3. Ensure your code has tests. While it is not always possible to test every single case, the majority of the code should be tested.

While the prerequisites above must be satisfied prior to having your pull request reviewed, the reviewer(s) may ask you to complete additional design work, tests, or other changes before your pull request can be ultimately accepted.

## Styleguides

### Git Commit Messages

- Use the format of `<package>: <verb> <description>`
- See [here](#authentik-packages) for `package`
- Example: `providers/saml2: fix parsing of requests`
- Reference issues and pull requests liberally after the first line

### Python Styleguide

All Python code is linted with [black](https://black.readthedocs.io/en/stable/), [PyLint](https://www.pylint.org/) and [isort](https://pycqa.github.io/isort/).

authentik runs on Python 3.9 at the time of writing this.

- Use native type-annotations wherever possible (see the short sketch after this list).
- Add meaningful docstrings when possible.
- Ensure any database migrations work properly from the last stable version (this is checked via CI)
- If your code changes central functions, make sure nothing else is broken.
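
A short, hypothetical sketch of the first two style points above (the function is made up purely for illustration and is not part of the authentik codebase):

```python
# Hypothetical example only; not part of the authentik codebase.
from typing import Optional


def get_display_name(username: str, nickname: Optional[str] = None) -> str:
    """Return the nickname when one is set, otherwise fall back to the username."""
    return nickname or username
```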

### Documentation Styleguide

- Use [MDX](https://mdxjs.com/) whenever appropriate.

CONTRIBUTING.md (new symbolic link, 1 line)
@@ -0,0 +1 @@
website/developer-docs/index.md

Dockerfile (50 changes)
@@ -1,5 +1,5 @@
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as website-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:20.5 as website-builder

COPY ./website /work/website/
COPY ./blueprints /work/blueprints/
@@ -7,20 +7,20 @@ COPY ./SECURITY.md /work/

ENV NODE_ENV=production
WORKDIR /work/website
RUN npm ci && npm run build-docs-only
RUN npm ci --include=dev && npm run build-docs-only

# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as web-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:20.5 as web-builder

COPY ./web /work/web/
COPY ./website /work/website/

ENV NODE_ENV=production
WORKDIR /work/web
RUN npm ci && npm run build
RUN npm ci --include=dev && npm run build

# Stage 3: Poetry to requirements.txt export
FROM docker.io/python:3.11.1-slim-bullseye AS poetry-locker
FROM docker.io/python:3.11.5-slim-bookworm AS poetry-locker

WORKDIR /work
COPY ./pyproject.toml /work
@@ -31,7 +31,7 @@ RUN pip install --no-cache-dir poetry && \
poetry export -f requirements.txt --dev --output requirements-dev.txt

# Stage 4: Build go proxy
FROM docker.io/golang:1.19.4-bullseye AS go-builder
FROM docker.io/golang:1.21.0-bookworm AS go-builder

WORKDIR /work

@@ -39,52 +39,56 @@ COPY --from=web-builder /work/web/robots.txt /work/web/robots.txt
COPY --from=web-builder /work/web/security.txt /work/web/security.txt

COPY ./cmd /work/cmd
COPY ./authentik/lib /work/authentik/lib
COPY ./web/static.go /work/web/static.go
COPY ./internal /work/internal
COPY ./go.mod /work/go.mod
COPY ./go.sum /work/go.sum

RUN go build -o /work/authentik ./cmd/server/
RUN go build -o /work/bin/authentik ./cmd/server/

# Stage 5: MaxMind GeoIP
FROM docker.io/maxmindinc/geoipupdate:v4.10 as geoip
FROM ghcr.io/maxmind/geoipupdate:v6.0 as geoip

ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
ENV GEOIPUPDATE_VERBOSE="true"
ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"

USER root
RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
--mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
mkdir -p /usr/share/GeoIP && \
/bin/sh -c "\
export GEOIPUPDATE_ACCOUNT_ID=$(cat /run/secrets/GEOIPUPDATE_ACCOUNT_ID); \
export GEOIPUPDATE_LICENSE_KEY=$(cat /run/secrets/GEOIPUPDATE_LICENSE_KEY); \
/usr/bin/entry.sh || exit 0 \
"
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

# Stage 6: Run
FROM docker.io/python:3.11.1-slim-bullseye AS final-image
FROM docker.io/python:3.11.5-slim-bookworm AS final-image

ARG GIT_BUILD_HASH
ARG VERSION
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH

LABEL org.opencontainers.image.url https://goauthentik.io
LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik
LABEL org.opencontainers.image.version ${VERSION}
LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH}

WORKDIR /

ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH

COPY --from=poetry-locker /work/requirements.txt /
COPY --from=poetry-locker /work/requirements-dev.txt /
COPY --from=geoip /usr/share/GeoIP /geoip

RUN apt-get update && \
# Required for installing pip packages
apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev && \
apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev python3-dev && \
# Required for runtime
apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 && \
# Required for bootstrap & healthcheck
apt-get install -y --no-install-recommends curl runit && \
apt-get install -y --no-install-recommends runit && \
pip install --no-cache-dir -r /requirements.txt && \
apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev && \
apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev libpq-dev python3-dev && \
apt-get autoremove --purge -y && \
apt-get clean && \
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
@@ -95,13 +99,13 @@ RUN apt-get update && \

COPY ./authentik/ /authentik
COPY ./pyproject.toml /
COPY ./xml /xml
COPY ./schemas /schemas
COPY ./locale /locale
COPY ./tests /tests
COPY ./manage.py /
COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle
COPY --from=go-builder /work/authentik /authentik-proxy
COPY --from=go-builder /work/bin/authentik /bin/authentik
COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=website-builder /work/website/help/ /website/help/

LICENSE (9 changes)
@@ -1,6 +1,11 @@
MIT License
Copyright (c) 2023 Jens Langhammer

Copyright (c) 2022 Jens Langhammer
Portions of this software are licensed as follows:
* All content residing under the "website/" directory of this repository is licensed under "Creative Commons: CC BY-SA 4.0 license".
* All content that resides under the "authentik/enterprise/" directory of this repository, if that directory exists, is licensed under the license defined in "authentik/enterprise/LICENSE".
* All client-side JavaScript (when served directly or after being compiled, arranged, augmented, or combined), is licensed under the "MIT Expat" license.
* All third party components incorporated into the authentik are licensed under the original license provided by the owner of the applicable component.
* Content outside of the above mentioned directories or restrictions above is available under the "MIT" license as defined below.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
91
Makefile
91
Makefile
@ -3,18 +3,24 @@ PWD = $(shell pwd)
|
||||
UID = $(shell id -u)
|
||||
GID = $(shell id -g)
|
||||
NPM_VERSION = $(shell python -m scripts.npm_version)
|
||||
PY_SOURCES = authentik tests scripts lifecycle
|
||||
|
||||
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
|
||||
-I .github/codespell-words.txt \
|
||||
-S 'web/src/locales/**' \
|
||||
authentik \
|
||||
internal \
|
||||
cmd \
|
||||
web/src \
|
||||
website/src \
|
||||
website/blog \
|
||||
website/developer-docs \
|
||||
website/docs \
|
||||
website/integrations \
|
||||
website/src
|
||||
|
||||
all: lint-fix lint test gen web
|
||||
|
||||
test-integration:
|
||||
coverage run manage.py test tests/integration
|
||||
|
||||
test-e2e-provider:
|
||||
coverage run manage.py test tests/e2e/test_provider*
|
||||
|
||||
test-e2e-rest:
|
||||
coverage run manage.py test tests/e2e/test_flows* tests/e2e/test_source*
|
||||
|
||||
test-go:
|
||||
go test -timeout 0 -v -race -cover ./...
|
||||
|
||||
@ -33,29 +39,20 @@ test:
|
||||
coverage report
|
||||
|
||||
lint-fix:
|
||||
isort authentik tests scripts lifecycle
|
||||
black authentik tests scripts lifecycle
|
||||
codespell -I .github/codespell-words.txt -S 'web/src/locales/**' -w \
|
||||
authentik \
|
||||
internal \
|
||||
cmd \
|
||||
web/src \
|
||||
website/src \
|
||||
website/docs \
|
||||
website/developer-docs
|
||||
isort authentik $(PY_SOURCES)
|
||||
black authentik $(PY_SOURCES)
|
||||
ruff authentik $(PY_SOURCES)
|
||||
codespell -w $(CODESPELL_ARGS)
|
||||
|
||||
lint:
|
||||
pylint authentik tests lifecycle
|
||||
bandit -r authentik tests lifecycle -x node_modules
|
||||
pylint $(PY_SOURCES)
|
||||
bandit -r $(PY_SOURCES) -x node_modules
|
||||
golangci-lint run -v
|
||||
|
||||
migrate:
|
||||
python -m lifecycle.migrate
|
||||
|
||||
run:
|
||||
go run -v ./cmd/server/
|
||||
|
||||
i18n-extract: i18n-extract-core web-extract
|
||||
i18n-extract: i18n-extract-core web-i18n-extract
|
||||
|
||||
i18n-extract-core:
|
||||
ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en
|
||||
@@ -68,15 +65,20 @@ gen-build:
    AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
    AUTHENTIK_DEBUG=true ak spectacular --file schema.yml

gen-changelog:
    git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
    npx prettier --write changelog.md

gen-diff:
    git show $(shell git describe --abbrev=0):schema.yml > old_schema.yml
    git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > old_schema.yml
    docker run \
        --rm -v ${PWD}:/local \
        --user ${UID}:${GID} \
        docker.io/openapitools/openapi-diff:2.1.0-beta.3 \
        docker.io/openapitools/openapi-diff:2.1.0-beta.6 \
        --markdown /local/diff.md \
        /local/old_schema.yml /local/schema.yml
    rm old_schema.yml
    npx prettier --write diff.md

gen-clean:
    rm -rf web/api/src/

@@ -86,7 +88,7 @@ gen-client-ts:
    docker run \
        --rm -v ${PWD}:/local \
        --user ${UID}:${GID} \
        docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
        docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
        -i /local/schema.yml \
        -g typescript-fetch \
        -o /local/gen-ts-api \

@@ -99,20 +101,21 @@ gen-client-ts:
    \cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api

gen-client-go:
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O config.yaml
    mkdir -p templates
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O templates/README.mustache
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O templates/go.mod.mustache
    mkdir -p ./gen-go-api ./gen-go-api/templates
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./gen-go-api/config.yaml
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./gen-go-api/templates/README.mustache
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./gen-go-api/templates/go.mod.mustache
    cp schema.yml ./gen-go-api/
    docker run \
        --rm -v ${PWD}:/local \
        --rm -v ${PWD}/gen-go-api:/local \
        --user ${UID}:${GID} \
        docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
        docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
        -i /local/schema.yml \
        -g go \
        -o /local/gen-go-api \
        -o /local/ \
        -c /local/config.yaml
    go mod edit -replace goauthentik.io/api/v3=./gen-go-api
    rm -rf config.yaml ./templates/
    rm -rf ./gen-go-api/config.yaml ./gen-go-api/templates/

gen-dev-config:
    python -m scripts.generate_config

@@ -137,6 +140,9 @@ web-watch:
    touch web/dist/.gitkeep
    cd web && npm run watch

web-storybook-watch:
    cd web && npm run storybook

web-lint-fix:
    cd web && npm run prettier

@@ -147,8 +153,8 @@ web-lint:
    web-check-compile:
    cd web && npm run tsc

web-extract:
    cd web && npm run extract
web-i18n-extract:
    cd web && npm run extract-locales

#########################
## Website

@@ -170,7 +176,6 @@ website-watch:

# These targets are used by GitHub Actions to allow usage of matrix
# which makes the YAML file a lot smaller
PY_SOURCES=authentik tests lifecycle
ci--meta-debug:
    python -V
    node --version

@@ -181,6 +186,12 @@ ci-pylint: ci--meta-debug
ci-black: ci--meta-debug
    black --check $(PY_SOURCES)

ci-ruff: ci--meta-debug
    ruff check $(PY_SOURCES)

ci-codespell: ci--meta-debug
    codespell $(CODESPELL_ARGS) -s

ci-isort: ci--meta-debug
    isort --check $(PY_SOURCES)

@@ -198,6 +209,8 @@ install: web-install website-install

dev-reset:
    dropdb -U postgres -h localhost authentik
    # Also remove the test-db if it exists
    dropdb -U postgres -h localhost test_authentik || true
    createdb -U postgres -h localhost authentik
    redis-cli -n 0 flushall
    make migrate

README.md (20 changed lines)

@@ -15,13 +15,13 @@

## What is authentik?

authentik is an open-source Identity Provider focused on flexibility and versatility. You can use authentik in an existing environment to add support for new protocols. authentik is also a great solution for implementing signup/recovery/etc in your application, so you don't have to deal with it.
authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them.

## Installation

For small/test setups it is recommended to use docker-compose, see the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github)
For small/test setups it is recommended to use Docker Compose; refer to the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github).

For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github)
For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github).

## Screenshots

@@ -32,12 +32,16 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h

## Development

See [Development Documentation](https://goauthentik.io/developer-docs/?utm_source=github)
See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github)

## Security

See [SECURITY.md](SECURITY.md)

## Adoption and Contributions

Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).

## Sponsors

This project is proudly sponsored by:

@@ -49,11 +53,3 @@ This project is proudly sponsored by:

</p>

DigitalOcean provides development and testing resources for authentik.

<p>
    <a href="https://www.netlify.com">
        <img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" />
    </a>
</p>

Netlify hosts the [goauthentik.io](https://goauthentik.io) site.

SECURITY.md (52 changed lines)

@@ -1,44 +1,50 @@

Authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce its existence and release the fixed version.
authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce its existence and release the fixed version.

## What authentik classifies as a CVE

CVE (Common Vulnerabilities and Exposures) is a system designed to aggregate all vulnerabilities. As such, a CVE will be issued when there is either a vulnerability or an exposure. Per NIST, a vulnerability is:

“Weakness in an information system, system security procedures, internal controls, or implementation that could be exploited or triggered by a threat source.”

If it is determined that the issue does qualify as a CVE, a CVE number will be issued to the reporter from GitHub.

Even if the issue is not a CVE, we still greatly appreciate your help in hardening authentik.

## Supported Versions

(.x being the latest patch release for each version)

| Version | Supported |
| --------- | ------------------ |
| 2022.11.x | :white_check_mark: |
| 2022.12.x | :white_check_mark: |
| Version | Supported |
| --- | --- |
| 2023.6.x | ✅ |
| 2023.8.x | ✅ |

## Reporting a Vulnerability

To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io). Be sure to include relevant information like which version you've found the issue in, instructions on how to reproduce the issue, and anything else that might make it easier for us to find the bug.
To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io). Be sure to include relevant information like which version you've found the issue in, instructions on how to reproduce the issue, and anything else that might make it easier for us to find the issue.

## Criticality levels
## Severity levels

### High
authentik reserves the right to reclassify the CVSS score as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories:

- Authorization bypass
- Circumvention of policies

### Moderate

- Denial-of-Service attacks

### Low

- Unvalidated redirects
- Issues requiring uncommon setups
| Score | Severity |
| --- | --- |
| 0.0 | None |
| 0.1 – 3.9 | Low |
| 4.0 – 6.9 | Medium |
| 7.0 – 8.9 | High |
| 9.0 – 10.0 | Critical |

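The score-to-severity translation shown above is a plain range lookup over the CVSS v3 base score. A minimal, illustrative sketch (the function name is an assumption for this example and is not part of authentik; the score itself is assumed to have already been computed with the NVD calculator):

```python
def classify_severity(score: float) -> str:
    """Translate a CVSS v3 base score (0.0-10.0) into a severity category."""
    if not 0.0 <= score <= 10.0:
        raise ValueError("CVSS scores range from 0.0 to 10.0")
    if score == 0.0:
        return "None"
    if score <= 3.9:
        return "Low"
    if score <= 6.9:
        return "Medium"
    if score <= 8.9:
        return "High"
    return "Critical"


# Example: a score of 7.5 falls into the 7.0-8.9 bucket.
print(classify_severity(7.5))  # "High"
```
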
## Disclosure process

1. Issue is reported via Email as listed above.
1. A report is received from GitHub, or the issue is reported via email as listed above.
2. The authentik Security team will try to reproduce the issue and ask for more information if required.
3. A criticality level is assigned.
3. A severity level is assigned.
4. A fix is created, and if possible tested by the issue reporter.
5. The fix is backported to other supported versions, and if possible a workaround for other versions is created.
6. An announcement is sent out with a fixed release date and criticality level of the issue. The announcement will be sent at least 24 hours before the release of the fix.
6. An announcement is sent out with a fixed release date and severity level of the issue. The announcement will be sent at least 24 hours before the release of the security fix.
7. The fixed version is released for the supported versions.

## Getting security notifications

To get security notifications, subscribe to the mailing list [here](https://groups.google.com/g/authentik-security-announcements) or join the [discord](https://goauthentik.io/discord) server.

@@ -1,8 +1,8 @@
"""authentik"""
"""authentik root module"""
from os import environ
from typing import Optional

__version__ = "2022.12.2"
__version__ = "2023.8.5"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

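For context on the two constants in this hunk: `ENV_GIT_HASH_KEY` names the environment variable that carries the git build hash, which is typically combined with `__version__` into a full version string. A minimal sketch of that combination, using hypothetical helper names (`get_build_hash`, `get_full_version`) rather than the repository's exact implementation:

```python
from os import environ

__version__ = "2023.8.5"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"


def get_build_hash(fallback: str = "") -> str:
    """Read the git build hash injected at build time (illustrative helper)."""
    return environ.get(ENV_GIT_HASH_KEY, fallback)


def get_full_version() -> str:
    """Append the build hash, if present, to the release version (illustrative)."""
    build_hash = get_build_hash()
    return f"{__version__}+{build_hash}" if build_hash else __version__
```
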
@ -8,6 +8,7 @@ from rest_framework.viewsets import ViewSet
|
||||
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.lib.utils.reflection import get_apps
|
||||
from authentik.policies.event_matcher.models import model_choices
|
||||
|
||||
|
||||
class AppSerializer(PassiveSerializer):
|
||||
@ -29,3 +30,17 @@ class AppsViewSet(ViewSet):
|
||||
for app in sorted(get_apps(), key=lambda app: app.name):
|
||||
data.append({"name": app.name, "label": app.verbose_name})
|
||||
return Response(AppSerializer(data, many=True).data)
|
||||
|
||||
|
||||
class ModelViewSet(ViewSet):
|
||||
"""Read-only view list all installed models"""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
|
||||
@extend_schema(responses={200: AppSerializer(many=True)})
|
||||
def list(self, request: Request) -> Response:
|
||||
"""Read-only view list all installed models"""
|
||||
data = []
|
||||
for name, label in model_choices():
|
||||
data.append({"name": name, "label": label})
|
||||
return Response(AppSerializer(data, many=True).data)
|
||||
|
@ -1,4 +1,7 @@
|
||||
"""authentik administration metrics"""
|
||||
from datetime import timedelta
|
||||
|
||||
from django.db.models.functions import ExtractHour
|
||||
from drf_spectacular.utils import extend_schema, extend_schema_field
|
||||
from guardian.shortcuts import get_objects_for_user
|
||||
from rest_framework.fields import IntegerField, SerializerMethodField
|
||||
@ -21,38 +24,44 @@ class CoordinateSerializer(PassiveSerializer):
|
||||
class LoginMetricsSerializer(PassiveSerializer):
|
||||
"""Login Metrics per 1h"""
|
||||
|
||||
logins_per_1h = SerializerMethodField()
|
||||
logins_failed_per_1h = SerializerMethodField()
|
||||
authorizations_per_1h = SerializerMethodField()
|
||||
logins = SerializerMethodField()
|
||||
logins_failed = SerializerMethodField()
|
||||
authorizations = SerializerMethodField()
|
||||
|
||||
@extend_schema_field(CoordinateSerializer(many=True))
|
||||
def get_logins_per_1h(self, _):
|
||||
"""Get successful logins per hour for the last 24 hours"""
|
||||
def get_logins(self, _):
|
||||
"""Get successful logins per 8 hours for the last 7 days"""
|
||||
user = self.context["user"]
|
||||
return (
|
||||
get_objects_for_user(user, "authentik_events.view_event")
|
||||
.filter(action=EventAction.LOGIN)
|
||||
.get_events_per_hour()
|
||||
get_objects_for_user(user, "authentik_events.view_event").filter(
|
||||
action=EventAction.LOGIN
|
||||
)
|
||||
# 3 data points per day, so 8 hour spans
|
||||
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
|
||||
)
|
||||
|
||||
@extend_schema_field(CoordinateSerializer(many=True))
|
||||
def get_logins_failed_per_1h(self, _):
|
||||
"""Get failed logins per hour for the last 24 hours"""
|
||||
def get_logins_failed(self, _):
|
||||
"""Get failed logins per 8 hours for the last 7 days"""
|
||||
user = self.context["user"]
|
||||
return (
|
||||
get_objects_for_user(user, "authentik_events.view_event")
|
||||
.filter(action=EventAction.LOGIN_FAILED)
|
||||
.get_events_per_hour()
|
||||
get_objects_for_user(user, "authentik_events.view_event").filter(
|
||||
action=EventAction.LOGIN_FAILED
|
||||
)
|
||||
# 3 data points per day, so 8 hour spans
|
||||
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
|
||||
)
|
||||
|
||||
@extend_schema_field(CoordinateSerializer(many=True))
|
||||
def get_authorizations_per_1h(self, _):
|
||||
"""Get successful authorizations per hour for the last 24 hours"""
|
||||
def get_authorizations(self, _):
|
||||
"""Get successful authorizations per 8 hours for the last 7 days"""
|
||||
user = self.context["user"]
|
||||
return (
|
||||
get_objects_for_user(user, "authentik_events.view_event")
|
||||
.filter(action=EventAction.AUTHORIZE_APPLICATION)
|
||||
.get_events_per_hour()
|
||||
get_objects_for_user(user, "authentik_events.view_event").filter(
|
||||
action=EventAction.AUTHORIZE_APPLICATION
|
||||
)
|
||||
# 3 data points per day, so 8 hour spans
|
||||
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
|
||||
)
|
||||
|
||||
|
||||
|
@ -1,5 +1,4 @@
|
||||
"""authentik administration overview"""
|
||||
import os
|
||||
import platform
|
||||
from datetime import datetime
|
||||
from sys import version as python_version
|
||||
@ -34,7 +33,6 @@ class RuntimeDict(TypedDict):
|
||||
class SystemSerializer(PassiveSerializer):
|
||||
"""Get system information."""
|
||||
|
||||
env = SerializerMethodField()
|
||||
http_headers = SerializerMethodField()
|
||||
http_host = SerializerMethodField()
|
||||
http_is_secure = SerializerMethodField()
|
||||
@ -43,10 +41,6 @@ class SystemSerializer(PassiveSerializer):
|
||||
server_time = SerializerMethodField()
|
||||
embedded_outpost_host = SerializerMethodField()
|
||||
|
||||
def get_env(self, request: Request) -> dict[str, str]:
|
||||
"""Get Environment"""
|
||||
return os.environ.copy()
|
||||
|
||||
def get_http_headers(self, request: Request) -> dict[str, str]:
|
||||
"""Get HTTP Request headers"""
|
||||
headers = {}
|
||||
@ -97,8 +91,14 @@ class SystemView(APIView):
|
||||
permission_classes = [IsAdminUser]
|
||||
pagination_class = None
|
||||
filter_backends = []
|
||||
serializer_class = SystemSerializer
|
||||
|
||||
@extend_schema(responses={200: SystemSerializer(many=False)})
|
||||
def get(self, request: Request) -> Response:
|
||||
"""Get system information."""
|
||||
return Response(SystemSerializer(request).data)
|
||||
|
||||
@extend_schema(responses={200: SystemSerializer(many=False)})
|
||||
def post(self, request: Request) -> Response:
|
||||
"""Get system information."""
|
||||
return Response(SystemSerializer(request).data)
|
||||
|
@ -7,7 +7,13 @@ from django.utils.translation import gettext_lazy as _
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import CharField, ChoiceField, DateTimeField, ListField
|
||||
from rest_framework.fields import (
|
||||
CharField,
|
||||
ChoiceField,
|
||||
DateTimeField,
|
||||
ListField,
|
||||
SerializerMethodField,
|
||||
)
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
@ -26,6 +32,7 @@ class TaskSerializer(PassiveSerializer):
|
||||
task_name = CharField()
|
||||
task_description = CharField()
|
||||
task_finish_timestamp = DateTimeField(source="finish_time")
|
||||
task_duration = SerializerMethodField()
|
||||
|
||||
status = ChoiceField(
|
||||
source="result.status.name",
|
||||
@ -33,13 +40,18 @@ class TaskSerializer(PassiveSerializer):
|
||||
)
|
||||
messages = ListField(source="result.messages")
|
||||
|
||||
def to_representation(self, instance):
|
||||
def get_task_duration(self, instance: TaskInfo) -> int:
|
||||
"""Get the duration a task took to run"""
|
||||
return max(instance.finish_timestamp - instance.start_timestamp, 0)
|
||||
|
||||
def to_representation(self, instance: TaskInfo):
|
||||
"""When a new version of authentik adds fields to TaskInfo,
|
||||
the API will fail with an AttributeError, as the classes
|
||||
are pickled in cache. In that case, just delete the info"""
|
||||
try:
|
||||
return super().to_representation(instance)
|
||||
except AttributeError: # pragma: no cover
|
||||
# pylint: disable=broad-except
|
||||
except Exception: # pragma: no cover
|
||||
if isinstance(self.instance, list):
|
||||
for inst in self.instance:
|
||||
inst.delete()
|
||||
@ -68,7 +80,6 @@ class TaskViewSet(ViewSet):
|
||||
),
|
||||
],
|
||||
)
|
||||
# pylint: disable=invalid-name
|
||||
def retrieve(self, request: Request, pk=None) -> Response:
|
||||
"""Get a single system task"""
|
||||
task = TaskInfo.by_name(pk)
|
||||
@ -99,7 +110,6 @@ class TaskViewSet(ViewSet):
|
||||
],
|
||||
)
|
||||
@action(detail=True, methods=["post"])
|
||||
# pylint: disable=invalid-name
|
||||
def retry(self, request: Request, pk=None) -> Response:
|
||||
"""Retry task"""
|
||||
task = TaskInfo.by_name(pk)
|
||||
|
@ -19,7 +19,7 @@ class WorkerView(APIView):
|
||||
def get(self, request: Request) -> Response:
|
||||
"""Get currently connected worker count."""
|
||||
count = len(CELERY_APP.control.ping(timeout=0.5))
|
||||
# In debug we run with `CELERY_TASK_ALWAYS_EAGER`, so tasks are ran on the main process
|
||||
# In debug we run with `task_always_eager`, so tasks are ran on the main process
|
||||
if settings.DEBUG: # pragma: no cover
|
||||
count += 1
|
||||
return Response({"count": count})
|
||||
|
@ -8,7 +8,6 @@ from authentik.root.monitoring import monitoring_set
|
||||
|
||||
|
||||
@receiver(monitoring_set)
|
||||
# pylint: disable=unused-argument
|
||||
def monitoring_set_workers(sender, **kwargs):
|
||||
"""Set worker gauge"""
|
||||
count = len(CELERY_APP.control.ping(timeout=0.5))
|
||||
@ -16,8 +15,7 @@ def monitoring_set_workers(sender, **kwargs):
|
||||
|
||||
|
||||
@receiver(monitoring_set)
|
||||
# pylint: disable=unused-argument
|
||||
def monitoring_set_tasks(sender, **kwargs):
|
||||
"""Set task gauges"""
|
||||
for task in TaskInfo.all().values():
|
||||
task.set_prom_metrics()
|
||||
task.update_metrics()
|
||||
|
@ -58,7 +58,7 @@ def clear_update_notifications():
|
||||
@prefill_task
|
||||
def update_latest_version(self: MonitoredTask):
|
||||
"""Update latest version info"""
|
||||
if CONFIG.y_bool("disable_update_check"):
|
||||
if CONFIG.get_bool("disable_update_check"):
|
||||
cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
|
||||
self.set_status(TaskResult(TaskResultStatus.WARNING, messages=["Version check disabled."]))
|
||||
return
|
||||
|
@ -9,6 +9,7 @@ from authentik.blueprints.tests import reconcile_app
|
||||
from authentik.core.models import Group, User
|
||||
from authentik.core.tasks import clean_expired_models
|
||||
from authentik.events.monitored_tasks import TaskResultStatus
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
|
||||
class TestAdminAPI(TestCase):
|
||||
@ -16,8 +17,8 @@ class TestAdminAPI(TestCase):
|
||||
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
self.user = User.objects.create(username="test-user")
|
||||
self.group = Group.objects.create(name="superusers", is_superuser=True)
|
||||
self.user = User.objects.create(username=generate_id())
|
||||
self.group = Group.objects.create(name=generate_id(), is_superuser=True)
|
||||
self.group.users.add(self.user)
|
||||
self.group.save()
|
||||
self.client.force_login(self.user)
|
||||
@ -93,6 +94,11 @@ class TestAdminAPI(TestCase):
|
||||
response = self.client.get(reverse("authentik_api:apps-list"))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_models(self):
|
||||
"""Test models API"""
|
||||
response = self.client.get(reverse("authentik_api:models-list"))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
@reconcile_app("authentik_outposts")
|
||||
def test_system(self):
|
||||
"""Test system API"""
|
||||
|
23
authentik/admin/urls.py
Normal file
23
authentik/admin/urls.py
Normal file
@ -0,0 +1,23 @@
|
||||
"""API URLs"""
|
||||
from django.urls import path
|
||||
|
||||
from authentik.admin.api.meta import AppsViewSet, ModelViewSet
|
||||
from authentik.admin.api.metrics import AdministrationMetricsViewSet
|
||||
from authentik.admin.api.system import SystemView
|
||||
from authentik.admin.api.tasks import TaskViewSet
|
||||
from authentik.admin.api.version import VersionView
|
||||
from authentik.admin.api.workers import WorkerView
|
||||
|
||||
api_urlpatterns = [
|
||||
("admin/system_tasks", TaskViewSet, "admin_system_tasks"),
|
||||
("admin/apps", AppsViewSet, "apps"),
|
||||
("admin/models", ModelViewSet, "models"),
|
||||
path(
|
||||
"admin/metrics/",
|
||||
AdministrationMetricsViewSet.as_view(),
|
||||
name="admin_metrics",
|
||||
),
|
||||
path("admin/version/", VersionView.as_view(), name="admin_version"),
|
||||
path("admin/workers/", WorkerView.as_view(), name="admin_workers"),
|
||||
path("admin/system/", SystemView.as_view(), name="admin_system"),
|
||||
]
|
@ -1,4 +1,5 @@
|
||||
"""API Authentication"""
|
||||
from hmac import compare_digest
|
||||
from typing import Any, Optional
|
||||
|
||||
from django.conf import settings
|
||||
@ -32,7 +33,17 @@ def validate_auth(header: bytes) -> Optional[str]:
|
||||
|
||||
def bearer_auth(raw_header: bytes) -> Optional[User]:
|
||||
"""raw_header in the Format of `Bearer ....`"""
|
||||
from authentik.providers.oauth2.models import RefreshToken
|
||||
user = auth_user_lookup(raw_header)
|
||||
if not user:
|
||||
return None
|
||||
if not user.is_active:
|
||||
raise AuthenticationFailed("Token invalid/expired")
|
||||
return user
|
||||
|
||||
|
||||
def auth_user_lookup(raw_header: bytes) -> Optional[User]:
|
||||
"""raw_header in the Format of `Bearer ....`"""
|
||||
from authentik.providers.oauth2.models import AccessToken
|
||||
|
||||
auth_credentials = validate_auth(raw_header)
|
||||
if not auth_credentials:
|
||||
@ -45,8 +56,8 @@ def bearer_auth(raw_header: bytes) -> Optional[User]:
|
||||
CTX_AUTH_VIA.set("api_token")
|
||||
return key_token.user
|
||||
# then try to auth via JWT
|
||||
jwt_token = RefreshToken.filter_not_expired(
|
||||
refresh_token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
|
||||
jwt_token = AccessToken.filter_not_expired(
|
||||
token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
|
||||
).first()
|
||||
if jwt_token:
|
||||
# Double-check scopes, since they are saved in a single string
|
||||
@ -68,7 +79,7 @@ def token_secret_key(value: str) -> Optional[User]:
|
||||
and return the service account for the managed outpost"""
|
||||
from authentik.outposts.apps import MANAGED_OUTPOST
|
||||
|
||||
if value != settings.SECRET_KEY:
|
||||
if not compare_digest(value, settings.SECRET_KEY):
|
||||
return None
|
||||
outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
|
||||
if not outposts:
|
||||
|
@ -2,6 +2,43 @@
|
||||
from rest_framework import pagination
|
||||
from rest_framework.response import Response
|
||||
|
||||
PAGINATION_COMPONENT_NAME = "Pagination"
|
||||
PAGINATION_SCHEMA = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"next": {
|
||||
"type": "number",
|
||||
},
|
||||
"previous": {
|
||||
"type": "number",
|
||||
},
|
||||
"count": {
|
||||
"type": "number",
|
||||
},
|
||||
"current": {
|
||||
"type": "number",
|
||||
},
|
||||
"total_pages": {
|
||||
"type": "number",
|
||||
},
|
||||
"start_index": {
|
||||
"type": "number",
|
||||
},
|
||||
"end_index": {
|
||||
"type": "number",
|
||||
},
|
||||
},
|
||||
"required": [
|
||||
"next",
|
||||
"previous",
|
||||
"count",
|
||||
"current",
|
||||
"total_pages",
|
||||
"start_index",
|
||||
"end_index",
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
class Pagination(pagination.PageNumberPagination):
|
||||
"""Pagination which includes total pages and current page"""
|
||||
@ -35,41 +72,7 @@ class Pagination(pagination.PageNumberPagination):
|
||||
return {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"pagination": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"next": {
|
||||
"type": "number",
|
||||
},
|
||||
"previous": {
|
||||
"type": "number",
|
||||
},
|
||||
"count": {
|
||||
"type": "number",
|
||||
},
|
||||
"current": {
|
||||
"type": "number",
|
||||
},
|
||||
"total_pages": {
|
||||
"type": "number",
|
||||
},
|
||||
"start_index": {
|
||||
"type": "number",
|
||||
},
|
||||
"end_index": {
|
||||
"type": "number",
|
||||
},
|
||||
},
|
||||
"required": [
|
||||
"next",
|
||||
"previous",
|
||||
"count",
|
||||
"current",
|
||||
"total_pages",
|
||||
"start_index",
|
||||
"end_index",
|
||||
],
|
||||
},
|
||||
"pagination": {"$ref": f"#/components/schemas/{PAGINATION_COMPONENT_NAME}"},
|
||||
"results": schema,
|
||||
},
|
||||
"required": ["pagination", "results"],
|
||||
|
@ -1,5 +1,6 @@
|
||||
"""Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from drf_spectacular.generators import SchemaGenerator
|
||||
from drf_spectacular.plumbing import (
|
||||
ResolvedComponent,
|
||||
build_array_type,
|
||||
@ -8,6 +9,9 @@ from drf_spectacular.plumbing import (
|
||||
)
|
||||
from drf_spectacular.settings import spectacular_settings
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from rest_framework.settings import api_settings
|
||||
|
||||
from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA
|
||||
|
||||
|
||||
def build_standard_type(obj, **kwargs):
|
||||
@ -28,7 +32,7 @@ GENERIC_ERROR = build_object_type(
|
||||
VALIDATION_ERROR = build_object_type(
|
||||
description=_("Validation Error"),
|
||||
properties={
|
||||
"non_field_errors": build_array_type(build_standard_type(OpenApiTypes.STR)),
|
||||
api_settings.NON_FIELD_ERRORS_KEY: build_array_type(build_standard_type(OpenApiTypes.STR)),
|
||||
"code": build_standard_type(OpenApiTypes.STR),
|
||||
},
|
||||
required=[],
|
||||
@ -36,7 +40,19 @@ VALIDATION_ERROR = build_object_type(
|
||||
)
|
||||
|
||||
|
||||
def postprocess_schema_responses(result, generator, **kwargs): # noqa: W0613
|
||||
def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedComponent.SCHEMA):
|
||||
"""Register a component and return a reference to it."""
|
||||
component = ResolvedComponent(
|
||||
name=name,
|
||||
type=type_,
|
||||
schema=schema,
|
||||
object=name,
|
||||
)
|
||||
generator.registry.register_on_missing(component)
|
||||
return component
|
||||
|
||||
|
||||
def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): # noqa: W0613
|
||||
"""Workaround to set a default response for endpoints.
|
||||
Workaround suggested at
|
||||
<https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357>
|
||||
@ -44,19 +60,10 @@ def postprocess_schema_responses(result, generator, **kwargs): # noqa: W0613
|
||||
<https://github.com/tfranzel/drf-spectacular/issues/101>.
|
||||
"""
|
||||
|
||||
def create_component(name, schema, type_=ResolvedComponent.SCHEMA):
|
||||
"""Register a component and return a reference to it."""
|
||||
component = ResolvedComponent(
|
||||
name=name,
|
||||
type=type_,
|
||||
schema=schema,
|
||||
object=name,
|
||||
)
|
||||
generator.registry.register_on_missing(component)
|
||||
return component
|
||||
create_component(generator, PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA)
|
||||
|
||||
generic_error = create_component("GenericError", GENERIC_ERROR)
|
||||
validation_error = create_component("ValidationError", VALIDATION_ERROR)
|
||||
generic_error = create_component(generator, "GenericError", GENERIC_ERROR)
|
||||
validation_error = create_component(generator, "ValidationError", VALIDATION_ERROR)
|
||||
|
||||
for path in result["paths"].values():
|
||||
for method in path.values():
|
||||
|
@ -7,82 +7,11 @@ API Browser - {{ tenant.branding_title }}
|
||||
{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
<script type="module" src="{% static 'dist/rapidoc-min.js' %}"></script>
|
||||
<script>
|
||||
function getCookie(name) {
|
||||
let cookieValue = "";
|
||||
if (document.cookie && document.cookie !== "") {
|
||||
const cookies = document.cookie.split(";");
|
||||
for (let i = 0; i < cookies.length; i++) {
|
||||
const cookie = cookies[i].trim();
|
||||
// Does this cookie string begin with the name we want?
|
||||
if (cookie.substring(0, name.length + 1) === name + "=") {
|
||||
cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return cookieValue;
|
||||
}
|
||||
window.addEventListener('DOMContentLoaded', (event) => {
|
||||
const rapidocEl = document.querySelector('rapi-doc');
|
||||
rapidocEl.addEventListener('before-try', (e) => {
|
||||
e.detail.request.headers.append('X-authentik-CSRF', getCookie("authentik_csrf"));
|
||||
});
|
||||
});
|
||||
</script>
|
||||
<style>
|
||||
img.logo {
|
||||
width: 100%;
|
||||
padding: 1rem 0.5rem 1.5rem 0.5rem;
|
||||
min-height: 48px;
|
||||
}
|
||||
</style>
|
||||
<script src="{% static 'dist/standalone/api-browser/index.js' %}?version={{ version }}" type="module"></script>
|
||||
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
|
||||
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
<rapi-doc
|
||||
spec-url="{{ path }}"
|
||||
heading-text=""
|
||||
theme="light"
|
||||
render-style="read"
|
||||
default-schema-tab="schema"
|
||||
primary-color="#fd4b2d"
|
||||
nav-bg-color="#212427"
|
||||
bg-color="#000000"
|
||||
text-color="#000000"
|
||||
nav-text-color="#ffffff"
|
||||
nav-hover-bg-color="#3c3f42"
|
||||
nav-accent-color="#4f5255"
|
||||
nav-hover-text-color="#ffffff"
|
||||
use-path-in-nav-bar="true"
|
||||
nav-item-spacing="relaxed"
|
||||
allow-server-selection="false"
|
||||
show-header="false"
|
||||
allow-spec-url-load="false"
|
||||
allow-spec-file-load="false">
|
||||
<div slot="nav-logo">
|
||||
<img class="logo" src="{% static 'dist/assets/icons/icon_left_brand.png' %}" />
|
||||
</div>
|
||||
</rapi-doc>
|
||||
<script>
|
||||
const rapidoc = document.querySelector("rapi-doc");
|
||||
const matcher = window.matchMedia("(prefers-color-scheme: light)");
|
||||
const changer = (ev) => {
|
||||
const style = getComputedStyle(document.documentElement);
|
||||
let bg, text = "";
|
||||
if (matcher.matches) {
|
||||
bg = style.getPropertyValue('--pf-global--BackgroundColor--light-300');
|
||||
text = style.getPropertyValue('--pf-global--Color--300');
|
||||
} else {
|
||||
bg = style.getPropertyValue('--ak-dark-background');
|
||||
text = style.getPropertyValue('--ak-dark-foreground');
|
||||
}
|
||||
rapidoc.attributes.getNamedItem("bg-color").value = bg.trim();
|
||||
rapidoc.attributes.getNamedItem("text-color").value = text.trim();
|
||||
rapidoc.requestUpdate();
|
||||
};
|
||||
matcher.addEventListener("change", changer);
|
||||
window.addEventListener("load", changer);
|
||||
</script>
|
||||
<ak-api-browser schemaPath="{{ path }}"></ak-api-browser>
|
||||
{% endblock %}
|
||||
|
@ -1,18 +1,19 @@
|
||||
"""Test API Authentication"""
|
||||
import json
|
||||
from base64 import b64encode
|
||||
|
||||
from django.conf import settings
|
||||
from django.test import TestCase
|
||||
from guardian.shortcuts import get_anonymous_user
|
||||
from django.utils import timezone
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
|
||||
from authentik.api.authentication import bearer_auth
|
||||
from authentik.blueprints.tests import reconcile_app
|
||||
from authentik.core.models import USER_ATTRIBUTE_SA, Token, TokenIntents
|
||||
from authentik.core.tests.utils import create_test_flow
|
||||
from authentik.core.models import Token, TokenIntents, User, UserTypes
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
|
||||
from authentik.providers.oauth2.models import OAuth2Provider, RefreshToken
|
||||
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
|
||||
|
||||
|
||||
class TestAPIAuth(TestCase):
|
||||
@ -36,9 +37,18 @@ class TestAPIAuth(TestCase):
|
||||
|
||||
def test_bearer_valid(self):
|
||||
"""Test valid token"""
|
||||
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=get_anonymous_user())
|
||||
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=create_test_admin_user())
|
||||
self.assertEqual(bearer_auth(f"Bearer {token.key}".encode()), token.user)
|
||||
|
||||
def test_bearer_valid_deactivated(self):
|
||||
"""Test valid token"""
|
||||
user = create_test_admin_user()
|
||||
user.is_active = False
|
||||
user.save()
|
||||
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=user)
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
bearer_auth(f"Bearer {token.key}".encode())
|
||||
|
||||
def test_managed_outpost(self):
|
||||
"""Test managed outpost"""
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
@ -47,32 +57,36 @@ class TestAPIAuth(TestCase):
|
||||
@reconcile_app("authentik_outposts")
|
||||
def test_managed_outpost_success(self):
|
||||
"""Test managed outpost"""
|
||||
user = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
|
||||
self.assertEqual(user.attributes[USER_ATTRIBUTE_SA], True)
|
||||
user: User = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
|
||||
self.assertEqual(user.type, UserTypes.INTERNAL_SERVICE_ACCOUNT)
|
||||
|
||||
def test_jwt_valid(self):
|
||||
"""Test valid JWT"""
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
|
||||
)
|
||||
refresh = RefreshToken.objects.create(
|
||||
user=get_anonymous_user(),
|
||||
refresh = AccessToken.objects.create(
|
||||
user=create_test_admin_user(),
|
||||
provider=provider,
|
||||
refresh_token=generate_id(),
|
||||
token=generate_id(),
|
||||
auth_time=timezone.now(),
|
||||
_scope=SCOPE_AUTHENTIK_API,
|
||||
_id_token=json.dumps({}),
|
||||
)
|
||||
self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)
|
||||
self.assertEqual(bearer_auth(f"Bearer {refresh.token}".encode()), refresh.user)
|
||||
|
||||
def test_jwt_missing_scope(self):
|
||||
"""Test valid JWT"""
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
|
||||
)
|
||||
refresh = RefreshToken.objects.create(
|
||||
user=get_anonymous_user(),
|
||||
refresh = AccessToken.objects.create(
|
||||
user=create_test_admin_user(),
|
||||
provider=provider,
|
||||
refresh_token=generate_id(),
|
||||
token=generate_id(),
|
||||
auth_time=timezone.now(),
|
||||
_scope="",
|
||||
_id_token=json.dumps({}),
|
||||
)
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)
|
||||
self.assertEqual(bearer_auth(f"Bearer {refresh.token}".encode()), refresh.user)
|
||||
|
@ -4,6 +4,7 @@ from guardian.shortcuts import assign_perm
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import Application, User
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
|
||||
class TestAPIDecorators(APITestCase):
|
||||
@ -16,7 +17,7 @@ class TestAPIDecorators(APITestCase):
|
||||
def test_obj_perm_denied(self):
|
||||
"""Test object perm denied"""
|
||||
self.client.force_login(self.user)
|
||||
app = Application.objects.create(name="denied", slug="denied")
|
||||
app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
|
||||
)
|
||||
@ -25,7 +26,7 @@ class TestAPIDecorators(APITestCase):
|
||||
def test_other_perm_denied(self):
|
||||
"""Test other perm denied"""
|
||||
self.client.force_login(self.user)
|
||||
app = Application.objects.create(name="denied", slug="denied")
|
||||
app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
assign_perm("authentik_core.view_application", self.user, app)
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
|
||||
|
@ -1,8 +1,9 @@
|
||||
"""core Configs API"""
|
||||
from os import path
|
||||
from pathlib import Path
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import models
|
||||
from django.dispatch import Signal
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework.fields import (
|
||||
BooleanField,
|
||||
@ -21,6 +22,8 @@ from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.events.geo import GEOIP_READER
|
||||
from authentik.lib.config import CONFIG
|
||||
|
||||
capabilities = Signal()
|
||||
|
||||
|
||||
class Capabilities(models.TextChoices):
|
||||
"""Define capabilities which influence which APIs can/should be used"""
|
||||
@ -29,6 +32,7 @@ class Capabilities(models.TextChoices):
|
||||
CAN_GEO_IP = "can_geo_ip"
|
||||
CAN_IMPERSONATE = "can_impersonate"
|
||||
CAN_DEBUG = "can_debug"
|
||||
IS_ENTERPRISE = "is_enterprise"
|
||||
|
||||
|
||||
class ErrorReportingConfigSerializer(PassiveSerializer):
|
||||
@ -62,14 +66,19 @@ class ConfigView(APIView):
|
||||
"""Get all capabilities this server instance supports"""
|
||||
caps = []
|
||||
deb_test = settings.DEBUG or settings.TEST
|
||||
if path.ismount(settings.MEDIA_ROOT) or deb_test:
|
||||
if Path(settings.MEDIA_ROOT).is_mount() or deb_test:
|
||||
caps.append(Capabilities.CAN_SAVE_MEDIA)
|
||||
if GEOIP_READER.enabled:
|
||||
caps.append(Capabilities.CAN_GEO_IP)
|
||||
if CONFIG.y_bool("impersonation"):
|
||||
if CONFIG.get_bool("impersonation"):
|
||||
caps.append(Capabilities.CAN_IMPERSONATE)
|
||||
if settings.DEBUG: # pragma: no cover
|
||||
caps.append(Capabilities.CAN_DEBUG)
|
||||
if "authentik.enterprise" in settings.INSTALLED_APPS:
|
||||
caps.append(Capabilities.IS_ENTERPRISE)
|
||||
for _, result in capabilities.send(sender=self):
|
||||
if result:
|
||||
caps.append(result)
|
||||
return caps
|
||||
|
||||
def get_config(self) -> ConfigSerializer:
|
||||
@ -77,17 +86,17 @@ class ConfigView(APIView):
|
||||
return ConfigSerializer(
|
||||
{
|
||||
"error_reporting": {
|
||||
"enabled": CONFIG.y("error_reporting.enabled"),
|
||||
"sentry_dsn": CONFIG.y("error_reporting.sentry_dsn"),
|
||||
"environment": CONFIG.y("error_reporting.environment"),
|
||||
"send_pii": CONFIG.y("error_reporting.send_pii"),
|
||||
"traces_sample_rate": float(CONFIG.y("error_reporting.sample_rate", 0.4)),
|
||||
"enabled": CONFIG.get("error_reporting.enabled"),
|
||||
"sentry_dsn": CONFIG.get("error_reporting.sentry_dsn"),
|
||||
"environment": CONFIG.get("error_reporting.environment"),
|
||||
"send_pii": CONFIG.get("error_reporting.send_pii"),
|
||||
"traces_sample_rate": float(CONFIG.get("error_reporting.sample_rate", 0.4)),
|
||||
},
|
||||
"capabilities": self.get_capabilities(),
|
||||
"cache_timeout": int(CONFIG.y("redis.cache_timeout")),
|
||||
"cache_timeout_flows": int(CONFIG.y("redis.cache_timeout_flows")),
|
||||
"cache_timeout_policies": int(CONFIG.y("redis.cache_timeout_policies")),
|
||||
"cache_timeout_reputation": int(CONFIG.y("redis.cache_timeout_reputation")),
|
||||
"cache_timeout": CONFIG.get_int("redis.cache_timeout"),
|
||||
"cache_timeout_flows": CONFIG.get_int("redis.cache_timeout_flows"),
|
||||
"cache_timeout_policies": CONFIG.get_int("redis.cache_timeout_policies"),
|
||||
"cache_timeout_reputation": CONFIG.get_int("redis.cache_timeout_reputation"),
|
||||
}
|
||||
)
|
||||
|
||||
|
@ -1,259 +1,55 @@
|
||||
"""api v3 urls"""
|
||||
from importlib import import_module
|
||||
|
||||
from django.urls import path
|
||||
from django.urls.resolvers import URLPattern
|
||||
from django.views.decorators.cache import cache_page
|
||||
from drf_spectacular.views import SpectacularAPIView
|
||||
from rest_framework import routers
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.admin.api.meta import AppsViewSet
|
||||
from authentik.admin.api.metrics import AdministrationMetricsViewSet
|
||||
from authentik.admin.api.system import SystemView
|
||||
from authentik.admin.api.tasks import TaskViewSet
|
||||
from authentik.admin.api.version import VersionView
|
||||
from authentik.admin.api.workers import WorkerView
|
||||
from authentik.api.v3.config import ConfigView
|
||||
from authentik.api.views import APIBrowserView
|
||||
from authentik.blueprints.api import BlueprintInstanceViewSet
|
||||
from authentik.core.api.applications import ApplicationViewSet
|
||||
from authentik.core.api.authenticated_sessions import AuthenticatedSessionViewSet
|
||||
from authentik.core.api.devices import AdminDeviceViewSet, DeviceViewSet
|
||||
from authentik.core.api.groups import GroupViewSet
|
||||
from authentik.core.api.propertymappings import PropertyMappingViewSet
|
||||
from authentik.core.api.providers import ProviderViewSet
|
||||
from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
|
||||
from authentik.core.api.tokens import TokenViewSet
|
||||
from authentik.core.api.users import UserViewSet
|
||||
from authentik.crypto.api import CertificateKeyPairViewSet
|
||||
from authentik.events.api.events import EventViewSet
|
||||
from authentik.events.api.notification_mappings import NotificationWebhookMappingViewSet
|
||||
from authentik.events.api.notification_rules import NotificationRuleViewSet
|
||||
from authentik.events.api.notification_transports import NotificationTransportViewSet
|
||||
from authentik.events.api.notifications import NotificationViewSet
|
||||
from authentik.flows.api.bindings import FlowStageBindingViewSet
|
||||
from authentik.flows.api.flows import FlowViewSet
|
||||
from authentik.flows.api.stages import StageViewSet
|
||||
from authentik.flows.views.executor import FlowExecutorView
|
||||
from authentik.flows.views.inspector import FlowInspectorView
|
||||
from authentik.outposts.api.outposts import OutpostViewSet
|
||||
from authentik.outposts.api.service_connections import (
|
||||
DockerServiceConnectionViewSet,
|
||||
KubernetesServiceConnectionViewSet,
|
||||
ServiceConnectionViewSet,
|
||||
)
|
||||
from authentik.policies.api.bindings import PolicyBindingViewSet
|
||||
from authentik.policies.api.policies import PolicyViewSet
|
||||
from authentik.policies.dummy.api import DummyPolicyViewSet
|
||||
from authentik.policies.event_matcher.api import EventMatcherPolicyViewSet
|
||||
from authentik.policies.expiry.api import PasswordExpiryPolicyViewSet
|
||||
from authentik.policies.expression.api import ExpressionPolicyViewSet
|
||||
from authentik.policies.hibp.api import HaveIBeenPwendPolicyViewSet
|
||||
from authentik.policies.password.api import PasswordPolicyViewSet
|
||||
from authentik.policies.reputation.api import ReputationPolicyViewSet, ReputationViewSet
|
||||
from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet
|
||||
from authentik.providers.oauth2.api.providers import OAuth2ProviderViewSet
|
||||
from authentik.providers.oauth2.api.scopes import ScopeMappingViewSet
|
||||
from authentik.providers.oauth2.api.tokens import AuthorizationCodeViewSet, RefreshTokenViewSet
|
||||
from authentik.providers.proxy.api import ProxyOutpostConfigViewSet, ProxyProviderViewSet
|
||||
from authentik.providers.saml.api.property_mapping import SAMLPropertyMappingViewSet
|
||||
from authentik.providers.saml.api.providers import SAMLProviderViewSet
|
||||
from authentik.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
|
||||
from authentik.sources.oauth.api.source import OAuthSourceViewSet
|
||||
from authentik.sources.oauth.api.source_connection import UserOAuthSourceConnectionViewSet
|
||||
from authentik.sources.plex.api.source import PlexSourceViewSet
|
||||
from authentik.sources.plex.api.source_connection import PlexSourceConnectionViewSet
|
||||
from authentik.sources.saml.api.source import SAMLSourceViewSet
|
||||
from authentik.sources.saml.api.source_connection import UserSAMLSourceConnectionViewSet
|
||||
from authentik.stages.authenticator_duo.api import (
|
||||
AuthenticatorDuoStageViewSet,
|
||||
DuoAdminDeviceViewSet,
|
||||
DuoDeviceViewSet,
|
||||
)
|
||||
from authentik.stages.authenticator_sms.api import (
|
||||
AuthenticatorSMSStageViewSet,
|
||||
SMSAdminDeviceViewSet,
|
||||
SMSDeviceViewSet,
|
||||
)
|
||||
from authentik.stages.authenticator_static.api import (
|
||||
AuthenticatorStaticStageViewSet,
|
||||
StaticAdminDeviceViewSet,
|
||||
StaticDeviceViewSet,
|
||||
)
|
||||
from authentik.stages.authenticator_totp.api import (
|
||||
AuthenticatorTOTPStageViewSet,
|
||||
TOTPAdminDeviceViewSet,
|
||||
TOTPDeviceViewSet,
|
||||
)
|
||||
from authentik.stages.authenticator_validate.api import AuthenticatorValidateStageViewSet
|
||||
from authentik.stages.authenticator_webauthn.api import (
|
||||
AuthenticateWebAuthnStageViewSet,
|
||||
WebAuthnAdminDeviceViewSet,
|
||||
WebAuthnDeviceViewSet,
|
||||
)
|
||||
from authentik.stages.captcha.api import CaptchaStageViewSet
|
||||
from authentik.stages.consent.api import ConsentStageViewSet, UserConsentViewSet
|
||||
from authentik.stages.deny.api import DenyStageViewSet
|
||||
from authentik.stages.dummy.api import DummyStageViewSet
|
||||
from authentik.stages.email.api import EmailStageViewSet
|
||||
from authentik.stages.identification.api import IdentificationStageViewSet
|
||||
from authentik.stages.invitation.api import InvitationStageViewSet, InvitationViewSet
|
||||
from authentik.stages.password.api import PasswordStageViewSet
|
||||
from authentik.stages.prompt.api import PromptStageViewSet, PromptViewSet
|
||||
from authentik.stages.user_delete.api import UserDeleteStageViewSet
|
||||
from authentik.stages.user_login.api import UserLoginStageViewSet
|
||||
from authentik.stages.user_logout.api import UserLogoutStageViewSet
|
||||
from authentik.stages.user_write.api import UserWriteStageViewSet
|
||||
from authentik.tenants.api import TenantViewSet
|
||||
from authentik.lib.utils.reflection import get_apps
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
router = routers.DefaultRouter()
|
||||
router.include_format_suffixes = False
|
||||
|
||||
router.register("admin/system_tasks", TaskViewSet, basename="admin_system_tasks")
|
||||
router.register("admin/apps", AppsViewSet, basename="apps")
|
||||
_other_urls = []
|
||||
for _authentik_app in get_apps():
|
||||
try:
|
||||
api_urls = import_module(f"{_authentik_app.name}.urls")
|
||||
except (ModuleNotFoundError, ImportError) as exc:
|
||||
LOGGER.warning("Could not import app's URLs", app_name=_authentik_app.name, exc=exc)
|
||||
continue
|
||||
if not hasattr(api_urls, "api_urlpatterns"):
|
||||
LOGGER.debug(
|
||||
"App does not define API URLs",
|
||||
app_name=_authentik_app.name,
|
||||
)
|
||||
continue
|
||||
urls: list = getattr(api_urls, "api_urlpatterns")
|
||||
for url in urls:
|
||||
if isinstance(url, URLPattern):
|
||||
_other_urls.append(url)
|
||||
else:
|
||||
router.register(*url)
|
||||
LOGGER.debug(
|
||||
"Mounted API URLs",
|
||||
app_name=_authentik_app.name,
|
||||
)
|
||||
|
||||
router.register("core/authenticated_sessions", AuthenticatedSessionViewSet)
|
||||
router.register("core/applications", ApplicationViewSet)
|
||||
router.register("core/groups", GroupViewSet)
|
||||
router.register("core/users", UserViewSet)
|
||||
router.register("core/user_consent", UserConsentViewSet)
|
||||
router.register("core/tokens", TokenViewSet)
|
||||
router.register("core/tenants", TenantViewSet)
|
||||
|
||||
router.register("outposts/instances", OutpostViewSet)
|
||||
router.register("outposts/service_connections/all", ServiceConnectionViewSet)
|
||||
router.register("outposts/service_connections/docker", DockerServiceConnectionViewSet)
|
||||
router.register("outposts/service_connections/kubernetes", KubernetesServiceConnectionViewSet)
|
||||
router.register("outposts/proxy", ProxyOutpostConfigViewSet)
|
||||
router.register("outposts/ldap", LDAPOutpostConfigViewSet)
|
||||
|
||||
router.register("flows/instances", FlowViewSet)
|
||||
router.register("flows/bindings", FlowStageBindingViewSet)
|
||||
|
||||
router.register("crypto/certificatekeypairs", CertificateKeyPairViewSet)
|
||||
|
||||
router.register("events/events", EventViewSet)
|
||||
router.register("events/notifications", NotificationViewSet)
|
||||
router.register("events/transports", NotificationTransportViewSet)
|
||||
router.register("events/rules", NotificationRuleViewSet)
|
||||
|
||||
router.register("managed/blueprints", BlueprintInstanceViewSet)
|
||||
|
||||
router.register("sources/all", SourceViewSet)
|
||||
router.register("sources/user_connections/all", UserSourceConnectionViewSet)
|
||||
router.register("sources/user_connections/oauth", UserOAuthSourceConnectionViewSet)
|
||||
router.register("sources/user_connections/plex", PlexSourceConnectionViewSet)
|
||||
router.register("sources/user_connections/saml", UserSAMLSourceConnectionViewSet)
|
||||
router.register("sources/ldap", LDAPSourceViewSet)
|
||||
router.register("sources/saml", SAMLSourceViewSet)
|
||||
router.register("sources/oauth", OAuthSourceViewSet)
|
||||
router.register("sources/plex", PlexSourceViewSet)
|
||||
|
||||
router.register("policies/all", PolicyViewSet)
|
||||
router.register("policies/bindings", PolicyBindingViewSet)
|
||||
router.register("policies/expression", ExpressionPolicyViewSet)
|
||||
router.register("policies/event_matcher", EventMatcherPolicyViewSet)
|
||||
router.register("policies/haveibeenpwned", HaveIBeenPwendPolicyViewSet)
|
||||
router.register("policies/password_expiry", PasswordExpiryPolicyViewSet)
|
||||
router.register("policies/password", PasswordPolicyViewSet)
|
||||
router.register("policies/reputation/scores", ReputationViewSet)
|
||||
router.register("policies/reputation", ReputationPolicyViewSet)
|
||||
|
||||
router.register("providers/all", ProviderViewSet)
|
||||
router.register("providers/ldap", LDAPProviderViewSet)
|
||||
router.register("providers/proxy", ProxyProviderViewSet)
|
||||
router.register("providers/oauth2", OAuth2ProviderViewSet)
|
||||
router.register("providers/saml", SAMLProviderViewSet)
|
||||
|
||||
router.register("oauth2/authorization_codes", AuthorizationCodeViewSet)
|
||||
router.register("oauth2/refresh_tokens", RefreshTokenViewSet)
|
||||
|
||||
router.register("propertymappings/all", PropertyMappingViewSet)
|
||||
router.register("propertymappings/ldap", LDAPPropertyMappingViewSet)
|
||||
router.register("propertymappings/saml", SAMLPropertyMappingViewSet)
|
||||
router.register("propertymappings/scope", ScopeMappingViewSet)
|
||||
router.register("propertymappings/notification", NotificationWebhookMappingViewSet)
|
||||
|
||||
router.register("authenticators/all", DeviceViewSet, basename="device")
|
||||
router.register("authenticators/duo", DuoDeviceViewSet)
|
||||
router.register("authenticators/sms", SMSDeviceViewSet)
|
||||
router.register("authenticators/static", StaticDeviceViewSet)
|
||||
router.register("authenticators/totp", TOTPDeviceViewSet)
|
||||
router.register("authenticators/webauthn", WebAuthnDeviceViewSet)
|
||||
router.register(
|
||||
"authenticators/admin/all",
|
||||
AdminDeviceViewSet,
|
||||
basename="admin-device",
|
||||
)
|
||||
router.register(
|
||||
"authenticators/admin/duo",
|
||||
DuoAdminDeviceViewSet,
|
||||
basename="admin-duodevice",
|
||||
)
|
||||
router.register(
|
||||
"authenticators/admin/sms",
|
||||
SMSAdminDeviceViewSet,
|
||||
basename="admin-smsdevice",
|
||||
)
|
||||
router.register(
|
||||
"authenticators/admin/static",
|
||||
StaticAdminDeviceViewSet,
|
||||
basename="admin-staticdevice",
|
||||
)
|
||||
router.register("authenticators/admin/totp", TOTPAdminDeviceViewSet, basename="admin-totpdevice")
|
||||
router.register(
|
||||
"authenticators/admin/webauthn",
|
||||
WebAuthnAdminDeviceViewSet,
|
||||
basename="admin-webauthndevice",
|
||||
)
|
||||
|
||||
router.register("stages/all", StageViewSet)
|
||||
router.register("stages/authenticator/duo", AuthenticatorDuoStageViewSet)
|
||||
router.register("stages/authenticator/sms", AuthenticatorSMSStageViewSet)
|
||||
router.register("stages/authenticator/static", AuthenticatorStaticStageViewSet)
|
||||
router.register("stages/authenticator/totp", AuthenticatorTOTPStageViewSet)
|
||||
router.register("stages/authenticator/validate", AuthenticatorValidateStageViewSet)
|
||||
router.register("stages/authenticator/webauthn", AuthenticateWebAuthnStageViewSet)
|
||||
router.register("stages/captcha", CaptchaStageViewSet)
|
||||
router.register("stages/consent", ConsentStageViewSet)
|
||||
router.register("stages/deny", DenyStageViewSet)
|
||||
router.register("stages/email", EmailStageViewSet)
|
||||
router.register("stages/identification", IdentificationStageViewSet)
|
||||
router.register("stages/invitation/invitations", InvitationViewSet)
|
||||
router.register("stages/invitation/stages", InvitationStageViewSet)
|
||||
router.register("stages/password", PasswordStageViewSet)
|
||||
router.register("stages/prompt/prompts", PromptViewSet)
|
||||
router.register("stages/prompt/stages", PromptStageViewSet)
|
||||
router.register("stages/user_delete", UserDeleteStageViewSet)
|
||||
router.register("stages/user_login", UserLoginStageViewSet)
|
||||
router.register("stages/user_logout", UserLogoutStageViewSet)
|
||||
router.register("stages/user_write", UserWriteStageViewSet)
|
||||
|
||||
router.register("stages/dummy", DummyStageViewSet)
|
||||
router.register("policies/dummy", DummyPolicyViewSet)
|
||||
|
||||
urlpatterns = (
|
||||
[
|
||||
path("", APIBrowserView.as_view(), name="schema-browser"),
|
||||
]
|
||||
+ router.urls
|
||||
+ _other_urls
|
||||
+ [
|
||||
path(
|
||||
"admin/metrics/",
|
||||
AdministrationMetricsViewSet.as_view(),
|
||||
name="admin_metrics",
|
||||
),
|
||||
path("admin/version/", VersionView.as_view(), name="admin_version"),
|
||||
path("admin/workers/", WorkerView.as_view(), name="admin_workers"),
|
||||
path("admin/system/", SystemView.as_view(), name="admin_system"),
|
||||
path("root/config/", ConfigView.as_view(), name="config"),
|
||||
path(
|
||||
"flows/executor/<slug:flow_slug>/",
|
||||
FlowExecutorView.as_view(),
|
||||
name="flow-executor",
|
||||
),
|
||||
path(
|
||||
"flows/inspector/<slug:flow_slug>/",
|
||||
FlowInspectorView.as_view(),
|
||||
name="flow-inspector",
|
||||
),
|
||||
path("schema/", cache_page(86400)(SpectacularAPIView.as_view()), name="schema"),
|
||||
]
|
||||
)
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""Serializer mixin for managed models"""
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from drf_spectacular.utils import extend_schema, inline_serializer
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import ValidationError
|
||||
@ -10,7 +11,9 @@ from rest_framework.serializers import ListSerializer, ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
|
||||
from authentik.blueprints.models import BlueprintInstance
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.blueprints.v1.oci import OCI_PREFIX
|
||||
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
@@ -33,15 +36,31 @@ class BlueprintInstanceSerializer(ModelSerializer):
"""Info about a single blueprint instance file"""

def validate_path(self, path: str) -> str:
"""Ensure the path specified is retrievable"""
try:
BlueprintInstance(path=path).retrieve()
except BlueprintRetrievalFailed as exc:
raise ValidationError(exc) from exc
"""Ensure the path (if set) specified is retrievable"""
if path == "" or path.startswith(OCI_PREFIX):
return path
files: list[dict] = blueprints_find_dict.delay().get()
if path not in [file["path"] for file in files]:
raise ValidationError(_("Blueprint file does not exist"))
return path

class Meta:
def validate_content(self, content: str) -> str:
"""Ensure content (if set) is a valid blueprint"""
if content == "":
return content
context = self.instance.context if self.instance else {}
valid, logs = Importer(content, context).validate()
if not valid:
text_logs = "\n".join([x["event"] for x in logs])
raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs}))
return content

def validate(self, attrs: dict) -> dict:
if attrs.get("path", "") == "" and attrs.get("content", "") == "":
raise ValidationError(_("Either path or content must be set."))
return super().validate(attrs)
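The object-level rule added here can be sketched in isolation. This is an illustrative stand-in, not authentik's actual serializer; settings are configured inline so the snippet runs outside a project, and only the error text mirrors the diff:

import django
from django.conf import settings

if not settings.configured:
    settings.configure(USE_TZ=True)
    django.setup()

from rest_framework import serializers


class HypotheticalBlueprintSerializer(serializers.Serializer):
    """Illustrative stand-in for the path/content rule above"""

    path = serializers.CharField(allow_blank=True, default="")
    content = serializers.CharField(allow_blank=True, default="")

    def validate(self, attrs: dict) -> dict:
        # Object-level validation runs after the per-field validators
        if attrs.get("path", "") == "" and attrs.get("content", "") == "":
            raise serializers.ValidationError("Either path or content must be set.")
        return attrs


serializer = HypotheticalBlueprintSerializer(data={})
print(serializer.is_valid())  # False
print(serializer.errors)      # non_field_errors: "Either path or content must be set."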
|
||||
|
||||
class Meta:
|
||||
model = BlueprintInstance
|
||||
fields = [
|
||||
"pk",
|
||||
@ -54,6 +73,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
|
||||
"enabled",
|
||||
"managed_models",
|
||||
"metadata",
|
||||
"content",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"status": {"read_only": True},
|
||||
|
@@ -55,11 +55,12 @@ class AuthentikBlueprintsConfig(ManagedAppConfig):
"""Load v1 tasks"""
self.import_module("authentik.blueprints.v1.tasks")

def reconcile_blueprints_discover(self):
def reconcile_blueprints_discovery(self):
"""Run blueprint discovery"""
from authentik.blueprints.v1.tasks import blueprints_discover
from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints

blueprints_discover.delay()
blueprints_discovery.delay()
clear_failed_blueprints.delay()

def import_models(self):
super().import_models()
@ -19,10 +19,8 @@ class Command(BaseCommand):
|
||||
for blueprint_path in options.get("blueprints", []):
|
||||
content = BlueprintInstance(path=blueprint_path).retrieve()
|
||||
importer = Importer(content)
|
||||
valid, logs = importer.validate()
|
||||
valid, _ = importer.validate()
|
||||
if not valid:
|
||||
for log in logs:
|
||||
getattr(LOGGER, log.pop("log_level"))(**log)
|
||||
self.stderr.write("blueprint invalid")
|
||||
sys_exit(1)
|
||||
importer.apply()
|
||||
|
@ -1,12 +1,17 @@
|
||||
"""Generate JSON Schema for blueprints"""
|
||||
from json import dumps, loads
|
||||
from pathlib import Path
|
||||
from json import dumps
|
||||
from typing import Any
|
||||
|
||||
from django.core.management.base import BaseCommand, no_translations
|
||||
from django.db.models import Model
|
||||
from drf_jsonschema_serializer.convert import field_to_converter
|
||||
from rest_framework.fields import Field, JSONField, UUIDField
|
||||
from rest_framework.serializers import Serializer
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.blueprints.v1.importer import is_model_allowed
|
||||
from authentik.blueprints.v1.meta.registry import registry
|
||||
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed
|
||||
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
|
||||
from authentik.lib.models import SerializerModel
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
@ -16,21 +21,146 @@ class Command(BaseCommand):
|
||||
|
||||
schema: dict
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.schema = {
|
||||
"$schema": "http://json-schema.org/draft-07/schema",
|
||||
"$id": "https://goauthentik.io/blueprints/schema.json",
|
||||
"type": "object",
|
||||
"title": "authentik Blueprint schema",
|
||||
"required": ["version", "entries"],
|
||||
"properties": {
|
||||
"version": {
|
||||
"$id": "#/properties/version",
|
||||
"type": "integer",
|
||||
"title": "Blueprint version",
|
||||
"default": 1,
|
||||
},
|
||||
"metadata": {
|
||||
"$id": "#/properties/metadata",
|
||||
"type": "object",
|
||||
"required": ["name"],
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"labels": {"type": "object", "additionalProperties": {"type": "string"}},
|
||||
},
|
||||
},
|
||||
"context": {
|
||||
"$id": "#/properties/context",
|
||||
"type": "object",
|
||||
"additionalProperties": True,
|
||||
},
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"oneOf": [],
|
||||
},
|
||||
},
|
||||
},
|
||||
"$defs": {},
|
||||
}
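To make the shape above concrete, here is a trimmed validation run using the third-party jsonschema package. The per-model entries that build() later appends to "oneOf" are replaced by a single made-up stub, and the document itself is invented:

from jsonschema import validate

base_schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "type": "object",
    "required": ["version", "entries"],
    "properties": {
        "version": {"type": "integer", "default": 1},
        "metadata": {
            "type": "object",
            "required": ["name"],
            "properties": {"name": {"type": "string"}},
        },
        "entries": {
            "type": "array",
            # Stub standing in for the per-model definitions build() generates
            "items": {"oneOf": [{"type": "object", "required": ["model", "identifiers"]}]},
        },
    },
}

document = {
    "version": 1,
    "metadata": {"name": "example blueprint"},
    "entries": [{"model": "authentik_core.group", "identifiers": {"name": "example"}}],
}

validate(document, base_schema)  # raises jsonschema.ValidationError on mismatch
print("document matches the trimmed schema")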
|
||||
|
||||
@no_translations
|
||||
def handle(self, *args, **options):
|
||||
"""Generate JSON Schema for blueprints"""
|
||||
path = Path(__file__).parent.joinpath("./schema_template.json")
|
||||
with open(path, "r", encoding="utf-8") as _template_file:
|
||||
self.schema = loads(_template_file.read())
|
||||
self.set_model_allowed()
|
||||
self.stdout.write(dumps(self.schema, indent=4))
|
||||
self.build()
|
||||
self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default))
|
||||
|
||||
def set_model_allowed(self):
|
||||
"""Set model enum"""
|
||||
model_names = []
|
||||
@staticmethod
|
||||
def json_default(value: Any) -> Any:
|
||||
"""Helper that handles gettext_lazy strings that JSON doesn't handle"""
|
||||
return str(value)
|
||||
|
||||
def build(self):
|
||||
"""Build all models into the schema"""
|
||||
for model in registry.get_models():
|
||||
if not is_model_allowed(model):
|
||||
if issubclass(model, BaseMetaModel):
|
||||
serializer_class = model.serializer()
|
||||
else:
|
||||
if model._meta.abstract:
|
||||
continue
|
||||
if not is_model_allowed(model):
|
||||
continue
|
||||
model_instance: Model = model()
|
||||
if not isinstance(model_instance, SerializerModel):
|
||||
continue
|
||||
serializer_class = model_instance.serializer
|
||||
serializer = serializer_class(
|
||||
context={
|
||||
SERIALIZER_CONTEXT_BLUEPRINT: False,
|
||||
}
|
||||
)
|
||||
model_path = f"{model._meta.app_label}.{model._meta.model_name}"
|
||||
self.schema["properties"]["entries"]["items"]["oneOf"].append(
|
||||
self.template_entry(model_path, serializer)
|
||||
)
|
||||
|
||||
def template_entry(self, model_path: str, serializer: Serializer) -> dict:
|
||||
"""Template entry for a single model"""
|
||||
model_schema = self.to_jsonschema(serializer)
|
||||
model_schema["required"] = []
|
||||
def_name = f"model_{model_path}"
|
||||
def_path = f"#/$defs/{def_name}"
|
||||
self.schema["$defs"][def_name] = model_schema
|
||||
return {
|
||||
"type": "object",
|
||||
"required": ["model", "identifiers"],
|
||||
"properties": {
|
||||
"model": {"const": model_path},
|
||||
"id": {"type": "string"},
|
||||
"state": {
|
||||
"type": "string",
|
||||
"enum": ["absent", "present", "created"],
|
||||
"default": "present",
|
||||
},
|
||||
"conditions": {"type": "array", "items": {"type": "boolean"}},
|
||||
"attrs": {"$ref": def_path},
|
||||
"identifiers": {"$ref": def_path},
|
||||
},
|
||||
}
|
||||
|
||||
def field_to_jsonschema(self, field: Field) -> dict:
|
||||
"""Convert a single field to json schema"""
|
||||
if isinstance(field, Serializer):
|
||||
result = self.to_jsonschema(field)
|
||||
else:
|
||||
try:
|
||||
converter = field_to_converter[field]
|
||||
result = converter.convert(field)
|
||||
except KeyError:
|
||||
if isinstance(field, JSONField):
|
||||
result = {"type": "object", "additionalProperties": True}
|
||||
elif isinstance(field, UUIDField):
|
||||
result = {"type": "string", "format": "uuid"}
|
||||
else:
|
||||
raise
|
||||
if field.label:
|
||||
result["title"] = field.label
|
||||
if field.help_text:
|
||||
result["description"] = field.help_text
|
||||
return self.clean_result(result)
|
||||
|
||||
def clean_result(self, result: dict) -> dict:
|
||||
"""Remove enumNames from result, recursively"""
|
||||
result.pop("enumNames", None)
|
||||
for key, value in result.items():
|
||||
if isinstance(value, dict):
|
||||
result[key] = self.clean_result(value)
|
||||
return result
|
||||
|
||||
def to_jsonschema(self, serializer: Serializer) -> dict:
|
||||
"""Convert serializer to json schema"""
|
||||
properties = {}
|
||||
required = []
|
||||
for name, field in serializer.fields.items():
|
||||
if field.read_only:
|
||||
continue
|
||||
model_names.append(f"{model._meta.app_label}.{model._meta.model_name}")
|
||||
model_names.sort()
|
||||
self.schema["properties"]["entries"]["items"]["properties"]["model"]["enum"] = model_names
|
||||
sub_schema = self.field_to_jsonschema(field)
|
||||
if field.required:
|
||||
required.append(name)
|
||||
properties[name] = sub_schema
|
||||
|
||||
result = {"type": "object", "properties": properties}
|
||||
if required:
|
||||
result["required"] = required
|
||||
return result
|
||||
|
@ -1,105 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema",
|
||||
"$id": "http://example.com/example.json",
|
||||
"type": "object",
|
||||
"title": "authentik Blueprint schema",
|
||||
"default": {},
|
||||
"required": [
|
||||
"version",
|
||||
"entries"
|
||||
],
|
||||
"properties": {
|
||||
"version": {
|
||||
"$id": "#/properties/version",
|
||||
"type": "integer",
|
||||
"title": "Blueprint version",
|
||||
"default": 1
|
||||
},
|
||||
"metadata": {
|
||||
"$id": "#/properties/metadata",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name"
|
||||
],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"labels": {
|
||||
"type": "object"
|
||||
}
|
||||
}
|
||||
},
|
||||
"context": {
|
||||
"$id": "#/properties/context",
|
||||
"type": "object",
|
||||
"additionalProperties": true
|
||||
},
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$id": "#entry",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"model"
|
||||
],
|
||||
"properties": {
|
||||
"model": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"placeholder"
|
||||
]
|
||||
},
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"state": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"absent",
|
||||
"present",
|
||||
"created"
|
||||
],
|
||||
"default": "present"
|
||||
},
|
||||
"conditions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"attrs": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "Commonly available field, may not exist on all models"
|
||||
}
|
||||
},
|
||||
"default": {},
|
||||
"additionalProperties": true
|
||||
},
|
||||
"identifiers": {
|
||||
"type": "object",
|
||||
"default": {},
|
||||
"properties": {
|
||||
"pk": {
|
||||
"description": "Commonly available field, may not exist on all models",
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "number"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -6,7 +6,6 @@ from pathlib import Path
|
||||
import django.contrib.postgres.fields
|
||||
from dacite.core import from_dict
|
||||
from django.apps.registry import Apps
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
from yaml import load
|
||||
@ -15,7 +14,7 @@ from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM
|
||||
from authentik.lib.config import CONFIG
|
||||
|
||||
|
||||
def check_blueprint_v1_file(BlueprintInstance: type["BlueprintInstance"], path: Path):
|
||||
def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
|
||||
"""Check if blueprint should be imported"""
|
||||
from authentik.blueprints.models import BlueprintInstanceStatus
|
||||
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
|
||||
@ -31,7 +30,7 @@ def check_blueprint_v1_file(BlueprintInstance: type["BlueprintInstance"], path:
|
||||
return
|
||||
blueprint_file.seek(0)
|
||||
instance: BlueprintInstance = BlueprintInstance.objects.filter(path=path).first()
|
||||
rel_path = path.relative_to(Path(CONFIG.y("blueprints_dir")))
|
||||
rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
|
||||
meta = None
|
||||
if metadata:
|
||||
meta = from_dict(BlueprintMetadata, metadata)
|
||||
@ -46,7 +45,7 @@ def check_blueprint_v1_file(BlueprintInstance: type["BlueprintInstance"], path:
|
||||
enabled=True,
|
||||
managed_models=[],
|
||||
last_applied_hash="",
|
||||
metadata=metadata,
|
||||
metadata=metadata or {},
|
||||
)
|
||||
instance.save()
|
||||
|
||||
@ -56,7 +55,7 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEdit
|
||||
Flow = apps.get_model("authentik_flows", "Flow")
|
||||
|
||||
db_alias = schema_editor.connection.alias
|
||||
for file in glob(f"{CONFIG.y('blueprints_dir')}/**/*.yaml", recursive=True):
|
||||
for file in glob(f"{CONFIG.get('blueprints_dir')}/**/*.yaml", recursive=True):
|
||||
check_blueprint_v1_file(BlueprintInstance, Path(file))
|
||||
|
||||
for blueprint in BlueprintInstance.objects.using(db_alias).all():
|
||||
@ -71,7 +70,6 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEdit
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [("authentik_flows", "0001_initial")]
|
||||
@ -86,7 +84,12 @@ class Migration(migrations.Migration):
|
||||
"managed",
|
||||
models.TextField(
|
||||
default=None,
|
||||
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
|
||||
help_text=(
|
||||
"Objects which are managed by authentik. These objects are created and"
|
||||
" updated automatically. This is flag only indicates that an object can"
|
||||
" be overwritten by migrations. You can still modify the objects via"
|
||||
" the API, but expect changes to be overwritten in a later update."
|
||||
),
|
||||
null=True,
|
||||
unique=True,
|
||||
verbose_name="Managed by authentik",
|
||||
|
@ -0,0 +1,22 @@
|
||||
# Generated by Django 4.1.5 on 2023-01-10 19:48
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("authentik_blueprints", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="blueprintinstance",
|
||||
name="content",
|
||||
field=models.TextField(blank=True, default=""),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="blueprintinstance",
|
||||
name="path",
|
||||
field=models.TextField(blank=True, default=""),
|
||||
),
|
||||
]
|
@ -0,0 +1,31 @@
|
||||
# Generated by Django 4.1.7 on 2023-04-28 10:49
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
from authentik.lib.migrations import fallback_names
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("authentik_blueprints", "0002_blueprintinstance_content"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(fallback_names("authentik_blueprints", "blueprintinstance", "name")),
|
||||
migrations.AlterField(
|
||||
model_name="blueprintinstance",
|
||||
name="name",
|
||||
field=models.TextField(unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="blueprintinstance",
|
||||
name="managed",
|
||||
field=models.TextField(
|
||||
default=None,
|
||||
help_text="Objects that are managed by authentik. These objects are created and updated automatically. This flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
|
||||
null=True,
|
||||
unique=True,
|
||||
verbose_name="Managed by authentik",
|
||||
),
|
||||
),
|
||||
]
|
@ -1,58 +1,43 @@
|
||||
"""blueprint models"""
|
||||
from pathlib import Path
|
||||
from urllib.parse import urlparse
|
||||
from uuid import uuid4
|
||||
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from opencontainers.distribution.reggie import (
|
||||
NewClient,
|
||||
WithDebug,
|
||||
WithDefaultName,
|
||||
WithDigest,
|
||||
WithReference,
|
||||
WithUserAgent,
|
||||
WithUsernamePassword,
|
||||
)
|
||||
from requests.exceptions import RequestException
|
||||
from rest_framework.serializers import Serializer
|
||||
from structlog import get_logger
|
||||
|
||||
from authentik.blueprints.v1.oci import OCI_PREFIX, BlueprintOCIClient, OCIException
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.models import CreatedUpdatedModel, SerializerModel
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.lib.utils.http import authentik_user_agent
|
||||
|
||||
OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class BlueprintRetrievalFailed(SentryIgnoredException):
|
||||
"""Error raised when we're unable to fetch the blueprint contents, whether it be HTTP files
|
||||
"""Error raised when we are unable to fetch the blueprint contents, whether it be HTTP files
|
||||
not being accessible or local files not being readable"""
|
||||
|
||||
|
||||
class ManagedModel(models.Model):
|
||||
"""Model which can be managed by authentik exclusively"""
|
||||
"""Model that can be managed by authentik exclusively"""
|
||||
|
||||
managed = models.TextField(
|
||||
default=None,
|
||||
null=True,
|
||||
verbose_name=_("Managed by authentik"),
|
||||
help_text=_(
|
||||
(
|
||||
"Objects which are managed by authentik. These objects are created and updated "
|
||||
"automatically. This is flag only indicates that an object can be overwritten by "
|
||||
"migrations. You can still modify the objects via the API, but expect changes "
|
||||
"to be overwritten in a later update."
|
||||
)
|
||||
"Objects that are managed by authentik. These objects are created and updated "
|
||||
"automatically. This flag only indicates that an object can be overwritten by "
|
||||
"migrations. You can still modify the objects via the API, but expect changes "
|
||||
"to be overwritten in a later update."
|
||||
),
|
||||
unique=True,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
|
||||
abstract = True
|
||||
|
||||
|
||||
@ -72,9 +57,10 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
|
||||
instance_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
|
||||
|
||||
name = models.TextField()
|
||||
name = models.TextField(unique=True)
|
||||
metadata = models.JSONField(default=dict)
|
||||
path = models.TextField()
|
||||
path = models.TextField(default="", blank=True)
|
||||
content = models.TextField(default="", blank=True)
|
||||
context = models.JSONField(default=dict)
|
||||
last_applied = models.DateTimeField(auto_now=True)
|
||||
last_applied_hash = models.TextField()
|
||||
@ -86,60 +72,32 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
|
||||
def retrieve_oci(self) -> str:
|
||||
"""Get blueprint from an OCI registry"""
|
||||
url = urlparse(self.path)
|
||||
ref = "latest"
|
||||
path = url.path[1:]
|
||||
if ":" in url.path:
|
||||
path, _, ref = path.partition(":")
|
||||
client = NewClient(
|
||||
f"https://{url.hostname}",
|
||||
WithUserAgent(authentik_user_agent()),
|
||||
WithUsernamePassword(url.username, url.password),
|
||||
WithDefaultName(path),
|
||||
WithDebug(True),
|
||||
)
|
||||
LOGGER.info("Fetching OCI manifests for blueprint", instance=self)
|
||||
manifest_request = client.NewRequest(
|
||||
"GET",
|
||||
"/v2/<name>/manifests/<reference>",
|
||||
WithReference(ref),
|
||||
).SetHeader("Accept", "application/vnd.oci.image.manifest.v1+json")
|
||||
client = BlueprintOCIClient(self.path.replace(OCI_PREFIX, "https://"))
|
||||
try:
|
||||
manifest_response = client.Do(manifest_request)
|
||||
manifest_response.raise_for_status()
|
||||
except RequestException as exc:
|
||||
manifests = client.fetch_manifests()
|
||||
return client.fetch_blobs(manifests)
|
||||
except OCIException as exc:
|
||||
raise BlueprintRetrievalFailed(exc) from exc
|
||||
manifest = manifest_response.json()
|
||||
if "errors" in manifest:
|
||||
raise BlueprintRetrievalFailed(manifest["errors"])
|
||||
|
||||
blob = None
|
||||
for layer in manifest.get("layers", []):
|
||||
if layer.get("mediaType", "") == OCI_MEDIA_TYPE:
|
||||
blob = layer.get("digest")
|
||||
LOGGER.debug("Found layer with matching media type", instance=self, blob=blob)
|
||||
if not blob:
|
||||
raise BlueprintRetrievalFailed("Blob not found")
|
||||
|
||||
blob_request = client.NewRequest(
|
||||
"GET",
|
||||
"/v2/<name>/blobs/<digest>",
|
||||
WithDigest(blob),
|
||||
)
|
||||
def retrieve_file(self) -> str:
"""Get blueprint from path"""
try:
blob_response = client.Do(blob_request)
blob_response.raise_for_status()
return blob_response.text
except RequestException as exc:
base = Path(CONFIG.get("blueprints_dir"))
full_path = base.joinpath(Path(self.path)).resolve()
if not str(full_path).startswith(str(base.resolve())):
raise BlueprintRetrievalFailed("Invalid blueprint path")
with full_path.open("r", encoding="utf-8") as _file:
return _file.read()
except (IOError, OSError) as exc:
raise BlueprintRetrievalFailed(exc) from exc

def retrieve(self) -> str:
"""Retrieve blueprint contents"""
if self.path.startswith("oci://"):
if self.path.startswith(OCI_PREFIX):
return self.retrieve_oci()
full_path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
with full_path.open("r", encoding="utf-8") as _file:
return _file.read()
if self.path != "":
return self.retrieve_file()
return self.content
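The directory-containment check in retrieve_file can be illustrated with a small stdlib-only sketch; the directory and the traversal attempt here are made up, while the real code reads the base directory from CONFIG:

from pathlib import Path


def read_blueprint(blueprints_dir: str, relative_path: str) -> str:
    # Resolve the joined path and refuse anything that escapes the base directory
    base = Path(blueprints_dir).resolve()
    full_path = base.joinpath(relative_path).resolve()
    if not str(full_path).startswith(str(base)):
        raise ValueError(f"Invalid blueprint path: {relative_path}")
    return full_path.read_text(encoding="utf-8")


# A traversal attempt like the one exercised in the tests below is rejected
try:
    read_blueprint("/tmp/blueprints", "../etc/hosts")
except ValueError as exc:
    print(exc)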
|
||||
|
||||
@property
|
||||
def serializer(self) -> Serializer:
|
||||
@ -151,7 +109,6 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
return f"Blueprint Instance {self.name}"
|
||||
|
||||
class Meta:
|
||||
|
||||
verbose_name = _("Blueprint Instance")
|
||||
verbose_name_plural = _("Blueprint Instances")
|
||||
unique_together = (
|
||||
|
@ -5,8 +5,13 @@ from authentik.lib.utils.time import fqdn_rand
|
||||
|
||||
CELERY_BEAT_SCHEDULE = {
|
||||
"blueprints_v1_discover": {
|
||||
"task": "authentik.blueprints.v1.tasks.blueprints_discover",
|
||||
"task": "authentik.blueprints.v1.tasks.blueprints_discovery",
|
||||
"schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"),
|
||||
"options": {"queue": "authentik_scheduled"},
|
||||
},
|
||||
"blueprints_v1_cleanup": {
|
||||
"task": "authentik.blueprints.v1.tasks.clear_failed_blueprints",
|
||||
"schedule": crontab(minute=fqdn_rand("blueprints_v1_cleanup"), hour="*"),
|
||||
"options": {"queue": "authentik_scheduled"},
|
||||
},
|
||||
}
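A runnable stand-alone sketch of the schedule shape above; the task names come from the diff, while fqdn_rand() is authentik-specific, so fixed minute values stand in for it here:

from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "blueprints_v1_discover": {
        "task": "authentik.blueprints.v1.tasks.blueprints_discovery",
        # run once per hour; the real config spreads the minute per host via fqdn_rand()
        "schedule": crontab(minute=30, hour="*"),
        "options": {"queue": "authentik_scheduled"},
    },
    "blueprints_v1_cleanup": {
        "task": "authentik.blueprints.v1.tasks.clear_failed_blueprints",
        "schedule": crontab(minute=45, hour="*"),
        "options": {"queue": "authentik_scheduled"},
    },
}

print(CELERY_BEAT_SCHEDULE["blueprints_v1_cleanup"]["schedule"])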
|
||||
|
@ -1,6 +1,5 @@
|
||||
"""Blueprint helpers"""
|
||||
from functools import wraps
|
||||
from pathlib import Path
|
||||
from typing import Callable
|
||||
|
||||
from django.apps import apps
|
||||
@ -45,13 +44,3 @@ def reconcile_app(app_name: str):
|
||||
return wrapper
|
||||
|
||||
return wrapper_outer
|
||||
|
||||
|
||||
def load_yaml_fixture(path: str, **kwargs) -> str:
|
||||
"""Load yaml fixture, optionally formatting it with kwargs"""
|
||||
with open(Path(__file__).resolve().parent / Path(path), "r", encoding="utf-8") as _fixture:
|
||||
fixture = _fixture.read()
|
||||
try:
|
||||
return fixture % kwargs
|
||||
except TypeError:
|
||||
return fixture
|
||||
|
authentik/blueprints/tests/fixtures/conditional_fields.yaml (vendored, new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
version: 1
|
||||
metadata:
|
||||
name: test conditional fields
|
||||
labels:
|
||||
blueprints.goauthentik.io/description: |
|
||||
Some models have conditional fields that are only allowed in blueprint contexts
|
||||
- Token (key)
|
||||
- Application (icon)
|
||||
- Source (icon)
|
||||
- Flow (background)
|
||||
entries:
|
||||
- model: authentik_core.token
|
||||
identifiers:
|
||||
identifier: "%(uid)s-token"
|
||||
attrs:
|
||||
key: "%(uid)s"
|
||||
user: "%(user)s"
|
||||
intent: api
|
||||
- model: authentik_core.application
|
||||
identifiers:
|
||||
slug: "%(uid)s-app"
|
||||
attrs:
|
||||
name: "%(uid)s-app"
|
||||
icon: https://goauthentik.io/img/icon.png
|
||||
- model: authentik_sources_oauth.oauthsource
|
||||
identifiers:
|
||||
slug: "%(uid)s-source"
|
||||
attrs:
|
||||
name: "%(uid)s-source"
|
||||
provider_type: azuread
|
||||
consumer_key: "%(uid)s"
|
||||
consumer_secret: "%(uid)s"
|
||||
icon: https://goauthentik.io/img/icon.png
|
||||
- model: authentik_flows.flow
|
||||
identifiers:
|
||||
slug: "%(uid)s-flow"
|
||||
attrs:
|
||||
name: "%(uid)s-flow"
|
||||
title: "%(uid)s-flow"
|
||||
designation: authentication
|
||||
background: https://goauthentik.io/img/icon.png
|
||||
- model: authentik_core.user
|
||||
identifiers:
|
||||
username: "%(uid)s"
|
||||
attrs:
|
||||
name: "%(uid)s"
|
||||
password: "%(uid)s"
|
||||
- model: authentik_core.user
identifiers:
username: "%(uid)s-no-password"
attrs:
name: "%(uid)s"
@ -7,7 +7,5 @@ entries:
|
||||
state: absent
|
||||
- identifiers:
|
||||
name: "%(id)s"
|
||||
expression: |
|
||||
return True
|
||||
model: authentik_policies_expression.expressionpolicy
|
||||
state: absent
|
||||
|
@ -4,6 +4,7 @@ entries:
|
||||
pk: cb954fd4-65a5-4ad9-b1ee-180ee9559cf4
|
||||
model: authentik_stages_prompt.prompt
|
||||
attrs:
|
||||
name: qwerweqrq
|
||||
field_key: username
|
||||
label: Username
|
||||
type: username
|
||||
|
authentik/blueprints/tests/fixtures/tags.yaml (vendored, 56 lines changed)
@@ -3,6 +3,14 @@ context:
|
||||
foo: bar
|
||||
policy_property: name
|
||||
policy_property_value: foo-bar-baz-qux
|
||||
sequence:
|
||||
- foo
|
||||
- bar
|
||||
mapping:
|
||||
key1: value
|
||||
key2: 2
|
||||
context1: context-nested-value
|
||||
context2: !Context context1
|
||||
entries:
|
||||
- model: !Format ["%s", authentik_sources_oauth.oauthsource]
|
||||
state: !Format ["%s", present]
|
||||
@ -19,7 +27,7 @@ entries:
|
||||
[slug, default-source-authentication],
|
||||
]
|
||||
enrollment_flow:
|
||||
!Find [authentik_flows.Flow, [slug, default-source-enrollment]]
|
||||
!Find [!Format ["%s", authentik_flows.Flow], [slug, default-source-enrollment]]
|
||||
- attrs:
|
||||
expression: return True
|
||||
identifiers:
|
||||
@ -28,6 +36,7 @@ entries:
|
||||
model: authentik_policies_expression.expressionpolicy
|
||||
- attrs:
|
||||
attributes:
|
||||
env_null: !Env [bar-baz, null]
|
||||
policy_pk1:
|
||||
!Format [
|
||||
"%s-%s",
|
||||
@ -91,7 +100,52 @@ entries:
|
||||
[list, with, items, !Format ["foo-%s", !Context foo]],
|
||||
]
|
||||
if_true_simple: !If [!Context foo, true, text]
|
||||
if_short: !If [!Context foo]
|
||||
if_false_simple: !If [null, false, 2]
|
||||
enumerate_mapping_to_mapping: !Enumerate [
|
||||
!Context mapping,
|
||||
MAP,
|
||||
[!Format ["prefix-%s", !Index 0], !Format ["other-prefix-%s", !Value 0]]
|
||||
]
|
||||
enumerate_mapping_to_sequence: !Enumerate [
|
||||
!Context mapping,
|
||||
SEQ,
|
||||
!Format ["prefixed-pair-%s-%s", !Index 0, !Value 0]
|
||||
]
|
||||
enumerate_sequence_to_sequence: !Enumerate [
|
||||
!Context sequence,
|
||||
SEQ,
|
||||
!Format ["prefixed-items-%s-%s", !Index 0, !Value 0]
|
||||
]
|
||||
enumerate_sequence_to_mapping: !Enumerate [
|
||||
!Context sequence,
|
||||
MAP,
|
||||
[!Format ["index: %d", !Index 0], !Value 0]
|
||||
]
|
||||
nested_complex_enumeration: !Enumerate [
|
||||
!Context sequence,
|
||||
MAP,
|
||||
[
|
||||
!Index 0,
|
||||
!Enumerate [
|
||||
!Context mapping,
|
||||
MAP,
|
||||
[
|
||||
!Format ["%s", !Index 0],
|
||||
[
|
||||
!Enumerate [!Value 2, SEQ, !Format ["prefixed-%s", !Value 0]],
|
||||
{
|
||||
outer_value: !Value 1,
|
||||
outer_index: !Index 1,
|
||||
middle_value: !Value 0,
|
||||
middle_index: !Index 0
|
||||
}
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
nested_context: !Context context2
|
||||
identifiers:
|
||||
name: test
|
||||
conditions:
|
||||
|
@ -1,34 +1,15 @@
|
||||
"""authentik managed models tests"""
|
||||
from typing import Callable, Type
|
||||
|
||||
from django.apps import apps
|
||||
from django.test import TestCase
|
||||
|
||||
from authentik.blueprints.v1.importer import is_model_allowed
|
||||
from authentik.lib.models import SerializerModel
|
||||
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
|
||||
class TestModels(TestCase):
|
||||
"""Test Models"""
|
||||
|
||||
|
||||
def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
|
||||
"""Test serializer"""
|
||||
|
||||
def tester(self: TestModels):
|
||||
if test_model._meta.abstract: # pragma: no cover
|
||||
return
|
||||
model_class = test_model()
|
||||
self.assertTrue(isinstance(model_class, SerializerModel))
|
||||
self.assertIsNotNone(model_class.serializer)
|
||||
|
||||
return tester
|
||||
|
||||
|
||||
for app in apps.get_app_configs():
|
||||
if not app.label.startswith("authentik"):
|
||||
continue
|
||||
for model in app.get_models():
|
||||
if not is_model_allowed(model):
|
||||
continue
|
||||
setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model))
|
||||
def test_retrieve_file(self):
|
||||
"""Test retrieve_file"""
|
||||
instance = BlueprintInstance.objects.create(name=generate_id(), path="../etc/hosts")
|
||||
with self.assertRaises(BlueprintRetrievalFailed):
|
||||
instance.retrieve()
|
||||
|
@ -2,7 +2,8 @@
|
||||
from django.test import TransactionTestCase
|
||||
from requests_mock import Mocker
|
||||
|
||||
from authentik.blueprints.models import OCI_MEDIA_TYPE, BlueprintInstance, BlueprintRetrievalFailed
|
||||
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
|
||||
from authentik.blueprints.v1.oci import OCI_MEDIA_TYPE
|
||||
|
||||
|
||||
class TestBlueprintOCI(TransactionTestCase):
|
||||
@ -31,6 +32,29 @@ class TestBlueprintOCI(TransactionTestCase):
|
||||
"foo",
|
||||
)
|
||||
|
||||
def test_successful_port(self):
|
||||
"""Successful retrieval with custom port"""
|
||||
with Mocker() as mocker:
|
||||
mocker.get(
|
||||
"https://ghcr.io:1234/v2/goauthentik/blueprints/test/manifests/latest",
|
||||
json={
|
||||
"layers": [
|
||||
{
|
||||
"mediaType": OCI_MEDIA_TYPE,
|
||||
"digest": "foo",
|
||||
}
|
||||
]
|
||||
},
|
||||
)
|
||||
mocker.get("https://ghcr.io:1234/v2/goauthentik/blueprints/test/blobs/foo", text="foo")
|
||||
|
||||
self.assertEqual(
BlueprintInstance(
path="oci://ghcr.io:1234/goauthentik/blueprints/test:latest"
).retrieve(),
"foo",
)
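The manifest-then-blob flow these tests mock can be reproduced in a self-contained way with requests_mock, so it runs offline. URLs, digest, and blob text mirror the mocked values in the tests; nothing here touches a real registry or authentik's OCI client:

import requests
from requests_mock import Mocker

OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
base = "https://ghcr.io/v2/goauthentik/blueprints/test"

with Mocker() as mocker:
    mocker.get(
        f"{base}/manifests/latest",
        json={"layers": [{"mediaType": OCI_MEDIA_TYPE, "digest": "foo"}]},
    )
    mocker.get(f"{base}/blobs/foo", text="version: 1")

    # Fetch the manifest, pick the layer with the blueprint media type, fetch its blob
    manifest = requests.get(
        f"{base}/manifests/latest",
        headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
    ).json()
    digest = next(
        layer["digest"]
        for layer in manifest["layers"]
        if layer["mediaType"] == OCI_MEDIA_TYPE
    )
    print(requests.get(f"{base}/blobs/{digest}").text)  # "version: 1"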
|
||||
|
||||
def test_manifests_error(self):
|
||||
"""Test manifests request erroring"""
|
||||
with Mocker() as mocker:
|
||||
|
@ -13,7 +13,7 @@ from authentik.tenants.models import Tenant
|
||||
class TestPackaged(TransactionTestCase):
|
||||
"""Empty class, test methods are added dynamically"""
|
||||
|
||||
@apply_blueprint("default/90-default-tenant.yaml")
|
||||
@apply_blueprint("default/default-tenant.yaml")
|
||||
def test_decorator_static(self):
|
||||
"""Test @apply_blueprint decorator"""
|
||||
self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())
|
||||
|
authentik/blueprints/tests/test_serializer_models.py (new file, 34 lines)
@@ -0,0 +1,34 @@
|
||||
"""authentik managed models tests"""
|
||||
from typing import Callable, Type
|
||||
|
||||
from django.apps import apps
|
||||
from django.test import TestCase
|
||||
|
||||
from authentik.blueprints.v1.importer import is_model_allowed
|
||||
from authentik.lib.models import SerializerModel
|
||||
|
||||
|
||||
class TestModels(TestCase):
|
||||
"""Test Models"""
|
||||
|
||||
|
||||
def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
|
||||
"""Test serializer"""
|
||||
|
||||
def tester(self: TestModels):
|
||||
if test_model._meta.abstract: # pragma: no cover
|
||||
return
|
||||
model_class = test_model()
|
||||
self.assertTrue(isinstance(model_class, SerializerModel))
|
||||
self.assertIsNotNone(model_class.serializer)
|
||||
|
||||
return tester
|
||||
|
||||
|
||||
for app in apps.get_app_configs():
|
||||
if not app.label.startswith("authentik"):
|
||||
continue
|
||||
for model in app.get_models():
|
||||
if not is_model_allowed(model):
|
||||
continue
|
||||
setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model))
|
@ -3,12 +3,12 @@ from os import environ
|
||||
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from authentik.blueprints.tests import load_yaml_fixture
|
||||
from authentik.blueprints.v1.exporter import FlowExporter
|
||||
from authentik.blueprints.v1.importer import Importer, transaction_rollback
|
||||
from authentik.core.models import Group
|
||||
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
from authentik.policies.expression.models import ExpressionPolicy
|
||||
from authentik.policies.models import PolicyBinding
|
||||
from authentik.sources.oauth.models import OAuthSource
|
||||
@ -24,18 +24,14 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
importer = Importer('{"version": 3}')
|
||||
self.assertFalse(importer.validate()[0])
|
||||
importer = Importer(
|
||||
(
|
||||
'{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
|
||||
'"model": "authentik_core.User"}]}'
|
||||
)
|
||||
'{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
|
||||
'"model": "authentik_core.User"}]}'
|
||||
)
|
||||
self.assertFalse(importer.validate()[0])
|
||||
importer = Importer(
|
||||
(
|
||||
'{"version": 1, "entries": [{"attrs": {"name": "test"}, '
|
||||
'"identifiers": {}, '
|
||||
'"model": "authentik_core.Group"}]}'
|
||||
)
|
||||
'{"version": 1, "entries": [{"attrs": {"name": "test"}, '
|
||||
'"identifiers": {}, '
|
||||
'"model": "authentik_core.Group"}]}'
|
||||
)
|
||||
self.assertFalse(importer.validate()[0])
|
||||
|
||||
@ -59,11 +55,9 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
)
|
||||
|
||||
importer = Importer(
|
||||
(
|
||||
'{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
|
||||
'{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
|
||||
'["other_value"]}}, "model": "authentik_core.Group"}]}'
|
||||
)
|
||||
'{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
|
||||
'{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
|
||||
'["other_value"]}}, "model": "authentik_core.Group"}]}'
|
||||
)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
@ -119,14 +113,14 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
"""Test export and import it twice"""
|
||||
count_initial = Prompt.objects.filter(field_key="username").count()
|
||||
|
||||
importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
|
||||
importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
|
||||
count_before = Prompt.objects.filter(field_key="username").count()
|
||||
self.assertEqual(count_initial + 1, count_before)
|
||||
|
||||
importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
|
||||
importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
|
||||
self.assertTrue(importer.apply())
|
||||
|
||||
self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)
|
||||
@ -136,7 +130,7 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
|
||||
Group.objects.filter(name="test").delete()
|
||||
environ["foo"] = generate_id()
|
||||
importer = Importer(load_yaml_fixture("fixtures/tags.yaml"), {"bar": "baz"})
|
||||
importer = Importer(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()
|
||||
@ -161,9 +155,67 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
},
|
||||
"if_false_complex": ["list", "with", "items", "foo-bar"],
|
||||
"if_true_simple": True,
|
||||
"if_short": True,
|
||||
"if_false_simple": 2,
|
||||
"enumerate_mapping_to_mapping": {
|
||||
"prefix-key1": "other-prefix-value",
|
||||
"prefix-key2": "other-prefix-2",
|
||||
},
|
||||
"enumerate_mapping_to_sequence": [
|
||||
"prefixed-pair-key1-value",
|
||||
"prefixed-pair-key2-2",
|
||||
],
|
||||
"enumerate_sequence_to_sequence": [
|
||||
"prefixed-items-0-foo",
|
||||
"prefixed-items-1-bar",
|
||||
],
|
||||
"enumerate_sequence_to_mapping": {"index: 0": "foo", "index: 1": "bar"},
|
||||
"nested_complex_enumeration": {
|
||||
"0": {
|
||||
"key1": [
|
||||
["prefixed-f", "prefixed-o", "prefixed-o"],
|
||||
{
|
||||
"outer_value": "foo",
|
||||
"outer_index": 0,
|
||||
"middle_value": "value",
|
||||
"middle_index": "key1",
|
||||
},
|
||||
],
|
||||
"key2": [
|
||||
["prefixed-f", "prefixed-o", "prefixed-o"],
|
||||
{
|
||||
"outer_value": "foo",
|
||||
"outer_index": 0,
|
||||
"middle_value": 2,
|
||||
"middle_index": "key2",
|
||||
},
|
||||
],
|
||||
},
|
||||
"1": {
|
||||
"key1": [
|
||||
["prefixed-b", "prefixed-a", "prefixed-r"],
|
||||
{
|
||||
"outer_value": "bar",
|
||||
"outer_index": 1,
|
||||
"middle_value": "value",
|
||||
"middle_index": "key1",
|
||||
},
|
||||
],
|
||||
"key2": [
|
||||
["prefixed-b", "prefixed-a", "prefixed-r"],
|
||||
{
|
||||
"outer_value": "bar",
|
||||
"outer_index": 1,
|
||||
"middle_value": 2,
|
||||
"middle_index": "key2",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
"nested_context": "context-nested-value",
|
||||
"env_null": None,
|
||||
}
|
||||
)
|
||||
).exists()
|
||||
)
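The !Enumerate results asserted above can be reproduced in plain Python. This only illustrates the SEQ/MAP semantics (SEQ builds a list from the item body, MAP merges key/value pairs into a dict); it is not authentik's implementation:

mapping = {"key1": "value", "key2": 2}
sequence = ["foo", "bar"]

# !Enumerate [<iterable>, SEQ, <body using !Index 0 / !Value 0>]
enumerate_mapping_to_sequence = [
    f"prefixed-pair-{index}-{value}" for index, value in mapping.items()
]

# !Enumerate [<iterable>, MAP, [<key body>, <value body>]]
enumerate_sequence_to_mapping = {
    f"index: {index}": value for index, value in enumerate(sequence)
}

print(enumerate_mapping_to_sequence)  # ['prefixed-pair-key1-value', 'prefixed-pair-key2-2']
print(enumerate_sequence_to_mapping)  # {'index: 0': 'foo', 'index: 1': 'bar'}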
|
||||
self.assertTrue(
|
||||
OAuthSource.objects.filter(
|
||||
@ -207,15 +259,21 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
with transaction_rollback():
|
||||
# First stage fields
|
||||
username_prompt = Prompt.objects.create(
|
||||
field_key="username", label="Username", order=0, type=FieldTypes.TEXT
|
||||
name=generate_id(),
|
||||
field_key="username",
|
||||
label="Username",
|
||||
order=0,
|
||||
type=FieldTypes.TEXT,
|
||||
)
|
||||
password = Prompt.objects.create(
|
||||
name=generate_id(),
|
||||
field_key="password",
|
||||
label="Password",
|
||||
order=1,
|
||||
type=FieldTypes.PASSWORD,
|
||||
)
|
||||
password_repeat = Prompt.objects.create(
|
||||
name=generate_id(),
|
||||
field_key="password_repeat",
|
||||
label="Password (repeat)",
|
||||
order=2,
|
||||
|
@ -43,3 +43,39 @@ class TestBlueprintsV1API(APITestCase):
|
||||
"6871c0003f5c07be5c3316d9d4a08444bd8fed1b3f03294e51e44522"
|
||||
),
|
||||
)
|
||||
|
||||
def test_api_oci(self):
|
||||
"""Test validation with OCI path"""
|
||||
res = self.client.post(
|
||||
reverse("authentik_api:blueprintinstance-list"),
|
||||
data={"name": "foo", "path": "oci://foo/bar"},
|
||||
)
|
||||
self.assertEqual(res.status_code, 201)
|
||||
|
||||
def test_api_blank(self):
|
||||
"""Test blank"""
|
||||
res = self.client.post(
|
||||
reverse("authentik_api:blueprintinstance-list"),
|
||||
data={
|
||||
"name": "foo",
|
||||
},
|
||||
)
|
||||
self.assertEqual(res.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
res.content.decode(), {"non_field_errors": ["Either path or content must be set."]}
|
||||
)
|
||||
|
||||
def test_api_content(self):
|
||||
"""Test blank"""
|
||||
res = self.client.post(
|
||||
reverse("authentik_api:blueprintinstance-list"),
|
||||
data={
|
||||
"name": "foo",
|
||||
"content": '{"version": 3}',
|
||||
},
|
||||
)
|
||||
self.assertEqual(res.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
res.content.decode(),
|
||||
{"content": ["Failed to validate blueprint: Invalid blueprint version"]},
|
||||
)
|
||||
|
authentik/blueprints/tests/test_v1_conditional_fields.py (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
"""Test blueprints v1"""
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.core.models import Application, Token, User
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
from authentik.sources.oauth.models import OAuthSource
|
||||
|
||||
|
||||
class TestBlueprintsV1ConditionalFields(TransactionTestCase):
|
||||
"""Test Blueprints conditional fields"""
|
||||
|
||||
def setUp(self) -> None:
|
||||
user = create_test_admin_user()
|
||||
self.uid = generate_id()
|
||||
import_yaml = load_fixture("fixtures/conditional_fields.yaml", uid=self.uid, user=user.pk)
|
||||
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
|
||||
def test_token(self):
|
||||
"""Test token"""
|
||||
token = Token.objects.filter(identifier=f"{self.uid}-token").first()
|
||||
self.assertIsNotNone(token)
|
||||
self.assertEqual(token.key, self.uid)
|
||||
|
||||
def test_application(self):
|
||||
"""Test application"""
|
||||
app = Application.objects.filter(slug=f"{self.uid}-app").first()
|
||||
self.assertIsNotNone(app)
|
||||
self.assertEqual(app.meta_icon, "https://goauthentik.io/img/icon.png")
|
||||
|
||||
def test_source(self):
|
||||
"""Test source"""
|
||||
source = OAuthSource.objects.filter(slug=f"{self.uid}-source").first()
|
||||
self.assertIsNotNone(source)
|
||||
self.assertEqual(source.icon, "https://goauthentik.io/img/icon.png")
|
||||
|
||||
def test_flow(self):
|
||||
"""Test flow"""
|
||||
flow = Flow.objects.filter(slug=f"{self.uid}-flow").first()
|
||||
self.assertIsNotNone(flow)
|
||||
self.assertEqual(flow.background, "https://goauthentik.io/img/icon.png")
|
||||
|
||||
def test_user(self):
|
||||
"""Test user"""
|
||||
user: User = User.objects.filter(username=self.uid).first()
|
||||
self.assertIsNotNone(user)
|
||||
self.assertTrue(user.check_password(self.uid))
|
||||
|
||||
def test_user_null(self):
|
||||
"""Test user"""
|
||||
user: User = User.objects.filter(username=f"{self.uid}-no-password").first()
|
||||
self.assertIsNotNone(user)
|
||||
self.assertFalse(user.has_usable_password())
|
@ -1,10 +1,10 @@
|
||||
"""Test blueprints v1"""
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from authentik.blueprints.tests import load_yaml_fixture
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
|
||||
|
||||
class TestBlueprintsV1Conditions(TransactionTestCase):
|
||||
@ -14,7 +14,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
|
||||
"""Test conditions fulfilled"""
|
||||
flow_slug1 = generate_id()
|
||||
flow_slug2 = generate_id()
|
||||
import_yaml = load_yaml_fixture(
|
||||
import_yaml = load_fixture(
|
||||
"fixtures/conditions_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
|
||||
)
|
||||
|
||||
@ -31,7 +31,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
|
||||
"""Test conditions not fulfilled"""
|
||||
flow_slug1 = generate_id()
|
||||
flow_slug2 = generate_id()
|
||||
import_yaml = load_yaml_fixture(
|
||||
import_yaml = load_fixture(
|
||||
"fixtures/conditions_not_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
|
||||
)
|
||||
|
||||
|
@ -1,10 +1,10 @@
|
||||
"""Test blueprints v1"""
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from authentik.blueprints.tests import load_yaml_fixture
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
|
||||
|
||||
class TestBlueprintsV1State(TransactionTestCase):
|
||||
@ -13,7 +13,7 @@ class TestBlueprintsV1State(TransactionTestCase):
|
||||
def test_state_present(self):
|
||||
"""Test state present"""
|
||||
flow_slug = generate_id()
|
||||
import_yaml = load_yaml_fixture("fixtures/state_present.yaml", id=flow_slug)
|
||||
import_yaml = load_fixture("fixtures/state_present.yaml", id=flow_slug)
|
||||
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
@ -39,7 +39,7 @@ class TestBlueprintsV1State(TransactionTestCase):
|
||||
def test_state_created(self):
|
||||
"""Test state created"""
|
||||
flow_slug = generate_id()
|
||||
import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)
|
||||
import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)
|
||||
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
@ -65,7 +65,7 @@ class TestBlueprintsV1State(TransactionTestCase):
|
||||
def test_state_absent(self):
|
||||
"""Test state absent"""
|
||||
flow_slug = generate_id()
|
||||
import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)
|
||||
import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)
|
||||
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
@ -74,7 +74,7 @@ class TestBlueprintsV1State(TransactionTestCase):
|
||||
flow: Flow = Flow.objects.filter(slug=flow_slug).first()
|
||||
self.assertEqual(flow.slug, flow_slug)
|
||||
|
||||
import_yaml = load_yaml_fixture("fixtures/state_absent.yaml", id=flow_slug)
|
||||
import_yaml = load_fixture("fixtures/state_absent.yaml", id=flow_slug)
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
|
@ -6,7 +6,7 @@ from django.test import TransactionTestCase
|
||||
from yaml import dump
|
||||
|
||||
from authentik.blueprints.models import BlueprintInstance, BlueprintInstanceStatus
|
||||
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discover, blueprints_find
|
||||
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discovery, blueprints_find
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
@ -53,7 +53,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
|
||||
file.seek(0)
|
||||
file_hash = sha512(file.read().encode()).hexdigest()
|
||||
file.flush()
|
||||
blueprints_discover() # pylint: disable=no-value-for-parameter
|
||||
blueprints_discovery() # pylint: disable=no-value-for-parameter
|
||||
instance = BlueprintInstance.objects.filter(name=blueprint_id).first()
|
||||
self.assertEqual(instance.last_applied_hash, file_hash)
|
||||
self.assertEqual(
|
||||
@ -81,7 +81,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
|
||||
)
|
||||
)
|
||||
file.flush()
|
||||
blueprints_discover() # pylint: disable=no-value-for-parameter
|
||||
blueprints_discovery() # pylint: disable=no-value-for-parameter
|
||||
blueprint = BlueprintInstance.objects.filter(name="foo").first()
|
||||
self.assertEqual(
|
||||
blueprint.last_applied_hash,
|
||||
@ -106,7 +106,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
|
||||
)
|
||||
)
|
||||
file.flush()
|
||||
blueprints_discover() # pylint: disable=no-value-for-parameter
|
||||
blueprints_discovery() # pylint: disable=no-value-for-parameter
|
||||
blueprint.refresh_from_db()
|
||||
self.assertEqual(
|
||||
blueprint.last_applied_hash,
|
||||
|
authentik/blueprints/urls.py (new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
"""API URLs"""
|
||||
from authentik.blueprints.api import BlueprintInstanceViewSet
|
||||
|
||||
api_urlpatterns = [
|
||||
("managed/blueprints", BlueprintInstanceViewSet),
|
||||
]
|
@ -1,13 +1,15 @@
|
||||
"""transfer common classes"""
|
||||
from collections import OrderedDict
|
||||
from copy import copy
|
||||
from dataclasses import asdict, dataclass, field, is_dataclass
|
||||
from enum import Enum
|
||||
from functools import reduce
|
||||
from operator import ixor
|
||||
from os import getenv
|
||||
from typing import Any, Literal, Optional, Union
|
||||
from typing import Any, Iterable, Literal, Mapping, Optional, Union
|
||||
from uuid import UUID
|
||||
|
||||
from deepmerge import always_merger
|
||||
from django.apps import apps
|
||||
from django.db.models import Model, Q
|
||||
from rest_framework.fields import Field
|
||||
@ -64,11 +66,13 @@ class BlueprintEntry:
|
||||
identifiers: dict[str, Any] = field(default_factory=dict)
|
||||
attrs: Optional[dict[str, Any]] = field(default_factory=dict)
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
id: Optional[str] = None
|
||||
|
||||
_state: BlueprintEntryState = field(default_factory=BlueprintEntryState)
|
||||
|
||||
def __post_init__(self, *args, **kwargs) -> None:
|
||||
self.__tag_contexts: list["YAMLTagContext"] = []
|
||||
|
||||
@staticmethod
|
||||
def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry":
|
||||
"""Convert a SerializerModel instance to a blueprint Entry"""
|
||||
@ -85,17 +89,46 @@ class BlueprintEntry:
|
||||
attrs=all_attrs,
|
||||
)
|
||||
|
||||
def _get_tag_context(
|
||||
self,
|
||||
depth: int = 0,
|
||||
context_tag_type: Optional[type["YAMLTagContext"] | tuple["YAMLTagContext", ...]] = None,
|
||||
) -> "YAMLTagContext":
|
||||
"""Get a YAMLTagContext object located at a certain depth in the tag tree"""
|
||||
if depth < 0:
|
||||
raise ValueError("depth must be a positive number or zero")
|
||||
|
||||
if context_tag_type:
|
||||
contexts = [x for x in self.__tag_contexts if isinstance(x, context_tag_type)]
|
||||
else:
|
||||
contexts = self.__tag_contexts
|
||||
|
||||
try:
|
||||
return contexts[-(depth + 1)]
|
||||
except IndexError:
|
||||
raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}")
|
||||
|
||||
def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any:
|
||||
"""Check if we have any special tags that need handling"""
|
||||
val = copy(value)
|
||||
|
||||
if isinstance(value, YAMLTagContext):
|
||||
self.__tag_contexts.append(value)
|
||||
|
||||
if isinstance(value, YAMLTag):
|
||||
return value.resolve(self, blueprint)
|
||||
val = value.resolve(self, blueprint)
|
||||
|
||||
if isinstance(value, dict):
|
||||
for key, inner_value in value.items():
|
||||
value[key] = self.tag_resolver(inner_value, blueprint)
|
||||
val[key] = self.tag_resolver(inner_value, blueprint)
|
||||
if isinstance(value, list):
|
||||
for idx, inner_value in enumerate(value):
|
||||
value[idx] = self.tag_resolver(inner_value, blueprint)
|
||||
return value
|
||||
val[idx] = self.tag_resolver(inner_value, blueprint)
|
||||
|
||||
if isinstance(value, YAMLTagContext):
|
||||
self.__tag_contexts.pop()
|
||||
|
||||
return val
|
||||
|
||||
def get_attrs(self, blueprint: "Blueprint") -> dict[str, Any]:
|
||||
"""Get attributes of this entry, with all yaml tags resolved"""
|
||||
@ -145,12 +178,19 @@ class YAMLTag:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class YAMLTagContext:
|
||||
"""Base class for all YAML Tag Contexts"""
|
||||
|
||||
def get_context(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
"""Implement yaml tag context logic"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class KeyOf(YAMLTag):
|
||||
"""Reference another object by their ID"""
|
||||
|
||||
id_from: str
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
|
||||
super().__init__()
|
||||
self.id_from = node.value
|
||||
@ -177,18 +217,17 @@ class Env(YAMLTag):
|
||||
key: str
|
||||
default: Optional[Any]
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.default = None
|
||||
if isinstance(node, ScalarNode):
|
||||
self.key = node.value
|
||||
if isinstance(node, SequenceNode):
|
||||
self.key = node.value[0].value
|
||||
self.default = node.value[1].value
|
||||
self.key = loader.construct_object(node.value[0])
self.default = loader.construct_object(node.value[1])

def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
return getenv(self.key, self.default)
return getenv(self.key) or self.default
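A stdlib-only illustration of the behaviour difference between the two return lines above: getenv(key, default) returns an empty environment variable as-is, while getenv(key) or default falls back for both unset and empty values. The variable name is arbitrary:

from os import environ, getenv

environ["EXAMPLE_VAR"] = ""

print(getenv("EXAMPLE_VAR", "fallback"))    # "" - the empty string wins over the default
print(getenv("EXAMPLE_VAR") or "fallback")  # "fallback" - empty values fall back too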
|
||||
|
||||
|
||||
class Context(YAMLTag):
|
||||
@ -197,20 +236,21 @@ class Context(YAMLTag):
|
||||
key: str
|
||||
default: Optional[Any]
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.default = None
|
||||
if isinstance(node, ScalarNode):
|
||||
self.key = node.value
|
||||
if isinstance(node, SequenceNode):
|
||||
self.key = node.value[0].value
|
||||
self.default = node.value[1].value
|
||||
self.key = loader.construct_object(node.value[0])
|
||||
self.default = loader.construct_object(node.value[1])
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
value = self.default
|
||||
if self.key in blueprint.context:
|
||||
value = blueprint.context[self.key]
|
||||
if isinstance(value, YAMLTag):
|
||||
return value.resolve(entry, blueprint)
|
||||
return value
|
||||
|
||||
|
||||
@ -220,10 +260,9 @@ class Format(YAMLTag):
|
||||
format_string: str
|
||||
args: list[Any]
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.format_string = node.value[0].value
|
||||
self.format_string = loader.construct_object(node.value[0])
|
||||
self.args = []
|
||||
for raw_node in node.value[1:]:
|
||||
self.args.append(loader.construct_object(raw_node))
|
||||
@ -245,15 +284,12 @@ class Format(YAMLTag):
|
||||
class Find(YAMLTag):
|
||||
"""Find any object"""
|
||||
|
||||
model_name: str
|
||||
model_name: str | YAMLTag
|
||||
conditions: list[list]
|
||||
|
||||
model_class: type[Model]
|
||||
|
||||
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.model_name = node.value[0].value
|
||||
self.model_class = apps.get_model(*self.model_name.split("."))
|
||||
self.model_name = loader.construct_object(node.value[0])
|
||||
self.conditions = []
|
||||
for raw_node in node.value[1:]:
|
||||
values = []
|
||||
@ -262,6 +298,13 @@ class Find(YAMLTag):
|
||||
self.conditions.append(values)
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
if isinstance(self.model_name, YAMLTag):
|
||||
model_name = self.model_name.resolve(entry, blueprint)
|
||||
else:
|
||||
model_name = self.model_name
|
||||
|
||||
model_class = apps.get_model(*model_name.split("."))
|
||||
|
||||
query = Q()
|
||||
for cond in self.conditions:
|
||||
if isinstance(cond[0], YAMLTag):
|
||||
@ -273,7 +316,7 @@ class Find(YAMLTag):
|
||||
else:
|
||||
query_value = cond[1]
|
||||
query &= Q(**{query_key: query_value})
|
||||
instance = self.model_class.objects.filter(query).first()
|
||||
instance = model_class.objects.filter(query).first()
|
||||
if instance:
|
||||
return instance.pk
|
||||
return None
|
||||
@ -296,10 +339,9 @@ class Condition(YAMLTag):
|
||||
"XNOR": lambda args: not (reduce(ixor, args) if len(args) > 1 else args[0]),
|
||||
}
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.mode = node.value[0].value
|
||||
self.mode = loader.construct_object(node.value[0])
|
||||
self.args = []
|
||||
for raw_node in node.value[1:]:
|
||||
self.args.append(loader.construct_object(raw_node))
|
||||
@ -329,12 +371,15 @@ class If(YAMLTag):
|
||||
when_true: Any
|
||||
when_false: Any
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.condition = loader.construct_object(node.value[0])
|
||||
self.when_true = loader.construct_object(node.value[1])
|
||||
self.when_false = loader.construct_object(node.value[2])
|
||||
if len(node.value) == 1:
|
||||
self.when_true = True
|
||||
self.when_false = False
|
||||
else:
|
||||
self.when_true = loader.construct_object(node.value[1])
|
||||
self.when_false = loader.construct_object(node.value[2])
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
if isinstance(self.condition, YAMLTag):
|
||||
@ -351,6 +396,133 @@ class If(YAMLTag):
|
||||
raise EntryInvalidError(exc)
|
||||
|
||||
|
||||
class Enumerate(YAMLTag, YAMLTagContext):
|
||||
"""Iterate over an iterable."""
|
||||
|
||||
iterable: YAMLTag | Iterable
|
||||
item_body: Any
|
||||
output_body: Literal["SEQ", "MAP"]
|
||||
|
||||
_OUTPUT_BODIES = {
|
||||
"SEQ": (list, lambda a, b: [*a, b]),
|
||||
"MAP": (
|
||||
dict,
|
||||
lambda a, b: always_merger.merge(
|
||||
a, {b[0]: b[1]} if isinstance(b, (tuple, list)) else b
|
||||
),
|
||||
),
|
||||
}
|
||||
|
||||
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.iterable = loader.construct_object(node.value[0])
|
||||
self.output_body = loader.construct_object(node.value[1])
|
||||
self.item_body = loader.construct_object(node.value[2])
|
||||
self.__current_context: tuple[Any, Any] = tuple()
|
||||
|
||||
def get_context(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
return self.__current_context
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
if isinstance(self.iterable, EnumeratedItem) and self.iterable.depth == 0:
|
||||
raise EntryInvalidError(
|
||||
f"{self.__class__.__name__} tag's iterable references this tag's context. "
|
||||
"This is a noop. Check you are setting depth bigger than 0."
|
||||
)
|
||||
|
||||
if isinstance(self.iterable, YAMLTag):
|
||||
iterable = self.iterable.resolve(entry, blueprint)
|
||||
else:
|
||||
iterable = self.iterable
|
||||
|
||||
if not isinstance(iterable, Iterable):
|
||||
raise EntryInvalidError(
|
||||
f"{self.__class__.__name__}'s iterable must be an iterable "
|
||||
"such as a sequence or a mapping"
|
||||
)
|
||||
|
||||
if isinstance(iterable, Mapping):
|
||||
iterable = tuple(iterable.items())
|
||||
else:
|
||||
iterable = tuple(enumerate(iterable))
|
||||
|
||||
try:
|
||||
output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()]
|
||||
except KeyError as exc:
|
||||
raise EntryInvalidError(exc)
|
||||
|
||||
result = output_class()
|
||||
|
||||
self.__current_context = tuple()
|
||||
|
||||
try:
|
||||
for item in iterable:
|
||||
self.__current_context = item
|
||||
resolved_body = entry.tag_resolver(self.item_body, blueprint)
|
||||
result = add_fn(result, resolved_body)
|
||||
if not isinstance(result, output_class):
|
||||
raise EntryInvalidError(
|
||||
f"Invalid {self.__class__.__name__} item found: {resolved_body}"
|
||||
)
|
||||
finally:
|
||||
self.__current_context = tuple()
|
||||
|
||||
return result
|
||||
|
||||
|
||||
class EnumeratedItem(YAMLTag):
|
||||
"""Get the current item value and index provided by an Enumerate tag context"""
|
||||
|
||||
depth: int
|
||||
|
||||
_SUPPORTED_CONTEXT_TAGS = (Enumerate,)
|
||||
|
||||
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
|
||||
super().__init__()
|
||||
self.depth = int(node.value)
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
try:
|
||||
context_tag: Enumerate = entry._get_tag_context(
|
||||
depth=self.depth,
|
||||
context_tag_type=EnumeratedItem._SUPPORTED_CONTEXT_TAGS,
|
||||
)
|
||||
except ValueError as exc:
|
||||
if self.depth == 0:
|
||||
raise EntryInvalidError(
|
||||
f"{self.__class__.__name__} tags are only usable "
|
||||
f"inside an {Enumerate.__name__} tag"
|
||||
)
|
||||
|
||||
raise EntryInvalidError(f"{self.__class__.__name__} tag: {exc}")
|
||||
|
||||
return context_tag.get_context(entry, blueprint)
|
||||
|
||||
|
||||
class Index(EnumeratedItem):
|
||||
"""Get the current item index provided by an Enumerate tag context"""
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
context = super().resolve(entry, blueprint)
|
||||
|
||||
try:
|
||||
return context[0]
|
||||
except IndexError: # pragma: no cover
|
||||
raise EntryInvalidError(f"Empty/invalid context: {context}")
|
||||
|
||||
|
||||
class Value(EnumeratedItem):
|
||||
"""Get the current item value provided by an Enumerate tag context"""
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
context = super().resolve(entry, blueprint)
|
||||
|
||||
try:
|
||||
return context[1]
|
||||
except IndexError: # pragma: no cover
|
||||
raise EntryInvalidError(f"Empty/invalid context: {context}")
|
||||
|
||||
|
||||
class BlueprintDumper(SafeDumper):
|
||||
"""Dump dataclasses to yaml"""
|
||||
|
||||
@ -394,6 +566,9 @@ class BlueprintLoader(SafeLoader):
|
||||
self.add_constructor("!Condition", Condition)
|
||||
self.add_constructor("!If", If)
|
||||
self.add_constructor("!Env", Env)
|
||||
self.add_constructor("!Enumerate", Enumerate)
|
||||
self.add_constructor("!Value", Value)
|
||||
self.add_constructor("!Index", Index)
|
||||
|
||||
|
||||
class EntryInvalidError(SentryIgnoredException):
|
||||
|
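With the constructors registered above, a blueprint document can combine the new !Enumerate/!Index/!Value tags with the reworked !Env default handling. As a rough illustration of how the loader turns these tags into objects (the YAML snippet, key names and environment variable below are made up for this sketch, not taken from the diff):

    # Sketch only: assumes the BlueprintLoader and Env shown in the hunks above are importable.
    from yaml import load

    from authentik.blueprints.v1.common import BlueprintLoader, Env

    document = """
    port: !Env [AUTHENTIK_DEMO_PORT, "9000"]
    names: !Enumerate [["alice", "bob"], SEQ, !Value 0]
    """
    parsed = load(document, Loader=BlueprintLoader)

    # Tags are not resolved at parse time; the loader only constructs tag objects,
    # which are resolved later against a BlueprintEntry and its Blueprint.
    assert isinstance(parsed["port"], Env)

    # Note the behaviour change in Env.resolve(): `getenv(key) or default` now also
    # falls back to "9000" when AUTHENTIK_DEMO_PORT is set but empty.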
@@ -7,6 +7,7 @@ from dacite.config import Config
from dacite.core import from_dict
from dacite.exceptions import DaciteError
from deepmerge import always_merger
from django.core.exceptions import FieldError
from django.db import transaction
from django.db.models import Model
from django.db.models.query_utils import Q

@@ -34,11 +35,16 @@ from authentik.core.models import (
    Source,
    UserSourceConnection,
)
from authentik.events.utils import cleanse_dict
from authentik.flows.models import FlowToken, Stage
from authentik.lib.models import SerializerModel
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel

# Context set when the serializer is created in a blueprint context
# Update website/developer-docs/blueprints/v1/models.md when used
SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"


def is_model_allowed(model: type[Model]) -> bool:
    """Check if model is allowed"""

@@ -157,7 +163,12 @@ class Importer:
            raise EntryInvalidError(f"Model {model} not allowed")
        if issubclass(model, BaseMetaModel):
            serializer_class: type[Serializer] = model.serializer()
            serializer = serializer_class(data=entry.get_attrs(self.__import))
            serializer = serializer_class(
                data=entry.get_attrs(self.__import),
                context={
                    SERIALIZER_CONTEXT_BLUEPRINT: entry,
                },
            )
            try:
                serializer.is_valid(raise_exception=True)
            except ValidationError as exc:

@@ -181,14 +192,14 @@ class Importer:
        if not query:
            raise EntryInvalidError("No or invalid identifiers")

        existing_models = model.objects.filter(query)
        try:
            existing_models = model.objects.filter(query)
        except FieldError as exc:
            raise EntryInvalidError(f"Invalid identifier field: {exc}") from exc

        serializer_kwargs = {}
        model_instance = existing_models.first()
        if not isinstance(model(), BaseMetaModel) and model_instance:
            if entry.get_state(self.__import) == BlueprintEntryDesiredState.CREATED:
                self.logger.debug("instance exists, skipping")
                return None
            self.logger.debug(
                "initialise serializer with instance",
                model=model,

@@ -199,7 +210,9 @@ class Importer:
            serializer_kwargs["partial"] = True
        else:
            self.logger.debug(
                "initialised new serializer instance", model=model, **updated_identifiers
                "initialised new serializer instance",
                model=model,
                **cleanse_dict(updated_identifiers),
            )
            model_instance = model()
            # pk needs to be set on the model instance otherwise a new one will be generated

@@ -213,7 +226,12 @@ class Importer:
        always_merger.merge(full_data, updated_identifiers)
        serializer_kwargs["data"] = full_data

        serializer: Serializer = model().serializer(**serializer_kwargs)
        serializer: Serializer = model().serializer(
            context={
                SERIALIZER_CONTEXT_BLUEPRINT: entry,
            },
            **serializer_kwargs,
        )
        try:
            serializer.is_valid(raise_exception=True)
        except ValidationError as exc:

@@ -231,8 +249,7 @@ class Importer:
                    raise IntegrityError
        except IntegrityError:
            return False
        else:
            self.logger.debug("Committing changes")
        self.logger.debug("Committing changes")
        return True

    def _apply_models(self) -> bool:

@@ -251,21 +268,34 @@ class Importer:
            try:
                serializer = self._validate_single(entry)
            except EntryInvalidError as exc:
                # For deleting objects we don't need the serializer to be valid
                if entry.get_state(self.__import) == BlueprintEntryDesiredState.ABSENT:
                    continue
                self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
                return False
            if not serializer:
                continue

            state = entry.get_state(self.__import)
            if state in [
                BlueprintEntryDesiredState.PRESENT,
                BlueprintEntryDesiredState.CREATED,
            ]:
                model = serializer.save()
            if state in [BlueprintEntryDesiredState.PRESENT, BlueprintEntryDesiredState.CREATED]:
                instance = serializer.instance
                if (
                    instance
                    and not instance._state.adding
                    and state == BlueprintEntryDesiredState.CREATED
                ):
                    self.logger.debug(
                        "instance exists, skipping",
                        model=model,
                        instance=instance,
                        pk=instance.pk,
                    )
                else:
                    instance = serializer.save()
                    self.logger.debug("updated model", model=instance)
                if "pk" in entry.identifiers:
                    self.__pk_map[entry.identifiers["pk"]] = model.pk
                entry._state = BlueprintEntryState(model)
                self.logger.debug("updated model", model=model)
                self.__pk_map[entry.identifiers["pk"]] = instance.pk
                entry._state = BlueprintEntryState(instance)
            elif state == BlueprintEntryDesiredState.ABSENT:
                instance: Optional[Model] = serializer.instance
                if instance.pk:

@@ -282,7 +312,7 @@ class Importer:
        orig_import = deepcopy(self.__import)
        if self.__import.version != 1:
            self.logger.warning("Invalid blueprint version")
            return False, []
            return False, [{"event": "Invalid blueprint version"}]
        with (
            transaction_rollback(),
            capture_logs() as logs,

@@ -292,5 +322,6 @@ class Importer:
                self.logger.debug("Blueprint validation failed")
                for log in logs:
                    getattr(self.logger, log.get("log_level"))(**log)
        self.logger.debug("Finished blueprint import validation")
        self.__import = orig_import
        return successful, logs
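In the reworked _apply_models above, an entry whose desired state is "created" is now skipped by inspecting Django's Model._state.adding on the serializer's instance instead of re-querying. A small standalone sketch of that flag's semantics (generic Django behaviour, meant to run inside any configured Django project; the model and username are placeholders):

    # Sketch: _state.adding is True for unsaved instances and becomes False once
    # the instance has been written to or loaded from the database.
    from django.contrib.auth.models import User

    user = User(username="example")
    assert user._state.adding is True      # not persisted yet

    user.save()
    assert user._state.adding is False     # now tracked as an existing row

    fetched = User.objects.get(pk=user.pk)
    assert fetched._state.adding is False  # loaded from the database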
@@ -3,3 +3,4 @@
LABEL_AUTHENTIK_SYSTEM = "blueprints.goauthentik.io/system"
LABEL_AUTHENTIK_INSTANTIATE = "blueprints.goauthentik.io/instantiate"
LABEL_AUTHENTIK_GENERATED = "blueprints.goauthentik.io/generated"
LABEL_AUTHENTIK_DESCRIPTION = "blueprints.goauthentik.io/description"
@@ -31,7 +31,7 @@ class ApplyBlueprintMetaSerializer(PassiveSerializer):
        required = attrs["required"]
        instance = BlueprintInstance.objects.filter(**identifiers).first()
        if not instance and required:
            raise ValidationError("Required blueprint does not exist")
            raise ValidationError({"identifiers": "Required blueprint does not exist"})
        self.blueprint_instance = instance
        return super().validate(attrs)

@@ -56,5 +56,4 @@ class MetaApplyBlueprint(BaseMetaModel):
        return ApplyBlueprintMetaSerializer

    class Meta:

        abstract = True

@@ -14,7 +14,6 @@ class BaseMetaModel(Model):
        raise NotImplementedError

    class Meta:

        abstract = True
authentik/blueprints/v1/oci.py (new file, 104 lines)
@@ -0,0 +1,104 @@
"""OCI Client"""
from typing import Any
from urllib.parse import ParseResult, urlparse

from opencontainers.distribution.reggie import (
    NewClient,
    WithDebug,
    WithDefaultName,
    WithDigest,
    WithReference,
    WithUserAgent,
    WithUsernamePassword,
)
from requests.exceptions import RequestException
from structlog import get_logger
from structlog.stdlib import BoundLogger

from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.http import authentik_user_agent

OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
OCI_PREFIX = "oci://"


class OCIException(SentryIgnoredException):
    """OCI-related errors"""


class BlueprintOCIClient:
    """Blueprint OCI Client"""

    url: ParseResult
    sanitized_url: str
    logger: BoundLogger
    ref: str
    client: NewClient

    def __init__(self, url: str) -> None:
        self._parse_url(url)
        self.logger = get_logger().bind(url=self.sanitized_url)

        self.ref = "latest"
        # Remove the leading slash of the path to convert it to an image name
        path = self.url.path[1:]
        if ":" in path:
            # if there's a colon in the path, use everything after it as a ref
            path, _, self.ref = path.partition(":")
        base_url = f"https://{self.url.hostname}"
        if self.url.port:
            base_url += f":{self.url.port}"
        self.client = NewClient(
            base_url,
            WithUserAgent(authentik_user_agent()),
            WithUsernamePassword(self.url.username, self.url.password),
            WithDefaultName(path),
            WithDebug(True),
        )

    def _parse_url(self, url: str):
        self.url = urlparse(url)
        netloc = self.url.netloc
        if "@" in netloc:
            netloc = netloc[netloc.index("@") + 1 :]
        self.sanitized_url = self.url._replace(netloc=netloc).geturl()

    def fetch_manifests(self) -> dict[str, Any]:
        """Fetch manifests for ref"""
        self.logger.info("Fetching OCI manifests for blueprint")
        manifest_request = self.client.NewRequest(
            "GET",
            "/v2/<name>/manifests/<reference>",
            WithReference(self.ref),
        ).SetHeader("Accept", "application/vnd.oci.image.manifest.v1+json")
        try:
            manifest_response = self.client.Do(manifest_request)
            manifest_response.raise_for_status()
        except RequestException as exc:
            raise OCIException(exc) from exc
        manifest = manifest_response.json()
        if "errors" in manifest:
            raise OCIException(manifest["errors"])
        return manifest

    def fetch_blobs(self, manifest: dict[str, Any]):
        """Fetch blob based on manifest info"""
        blob = None
        for layer in manifest.get("layers", []):
            if layer.get("mediaType", "") == OCI_MEDIA_TYPE:
                blob = layer.get("digest")
                self.logger.debug("Found layer with matching media type", blob=blob)
        if not blob:
            raise OCIException("Blob not found")

        blob_request = self.client.NewRequest(
            "GET",
            "/v2/<name>/blobs/<digest>",
            WithDigest(blob),
        )
        try:
            blob_response = self.client.Do(blob_request)
            blob_response.raise_for_status()
            return blob_response.text
        except RequestException as exc:
            raise OCIException(exc) from exc
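The new client is intended to be driven as fetch_manifests() followed by fetch_blobs(), which returns the blueprint YAML stored in the matching layer. A hedged usage sketch (the registry URL is a placeholder and error handling is reduced to the OCIException defined above):

    # Sketch: pull a blueprint that was published as an OCI artifact.
    from authentik.blueprints.v1.oci import BlueprintOCIClient, OCIException

    client = BlueprintOCIClient("oci://ghcr.io/example/demo-blueprint:latest")
    try:
        manifest = client.fetch_manifests()            # image manifest for the "latest" ref
        blueprint_yaml = client.fetch_blobs(manifest)  # text of the layer with the blueprint media type
    except OCIException as exc:
        print(f"Failed to fetch blueprint: {exc}")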
@@ -28,6 +28,7 @@ from authentik.blueprints.models import (
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, EntryInvalidError
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.events.monitored_tasks import (
    MonitoredTask,
    TaskResult,

@@ -61,7 +62,7 @@ def start_blueprint_watcher():
    if _file_watcher_started:
        return
    observer = Observer()
    observer.schedule(BlueprintEventHandler(), CONFIG.y("blueprints_dir"), recursive=True)
    observer.schedule(BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True)
    observer.start()
    _file_watcher_started = True

@@ -76,10 +77,10 @@ class BlueprintEventHandler(FileSystemEventHandler):
            return
        if isinstance(event, FileCreatedEvent):
            LOGGER.debug("new blueprint file created, starting discovery")
            blueprints_discover.delay()
            blueprints_discovery.delay()
        if isinstance(event, FileModifiedEvent):
            path = Path(event.src_path)
            root = Path(CONFIG.y("blueprints_dir")).absolute()
            root = Path(CONFIG.get("blueprints_dir")).absolute()
            rel_path = str(path.relative_to(root))
            for instance in BlueprintInstance.objects.filter(path=rel_path):
                LOGGER.debug("modified blueprint file, starting apply", instance=instance)

@@ -100,8 +101,11 @@ def blueprints_find_dict():
def blueprints_find():
    """Find blueprints and return valid ones"""
    blueprints = []
    root = Path(CONFIG.y("blueprints_dir"))
    for path in root.glob("**/*.yaml"):
    root = Path(CONFIG.get("blueprints_dir"))
    for path in root.rglob("**/*.yaml"):
        # Check if any part in the path starts with a dot and assume a hidden file
        if any(part for part in path.parts if part.startswith(".")):
            continue
        LOGGER.debug("found blueprint", path=str(path))
        with open(path, "r", encoding="utf-8") as blueprint_file:
            try:

@@ -122,7 +126,7 @@ def blueprints_find():
        )
        blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
        blueprints.append(blueprint)
        LOGGER.info(
        LOGGER.debug(
            "parsed & loaded blueprint",
            hash=file_hash,
            path=str(path),

@@ -134,7 +138,7 @@ def blueprints_find():
    throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
)
@prefill_task
def blueprints_discover(self: MonitoredTask):
def blueprints_discovery(self: MonitoredTask):
    """Find blueprints and check if they need to be created in the database"""
    count = 0
    for blueprint in blueprints_find():

@@ -181,9 +185,9 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
    instance: Optional[BlueprintInstance] = None
    try:
        instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first()
        self.set_uid(slugify(instance.name))
        if not instance or not instance.enabled:
            return
        self.set_uid(slugify(instance.name))
        blueprint_content = instance.retrieve()
        file_hash = sha512(blueprint_content.encode()).hexdigest()
        importer = Importer(blueprint_content, instance.context)

@@ -219,3 +223,14 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
    finally:
        if instance:
            instance.save()


@CELERY_APP.task()
def clear_failed_blueprints():
    """Remove blueprints which couldn't be fetched"""
    # Exclude OCI blueprints as those might be temporarily unavailable
    for blueprint in BlueprintInstance.objects.exclude(path__startswith=OCI_PREFIX):
        try:
            blueprint.retrieve()
        except BlueprintRetrievalFailed:
            blueprint.delete()
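blueprints_find() now walks the configured directory with rglob and skips anything that sits below a dot-directory before trying to parse it. A tiny standalone sketch of that hidden-path filter (the paths are made up):

    # Sketch: mirrors the "any path part starts with a dot" check used in blueprints_find().
    from pathlib import Path

    candidates = [
        Path("blueprints/default/flows.yaml"),
        Path("blueprints/.git/objects.yaml"),  # skipped: one path part starts with "."
    ]
    visible = [
        path
        for path in candidates
        if not any(part.startswith(".") for part in path.parts)
    ]
    print(visible)  # only blueprints/default/flows.yaml remains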
@@ -1,15 +1,17 @@
"""Application API Views"""
from datetime import timedelta
from typing import Optional

from django.core.cache import cache
from django.db.models import QuerySet
from django.db.models.functions import ExtractHour
from django.http.response import HttpResponseBadRequest
from django.shortcuts import get_object_or_404
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
from guardian.shortcuts import get_objects_for_user
from rest_framework.decorators import action
from rest_framework.fields import ReadOnlyField, SerializerMethodField
from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.response import Response

@@ -21,6 +23,7 @@ from structlog.testing import capture_logs

from authentik.admin.api.metrics import CoordinateSerializer
from authentik.api.decorators import permission_required
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.models import Application, User

@@ -35,7 +38,6 @@ from authentik.lib.utils.file import (
from authentik.policies.api.exec import PolicyTestResultSerializer
from authentik.policies.engine import PolicyEngine
from authentik.policies.types import PolicyResult
from authentik.stages.user_login.stage import USER_LOGIN_AUTHENTICATED

LOGGER = get_logger()

@@ -50,6 +52,9 @@ class ApplicationSerializer(ModelSerializer):

    launch_url = SerializerMethodField()
    provider_obj = ProviderSerializer(source="get_provider", required=False, read_only=True)
    backchannel_providers_obj = ProviderSerializer(
        source="backchannel_providers", required=False, read_only=True, many=True
    )

    meta_icon = ReadOnlyField(source="get_meta_icon")

@@ -60,8 +65,12 @@ class ApplicationSerializer(ModelSerializer):
        user = self.context["request"].user
        return app.get_launch_url(user)

    class Meta:
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
            self.fields["icon"] = CharField(source="meta_icon", required=False)

    class Meta:
        model = Application
        fields = [
            "pk",

@@ -69,6 +78,8 @@ class ApplicationSerializer(ModelSerializer):
            "slug",
            "provider",
            "provider_obj",
            "backchannel_providers",
            "backchannel_providers_obj",
            "launch_url",
            "open_in_new_tab",
            "meta_launch_url",

@@ -80,6 +91,7 @@ class ApplicationSerializer(ModelSerializer):
        ]
        extra_kwargs = {
            "meta_icon": {"read_only": True},
            "backchannel_providers": {"required": False},
        }

@@ -185,10 +197,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
        if superuser_full_list and request.user.is_superuser:
            return super().list(request)

        # To prevent the user from having to double login when prompt is set to login
        # and the user has just signed it. This session variable is set in the UserLoginStage
        # and is (quite hackily) removed from the session in applications's API's List method
        self.request.session.pop(USER_LOGIN_AUTHENTICATED, None)
        queryset = self._filter_queryset_for_list(self.get_queryset())
        self.paginate_queryset(queryset)

@@ -225,7 +233,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
        methods=["POST"],
        parser_classes=(MultiPartParser,),
    )
    # pylint: disable=unused-argument
    def set_icon(self, request: Request, slug: str):
        """Set application icon"""
        app: Application = self.get_object()

@@ -245,7 +252,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
        filter_backends=[],
        methods=["POST"],
    )
    # pylint: disable=unused-argument
    def set_icon_url(self, request: Request, slug: str):
        """Set application icon (as URL)"""
        app: Application = self.get_object()

@@ -254,15 +260,14 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
    @permission_required("authentik_core.view_application", ["authentik_events.view_event"])
    @extend_schema(responses={200: CoordinateSerializer(many=True)})
    @action(detail=True, pagination_class=None, filter_backends=[])
    # pylint: disable=unused-argument
    def metrics(self, request: Request, slug: str):
        """Metrics for application logins"""
        app = self.get_object()
        return Response(
            get_objects_for_user(request.user, "authentik_events.view_event")
            .filter(
            get_objects_for_user(request.user, "authentik_events.view_event").filter(
                action=EventAction.AUTHORIZE_APPLICATION,
                context__authorized_application__pk=app.pk.hex,
            )
            .get_events_per_hour()
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )
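The __init__ override added to ApplicationSerializer above only exposes an extra "icon" field (backed by meta_icon) when the serializer is created with the blueprint context key. A sketch of the difference, with placeholder data values that are not taken from the diff:

    # Sketch: the "icon" field only appears when SERIALIZER_CONTEXT_BLUEPRINT is in the context.
    from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
    from authentik.core.api.applications import ApplicationSerializer

    plain = ApplicationSerializer(data={"name": "Demo", "slug": "demo"})
    assert "icon" not in plain.fields

    from_blueprint = ApplicationSerializer(
        data={"name": "Demo", "slug": "demo", "icon": "https://example.com/icon.png"},
        # normally the BlueprintEntry being imported; any value under that key triggers the branch
        context={SERIALIZER_CONTEXT_BLUEPRINT: object()},
    )
    assert "icon" in from_blueprint.fields  # maps onto Application.meta_icon via source="meta_icon"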
@@ -74,7 +74,6 @@ class AuthenticatedSessionSerializer(ModelSerializer):
        return GEOIP_READER.city_dict(instance.last_ip)

    class Meta:

        model = AuthenticatedSession
        fields = [
            "uuid",
Some files were not shown because too many files have changed in this diff.