Compare commits
1504 Commits
version/20...version-20
SHA1 | Author | Date | |
---|---|---|---|
3925f5a208 | |||
6add4a62b9 | |||
54d5aa20ba | |||
b99ac01228 | |||
15026748d1 | |||
2739376a2a | |||
152121175b | |||
1d57a258f3 | |||
f15cac39c8 | |||
ce77d82b24 | |||
c3fe57197d | |||
267938d435 | |||
6a7c2e0662 | |||
5336afb1b4 | |||
9bb44055a3 | |||
143663d293 | |||
bd54d034e1 | |||
be85eecac5 | |||
24385c9c68 | |||
e141a11475 | |||
b055adec2a | |||
772acb10d6 | |||
a7bf963409 | |||
317afc932a | |||
5e5a74eebf | |||
fa87519536 | |||
0deaf25b1f | |||
47d5fc26cc | |||
9a996e7176 | |||
554a26442d | |||
573517bf0a | |||
2cd68dfa87 | |||
8029a13be1 | |||
6900ffffd8 | |||
873aaf85f9 | |||
9c69f67778 | |||
6cf7a72831 | |||
7e3b325929 | |||
b916b612c7 | |||
b7c5fc3f1e | |||
a3ac5ec183 | |||
d30379ba93 | |||
12815526c1 | |||
ed2f0a2d5e | |||
536d776d02 | |||
f70d6432e7 | |||
cc08bfb18b | |||
79dcc30778 | |||
68a1bcf233 | |||
cd7de4c0b9 | |||
3195a75b9a | |||
886d7832df | |||
a3595a36d2 | |||
28ac00798c | |||
f4b0d6e85c | |||
daa3c91afc | |||
5eba598584 | |||
a6b16ecc68 | |||
a41924939b | |||
0afd3b121e | |||
a58374f065 | |||
8faec99bd6 | |||
6c27a2f783 | |||
78e4c313b5 | |||
f8140e1543 | |||
4405a04b0b | |||
7eb5fa6a24 | |||
216e8aff64 | |||
c91bbecb7b | |||
084b3eb039 | |||
557aadecc0 | |||
33b8c45f5c | |||
52132112f6 | |||
ff1510dedc | |||
c3398004ff | |||
47f09ac285 | |||
259c87fa37 | |||
80bb6c6274 | |||
f8c2fc2ead | |||
ea84ee0222 | |||
a695ffc224 | |||
9e22f007a8 | |||
6299fc7f81 | |||
a032fd529b | |||
ec78e56fbd | |||
ad7dedb61f | |||
8356ceaead | |||
228197ea5e | |||
27d3d4a534 | |||
f2dcbf9b1d | |||
03e39a6557 | |||
454a09d91e | |||
61434c807d | |||
7265a56f05 | |||
95df14106c | |||
91d78b0c7d | |||
6c492fbeee | |||
f7ef8c89c2 | |||
c6c460fb48 | |||
78ecbc097c | |||
7fc350bb0b | |||
3bada52fd6 | |||
847fe6ddee | |||
312f09204b | |||
d76c823268 | |||
c8e074c363 | |||
906faf9cce | |||
c68a42f63b | |||
fd8c1d41db | |||
3704f4ccf4 | |||
eb071d4d90 | |||
1c04dc0986 | |||
639a5c429c | |||
35bae56486 | |||
0a8de6499c | |||
f164fff2e7 | |||
51a56942bc | |||
92fd6a55db | |||
b5b1ed5887 | |||
8ccdbdc370 | |||
ac57d6e820 | |||
eaa3d11df8 | |||
bb0eea1f39 | |||
87f9f85c6d | |||
4728a444b7 | |||
4d58eba027 | |||
35fa8ca3d0 | |||
cf07e930b8 | |||
afd155bbba | |||
0b0beecb49 | |||
0644a5ee3a | |||
f3b4e55af5 | |||
9c25d72d61 | |||
ad2d38fa4a | |||
b1b0cf8a87 | |||
f47b208433 | |||
b958868ea7 | |||
5fd414576b | |||
9d9616138f | |||
99e2c6911c | |||
0fa3fbf416 | |||
5ea54e8f7e | |||
8215ee19c6 | |||
9bddc9b577 | |||
c10a8ecf51 | |||
7acd0558f5 | |||
9f4be4d150 | |||
af9766972d | |||
9efc06e473 | |||
d1566acb4b | |||
3c964a3e71 | |||
01cfec62e7 | |||
cebef6a596 | |||
5fe372e84d | |||
d5a3a7552a | |||
ab17f37f0b | |||
ee883ceccc | |||
7df0e88b9d | |||
53f827b54f | |||
395dc08f05 | |||
080f2ab5e7 | |||
2a2e159a0d | |||
564b2874a9 | |||
8ded11806a | |||
36bd4b1e51 | |||
95a679ab3b | |||
5ca8eefa8b | |||
b0f5c9b010 | |||
6ae9071368 | |||
ab795e6642 | |||
b7b62ba089 | |||
7f0ccc61dd | |||
d5abaed66a | |||
64d611212e | |||
9e9769d7fb | |||
5aa744edca | |||
0a7e2e9f81 | |||
f43c0bc798 | |||
ffd3924095 | |||
ed275bce4a | |||
b99ce890ef | |||
5509bce3d7 | |||
a3f1e7a4d1 | |||
17fb4dab34 | |||
c0f3b56012 | |||
53415d8af8 | |||
ed99b3d98f | |||
6373dd2053 | |||
3f607ee2c8 | |||
da6e74a353 | |||
9b879989fe | |||
b1508b9d01 | |||
4601864f94 | |||
a2994218e4 | |||
0ae53b1ce8 | |||
d5fa9da444 | |||
91da421391 | |||
a1e67377f9 | |||
5ad379f54f | |||
0be95d377a | |||
4da66cdb6b | |||
a28b888ca4 | |||
5ec008d0d3 | |||
b06dbab4ac | |||
ab4d7ba2f0 | |||
ea806daf3e | |||
27e5f45919 | |||
8b17ab9bb0 | |||
9283e02808 | |||
d6b5359b8b | |||
77657b1f33 | |||
131a43033e | |||
fef841a458 | |||
bb8b87fcb3 | |||
f36a5a053f | |||
cc8f52b502 | |||
0b0e08446d | |||
1913b5ec41 | |||
a8332eced6 | |||
af7cc8d42d | |||
5830781a5a | |||
a7f324b96f | |||
494cfc2fea | |||
3af27323de | |||
8a6febaa02 | |||
ecce31ee87 | |||
967a38b7ac | |||
9d1ad104ec | |||
01663468de | |||
5e7731a4aa | |||
cb0fa6beb9 | |||
6f67366dfa | |||
8b7922a5cd | |||
dea44fc74d | |||
dfe8a98849 | |||
54d508ae8c | |||
7b0d8f8991 | |||
b058906074 | |||
4b0566c9d1 | |||
40dfa920e2 | |||
187d5e9b4c | |||
147312c160 | |||
4426cbec34 | |||
e05f028c0a | |||
58a5c69f49 | |||
8c7c60b271 | |||
d8c243bcd2 | |||
f7cc4349d7 | |||
13591fc72c | |||
b604ff5114 | |||
f72fa41a75 | |||
adf4191066 | |||
d2de586cc9 | |||
dad5021870 | |||
ab3f993bb9 | |||
158fe2f9bb | |||
5970a6e2a2 | |||
5c8f024d12 | |||
428daa5323 | |||
4001af4d35 | |||
f1cec03dcf | |||
574ed72b95 | |||
480f5c2aac | |||
d4e502fdf5 | |||
05b2fb5ec1 | |||
bb92c4a967 | |||
b40caf12df | |||
8ebd2d14b4 | |||
445bc05b67 | |||
7538b2f860 | |||
367f86ecfb | |||
055ead54b5 | |||
df0232358b | |||
baa3ea6585 | |||
e75e2cf324 | |||
948b83a2b2 | |||
34e9af57fe | |||
94ae490284 | |||
690f263bac | |||
6280446450 | |||
7d87f86410 | |||
0d1201f972 | |||
78b23c4bd4 | |||
7fcfc48af2 | |||
611fd96e3a | |||
4671d4afb4 | |||
07c4ef986b | |||
7d64ec5066 | |||
ee6edec1d8 | |||
04cc7817ee | |||
9ac6511548 | |||
2eee53806a | |||
c5af79f176 | |||
0477862b73 | |||
5ef5213fae | |||
6a554ef45a | |||
f44175303b | |||
dfa80543b5 | |||
5f99887b50 | |||
2502a7cece | |||
77025cdb79 | |||
ce5f6d5d43 | |||
1893626e04 | |||
edb2aa2db5 | |||
9e539d0a0e | |||
a3088b7f79 | |||
b186e35b61 | |||
2a3933f141 | |||
2f2eec0d21 | |||
9faad8a055 | |||
a94392808f | |||
c4998e7dd4 | |||
1ab587d80e | |||
5715ffd845 | |||
8c3834e6b2 | |||
f841586153 | |||
b8b681250f | |||
3ab9ee5acc | |||
1a4c640835 | |||
38bf0ee740 | |||
520fb2fac1 | |||
95adc38ff4 | |||
55ad2d7eab | |||
8160663214 | |||
6a700cb376 | |||
e123afd9ee | |||
96e732e45b | |||
6349ab60e7 | |||
2b0749af6b | |||
a5098364eb | |||
71820191a3 | |||
c08c849fec | |||
6a74fa11c6 | |||
7841720acf | |||
67644ace87 | |||
f84a10b59b | |||
200d6d6adf | |||
d0f1ebfad3 | |||
7d849d7bd7 | |||
f1dfe04786 | |||
4d7d2b8d3a | |||
a6cc0f189c | |||
18a4eac527 | |||
6dd2e2b85f | |||
7bfea87864 | |||
1ca8feb5fc | |||
c1615d044b | |||
edc9d60e22 | |||
e6b135d535 | |||
8cfad9a854 | |||
2237358633 | |||
d15cd9ce5f | |||
62abe22673 | |||
8b78570597 | |||
549e4dcb94 | |||
1480ff6732 | |||
0e1000764d | |||
8dc9b43bb5 | |||
3ce0aa54c7 | |||
b5888e79f5 | |||
25d779e879 | |||
d1fbb85821 | |||
ea307689d4 | |||
7a06c1685b | |||
977757f561 | |||
c117d98e27 | |||
711e98d049 | |||
f84c176bd0 | |||
c4b11ca861 | |||
132a353b92 | |||
bb464aad50 | |||
ab27cd0a9a | |||
241280f2b5 | |||
d110b5b661 | |||
8871a4acb2 | |||
a1ad357abd | |||
81f9842797 | |||
712256cdfe | |||
fb4808418c | |||
7c7bb9dc2e | |||
9a3809135e | |||
de13265997 | |||
0228ea9a4c | |||
faf986c231 | |||
315eae009f | |||
02f75a92ce | |||
a92786e153 | |||
157c23946e | |||
f6b33d65af | |||
ce461631b5 | |||
2f106a9049 | |||
7038431e19 | |||
3fd9b53fe6 | |||
e542783fec | |||
adcd11b1f8 | |||
6192d01b7e | |||
fd2677af1f | |||
5947c7b97e | |||
986d7bf714 | |||
6282e923d6 | |||
88b4125a6a | |||
208c2d1913 | |||
54dc0a46b4 | |||
fc807744bf | |||
9666d407b4 | |||
75510ead84 | |||
73bf6fd530 | |||
2e5a33f0c2 | |||
8c33d13dff | |||
a70de69228 | |||
ab2d39dd2a | |||
2084156f1d | |||
1d2725825c | |||
b9754f9c13 | |||
bb2e5b4861 | |||
89abc99dc0 | |||
f92c661d09 | |||
3468afc399 | |||
a286ae276b | |||
4fdd978b57 | |||
c52bd8c4b9 | |||
ca5ae5f914 | |||
4604c92046 | |||
4218ece2a5 | |||
0d6481c4d5 | |||
a7fc579202 | |||
5600261852 | |||
824737965d | |||
5476f517da | |||
d38043fe72 | |||
102570c61a | |||
238e6e3f24 | |||
89c7e61769 | |||
b097cf4d7e | |||
5c0d7f9a58 | |||
95b99e3e55 | |||
6437fbc814 | |||
d6fa19a97f | |||
1957717160 | |||
94a93adb4b | |||
5d84f2a079 | |||
5b9f35a4a1 | |||
b3dd87bbab | |||
af7189953c | |||
35d2e9cd5f | |||
9a52d8db83 | |||
14f0034a0a | |||
20522558fe | |||
f00ee5c174 | |||
95e24c9ec2 | |||
6b42e404bf | |||
9abd4b3e14 | |||
865138e7e7 | |||
7524413b22 | |||
70bdbfd5ef | |||
73a7c0c559 | |||
cafff808ab | |||
bbbbc2a718 | |||
1452f2680a | |||
dd39aab1fb | |||
524fbd5838 | |||
bb7c3456fa | |||
b611fd10a2 | |||
65b1cbc010 | |||
119f64159b | |||
1352ed7e44 | |||
34ce85fcd1 | |||
977ae4f225 | |||
a464ffe846 | |||
6757d43d33 | |||
da3222df07 | |||
54cacd784c | |||
32840d3909 | |||
eb78632853 | |||
4868d4a14d | |||
3f5effb1bc | |||
84c2da8a6e | |||
56744659e4 | |||
bad7deb52a | |||
5748e19845 | |||
16a03160d0 | |||
a566856b65 | |||
8b52d711e8 | |||
4da18b5f0c | |||
63e3f6545b | |||
e35c3d19bc | |||
ef028af7d1 | |||
b69c26d485 | |||
e13cfec84f | |||
97df7848a5 | |||
e2d3a95c80 | |||
bebf18f257 | |||
53e68b8540 | |||
9dbd54690c | |||
9e41b7d208 | |||
1c66d420c4 | |||
0ca913f8d4 | |||
b97274058c | |||
aef0333695 | |||
c847b16b3e | |||
e2e83f5631 | |||
8363016982 | |||
397b9845ec | |||
b9da24c952 | |||
1053962bec | |||
19ff8129e5 | |||
40cdf6877d | |||
2a399cf8e8 | |||
345fa1bed6 | |||
70ffb6d49e | |||
3ecdcebd35 | |||
4f02c8ab98 | |||
41974c3f82 | |||
808f697423 | |||
a9dc3ff0d8 | |||
acde584cbd | |||
df52116135 | |||
eaf56f4f3f | |||
fd9293e3e8 | |||
520de8d5b0 | |||
bbdb0df42e | |||
9310d4cdc0 | |||
86f9056d3f | |||
5375637eda | |||
109f06c3ae | |||
a3744da3a5 | |||
ff1feb653b | |||
4a11d89a08 | |||
73d7b5f110 | |||
8b7a92068b | |||
ff1532da13 | |||
6eafa2346d | |||
681644b854 | |||
de4d388e0a | |||
cbe2cb51e7 | |||
9176c71075 | |||
1c05e4ca09 | |||
2d55d3c743 | |||
0a9482b28a | |||
4b1440944e | |||
75794defc6 | |||
59a92dbacd | |||
b81ddf2b80 | |||
9ccd1ce08b | |||
6f6d22da13 | |||
095850f038 | |||
b46048e74f | |||
bf7dc5df78 | |||
f0d0abb66e | |||
fab6a8f8c9 | |||
61bf73d2f9 | |||
9219abf84b | |||
178bfe1d44 | |||
afb7f8be3e | |||
ba08060337 | |||
26243c05ed | |||
56375d7245 | |||
94f22cffba | |||
10b7d78825 | |||
7618c2e45f | |||
e13615c1ae | |||
06fb81410b | |||
5732fc0c2e | |||
59e54901fb | |||
0ef333f8ea | |||
12ef7e2fae | |||
397d6ff059 | |||
6469698261 | |||
3b733e98fa | |||
9e855d1f0e | |||
a2ee76b328 | |||
86bb2afd02 | |||
9b8c0e3924 | |||
d11ee46589 | |||
b6b820f6f1 | |||
e28f897cb1 | |||
964f095630 | |||
610012fcc3 | |||
3c970a135c | |||
24d7cebbe7 | |||
618527b51c | |||
4afcc240a3 | |||
26308ef62b | |||
36ed62142d | |||
6ae2fc9668 | |||
34f01d3731 | |||
67f3db1e03 | |||
36f92f01de | |||
f19c143e95 | |||
9559bc2e1e | |||
41d17dc543 | |||
885aeddbdc | |||
033e315035 | |||
4539954173 | |||
f54351fd57 | |||
b61655fe4f | |||
2a74f5e91f | |||
7ea3fd6482 | |||
c02e2c22ff | |||
d834ec4db9 | |||
f6a8b3d568 | |||
c4a7648ce3 | |||
c0144c9bc1 | |||
28ddeb124f | |||
dbc07f55f4 | |||
0d7c2d8269 | |||
879ea8ed62 | |||
54c76735e2 | |||
b6f5fed121 | |||
e08536af33 | |||
2c32e54746 | |||
9370d155f8 | |||
e47bbe63b8 | |||
972dce1462 | |||
7b44d8972f | |||
ba9cafecc5 | |||
e0f4f2c80a | |||
e715f1fbbb | |||
dde9d02008 | |||
a6eba37d5a | |||
2eb7c16a9a | |||
87fa50c492 | |||
9042664fcf | |||
e8a53041cc | |||
20e971f5ce | |||
6f2f4f4aa3 | |||
66e8748503 | |||
b5c8fa24a2 | |||
335e124c0a | |||
1faf3c66c7 | |||
7810063ca0 | |||
980320e24b | |||
118765ab30 | |||
5e60db8593 | |||
e81a065855 | |||
39d0893303 | |||
99ddbf553c | |||
799b509ac6 | |||
bc6b591dfb | |||
ef6a799533 | |||
26de143bf8 | |||
ad46b3f05c | |||
0462afe964 | |||
1100b98596 | |||
89ccdfcf6e | |||
a328d2d68a | |||
d5a94ea687 | |||
596ff529c4 | |||
612d1c76d4 | |||
886749dcb2 | |||
26f3275361 | |||
6441401d94 | |||
b7e4ad7234 | |||
9c82024fd5 | |||
685709decb | |||
6ab2afc8d4 | |||
23ad132f74 | |||
87164f5cdb | |||
d6056755b3 | |||
36229f4224 | |||
80f4fccd35 | |||
c6a14fa4f1 | |||
c6235e0f1e | |||
7c946c1cbe | |||
664d8646bb | |||
1aba27c84f | |||
008729700d | |||
9e1cedbece | |||
7503b32c74 | |||
383b6a38ba | |||
7d9eef37ed | |||
60d3da20f3 | |||
cd99b6e48f | |||
51c6a14786 | |||
75866406dc | |||
122055b38b | |||
e68e6cb666 | |||
b61d181ec7 | |||
c4e24c04f6 | |||
47e663f48c | |||
1f7178c3a8 | |||
cfa2edebcf | |||
175502b053 | |||
2c78053631 | |||
53c03f3635 | |||
6c72c97513 | |||
5748b29c03 | |||
87ee4635b2 | |||
9e82de33e6 | |||
8829f76183 | |||
f6165bac8f | |||
84bd6131a1 | |||
6d1de4bbd9 | |||
5a8fbc2f95 | |||
d2cfb76a7c | |||
f70be86ddc | |||
f5eb414d14 | |||
327d87355d | |||
b415e9b773 | |||
b203de7a26 | |||
ee65877956 | |||
c5097bfc5a | |||
febb6f57bd | |||
843cbd4674 | |||
a4a82bd041 | |||
ca2a59281a | |||
6f1721a728 | |||
99baf1a29e | |||
a68fa06ff9 | |||
9f431396c0 | |||
1ac2e924a2 | |||
0874574e5c | |||
069e9c015b | |||
8de4471322 | |||
c6ead3dc49 | |||
f749027143 | |||
153bd3aaf1 | |||
e19c4886fe | |||
1a57d453ba | |||
e5dfe7dafe | |||
bb190852a5 | |||
34a2d105d3 | |||
80601e16f9 | |||
ff6f9cc44f | |||
e0f60e09cf | |||
176aa606ca | |||
17364c3bd8 | |||
d842fc4958 | |||
19f5e6e07e | |||
acfa9c76d1 | |||
bff34cc5dc | |||
7f009f6d02 | |||
dfb9ae548c | |||
7d6b573f8b | |||
ade397fc24 | |||
d945d30cda | |||
c8c401e2c5 | |||
e4ca20bfc6 | |||
6347716815 | |||
859b6cc60e | |||
06a1a7f076 | |||
b6c120f555 | |||
6cc363bc5d | |||
80de3ee853 | |||
c340830b37 | |||
472b9ea866 | |||
4c5b07a091 | |||
fbd00bf5f8 | |||
a598276aa0 | |||
deb91bd12b | |||
92b8cf1b64 | |||
58e001c3d5 | |||
54ac920f81 | |||
fc9ae9e938 | |||
81d70e5d41 | |||
6bb809fd82 | |||
1a55c3de2b | |||
16b59f3e65 | |||
de0e710378 | |||
4a6f3330ed | |||
d4d8ab2757 | |||
950a107a13 | |||
aadeba46f2 | |||
6f2f53994c | |||
faabc62e5c | |||
ec42b597ab | |||
925477b3a2 | |||
cefc1a57ee | |||
53b25d61f7 | |||
1240ed6c6d | |||
3740b45e1a | |||
eb8181daa0 | |||
6a25cf6c5d | |||
be42e5562d | |||
19d2fcb542 | |||
06bdfd7f04 | |||
3c6cb5a2d1 | |||
2467754d0b | |||
97d71758a4 | |||
d4dce5b250 | |||
cf36da2e5d | |||
8f70354e3c | |||
4f868c2ef2 | |||
b69e55eae9 | |||
21e29744c2 | |||
e490d25791 | |||
c5870fcab2 | |||
8850446bc2 | |||
10b9878f03 | |||
f28834ffad | |||
a2947975e4 | |||
0735e35a1d | |||
27638d6a92 | |||
8de92943ab | |||
99bb4c2cf8 | |||
b225f6f3ff | |||
ecd5fab082 | |||
b85b5d8c3b | |||
8068e27eaf | |||
af43330fd6 | |||
1f88330133 | |||
64aa385f90 | |||
cae04b8198 | |||
1be792fbd8 | |||
6a64d6b536 | |||
243be8f7bb | |||
a7cf454760 | |||
5aa43eeb04 | |||
9398fbcf9e | |||
d010ab7d43 | |||
0891d9df6a | |||
b976d3e874 | |||
b42f46bad4 | |||
8e17dfbaee | |||
ef2f7779df | |||
afacfe5734 | |||
91db046978 | |||
5acffc2bb6 | |||
c5a935474b | |||
89417ce455 | |||
ec9085ff06 | |||
c6e638ddc2 | |||
7a85038c11 | |||
3170b2f92c | |||
555b33c252 | |||
f5047e3ab0 | |||
f6070e33f5 | |||
3a53e60af7 | |||
602a9df97b | |||
425e68c874 | |||
0bbe74e2bf | |||
61b06eff06 | |||
1919a9dd77 | |||
b3323c5d00 | |||
fd9ce531c9 | |||
ed01a844ef | |||
c6843a1307 | |||
3d21ea1df4 | |||
00a16bee76 | |||
b9a2323c51 | |||
66aabcc371 | |||
de389e2385 | |||
52a0b19f7e | |||
33f547c870 | |||
0767107730 | |||
c960601a1e | |||
146d54813c | |||
388367785d | |||
a890b93869 | |||
8cf0a5dace | |||
547c01f481 | |||
fabb03ca69 | |||
352a2057dd | |||
698780dfda | |||
93a19cfda0 | |||
e3244c1d21 | |||
7488fca4b4 | |||
d1fd56b757 | |||
ada44d84f8 | |||
7d4ce41e12 | |||
798245b8db | |||
ccf956d5c6 | |||
f98b5b651b | |||
cadb710c38 | |||
095cb75d6c | |||
2113029a14 | |||
c590cb86cf | |||
0c7b0c7526 | |||
ef8bbb93ae | |||
068e70a0d7 | |||
3f108a0ed8 | |||
e9687b8cbe | |||
54a98379ba | |||
1de546eaec | |||
dbf2bd5aba | |||
f2386f126e | |||
ffc97905f3 | |||
dbd324f202 | |||
98b7597fde | |||
18cfe67719 | |||
456da49f36 | |||
6e499a2733 | |||
40463088d2 | |||
9394a76962 | |||
e5ba5d51fe | |||
55782d3929 | |||
47f4b4247c | |||
7551995c6e | |||
c6aa95f1c7 | |||
a9ee43791d | |||
27231fd5d7 | |||
eb60bba0d5 | |||
5044f8ce6d | |||
13cc7ca77b | |||
6d207b0b0a | |||
6345f99aa0 | |||
fcc47038e4 | |||
95551a753a | |||
19b3d648a4 | |||
1bbe402281 | |||
ac48c59b5a | |||
c05d6b96a2 | |||
b99929cf16 | |||
72168fae29 | |||
96eeb91493 | |||
c2b4d14af5 | |||
627e8a250e | |||
ecb1ce8135 | |||
6e5ad60cea | |||
3651fb5daf | |||
fa23d307aa | |||
dbf21796ff | |||
55a74bdd7d | |||
9cdb601b91 | |||
93e665949f | |||
5144a9af4c | |||
93772e492b | |||
f97b65d44b | |||
ee70ebfb10 | |||
5631a99f00 | |||
5ea9595c9c | |||
1b6f920265 | |||
3bf8c915d5 | |||
1cc578be66 | |||
36f8f8bae5 | |||
68058fb2ae | |||
0abbe8288e | |||
a9de9101ca | |||
19ed9dc5e9 | |||
b5ae712f35 | |||
b1c01b53e7 | |||
5b31e47573 | |||
d2a9a294f2 | |||
32c7c58518 | |||
349c6e50c2 | |||
2e8027fa04 | |||
0bc1b33663 | |||
8564f9ef87 | |||
7cfd84d8f0 | |||
f2e40ec7e3 | |||
1f1255a829 | |||
53b65a9d1a | |||
9437e2d3ab | |||
6a7b78abc2 | |||
9c24e5195b | |||
306b046b9a | |||
10b50c5845 | |||
3912a57df2 | |||
08142d393f | |||
0f7af256d6 | |||
16076cc46f | |||
8aa16e66e4 | |||
a9b32e2f97 | |||
b2d272bf6f | |||
31ef6fb6a6 | |||
c9c059a008 | |||
a5e84b5482 | |||
7c697e09f3 | |||
b99afd82b2 | |||
9397598376 | |||
91ffe4e7f9 | |||
430a207865 | |||
894873b373 | |||
1ce2a1b846 | |||
4731ccfafe | |||
3e9c28d0a4 | |||
12d4394d73 | |||
b872e7072d | |||
b0ea657b18 | |||
a5f26b2ce0 | |||
c1b9b5c5e2 | |||
b288393cd4 | |||
767ffc09d0 | |||
446dc0a17b | |||
c85474ec37 | |||
3a59b75f4a | |||
8deac81364 | |||
98485c528e | |||
1a5b626f96 | |||
5736a1542c | |||
43854dc828 | |||
64af78110a | |||
59be3c7746 | |||
9e30f01fce | |||
fc8fe5317a | |||
92090ced9f | |||
ce47d4cf39 | |||
c11367553e | |||
c61529e4d4 | |||
8709f3300c | |||
e78bc1b32f | |||
89c4a7b4a4 | |||
9701907b82 | |||
7ac73bfcf9 | |||
1423d5d45b | |||
768ff67e8c | |||
b13deefd91 | |||
69e445211e | |||
ada8fc2a55 | |||
d9cc45f9ce | |||
515a402db7 | |||
813f70b806 | |||
a302a72379 | |||
e390f5b2d1 | |||
f09305a444 | |||
60189ce9ca | |||
fdc445e6a1 | |||
e3f8afcf80 | |||
9e2e8132a6 | |||
26f9bbeefa | |||
49b6c71079 | |||
97acc77e0a | |||
eb1e0427c1 | |||
6e0c9acb34 | |||
b75d659707 | |||
8894861a59 | |||
7878755acd | |||
2b62d6646e | |||
4f81f750ce | |||
fa216e2e93 | |||
181bd903be | |||
23c69c456a | |||
c73fce4f58 | |||
bd0ef69ece | |||
19ee98b36d | |||
75d4246b79 | |||
d2fd84d98c | |||
678378403b | |||
7f32d0eb9a | |||
f1b3598a0f | |||
07767c9376 | |||
5a3f9d1417 | |||
44a6303c91 | |||
5f7f80fdee | |||
a332a465ef | |||
8b16fed926 | |||
be10dd629b | |||
a6a868cbc1 | |||
a9ed275f4e | |||
fbc5378158 | |||
20210b614d | |||
063877a615 | |||
a73d50d379 | |||
9568f4dbd6 | |||
9b2ceb0d44 | |||
2deb185550 | |||
69d4719687 | |||
d31e566873 | |||
0ddcefce80 | |||
4c45d35507 | |||
829e49275d | |||
143309448e | |||
1f038ecee2 | |||
1b1f2ea72c | |||
6e1a54753e | |||
67d1f06c91 | |||
d37de6bc00 | |||
8deced771d | |||
c380512cc8 | |||
e0b06bc4de | |||
1bd6107ec7 | |||
ce1409fb6c | |||
b6b97f4706 | |||
cd12e177ea | |||
31c6ea9fda | |||
20931ccc1d | |||
9c9f441cff | |||
36822c128c | |||
29d3fdaa1d | |||
ac5167b8a3 | |||
0db434a922 | |||
3c0675486c | |||
f6d56e7e29 | |||
fac56390a0 | |||
c6e3229f0b | |||
ace30933bd | |||
d313f1576b | |||
ac07576676 | |||
df42480284 | |||
d2f722f032 | |||
a8fdcab927 | |||
0cba3c7788 | |||
0d414ec0ea | |||
c42b34a46b | |||
7a1050300d | |||
a64e87a6b1 | |||
81e9f2d608 | |||
ddbd8153e2 | |||
f7037b9f33 | |||
67a6fa6399 | |||
a35b8f5862 | |||
5b7c6f1b0e | |||
662101fd1f | |||
3f633460a8 | |||
be2d1a522a | |||
d6f5b8e421 | |||
b424c5dd27 | |||
2a83d79ace | |||
1ed24a5eef | |||
f2961cb536 | |||
4d66e42708 | |||
bd3a721753 | |||
25c3086d7a | |||
1bdd09342a | |||
ad6d773d26 | |||
b555ccd549 | |||
9445354b31 | |||
a42f2f7217 | |||
d1aa1f46da | |||
a1be924fa4 | |||
db60427e21 | |||
d3e2f41561 | |||
8840f6ef63 | |||
3b103b22e2 | |||
158f4c1c4c | |||
42606a499b | |||
c0841120bf | |||
61442a7e4a | |||
98876df5c5 | |||
a9680d6088 | |||
7eb6320d74 | |||
47aba4a996 | |||
643b36b732 | |||
001869641d | |||
bec538c543 | |||
c63ba3f378 | |||
0fb2b5550a | |||
762294c0f9 | |||
2a2ab94e97 | |||
53cab07a48 | |||
2604dc14fe | |||
06f67c738c | |||
1b001060a3 | |||
a960ce9454 | |||
439bdc54d6 | |||
e6b5810e03 | |||
89b73a4d89 | |||
ed3f36e72a | |||
78b711ec9d | |||
ac07833688 | |||
4be0a707b1 | |||
1e73b42c58 | |||
3df3bceccb | |||
a4370458cb | |||
742bad4080 | |||
be473470a4 | |||
445cd5b2c4 | |||
805a4b766a | |||
730139e43c | |||
24e8915e0a | |||
f15946e216 | |||
b54415dcde | |||
471293ba25 | |||
3e7320734c | |||
3131e557d9 | |||
1efc7eecbf | |||
15ec6a9284 | |||
dc1359a763 | |||
1e01e9813d | |||
119a268eb7 | |||
e887a315be | |||
c4bb51469b | |||
6edc043775 | |||
4379f5bc8e | |||
1ad56f4a13 | |||
f54e82781a | |||
e334d8ab00 | |||
e1c0f74152 | |||
e8f850285e | |||
4b93f40c5e | |||
57400925a4 | |||
ffed653cae | |||
ba5cd6e719 | |||
9564894eda | |||
042cd0b2cb | |||
049a97a800 | |||
aa6668f8cb | |||
13a129bb01 | |||
0974f58367 | |||
1d59bfd16e | |||
ebd73ec34f | |||
0629dee23b | |||
2dc0792d9e | |||
fde848ee51 | |||
e9d52282b7 | |||
c810628fe3 | |||
de0a5191f7 | |||
f6794829e4 | |||
475853fb14 | |||
1c1319927e | |||
964fdf171b | |||
93e20bce2e | |||
960a2aab74 | |||
2cae6596eb | |||
11b1eb4173 | |||
ee615c2d22 | |||
aef9a22331 | |||
3980eea7c6 | |||
d6d72489a7 | |||
9fdfb8c99b | |||
b9bb27008e | |||
82184b2882 | |||
f90a52c7d6 | |||
9ea0441559 | |||
5cab280759 | |||
a03a64b35c | |||
780b986be8 | |||
9d422918b3 | |||
b548ccca6e | |||
2c42c87689 | |||
8262a47455 | |||
bd56922a2f | |||
5b68942b23 | |||
c8bd0fbb1c | |||
c99798b1f2 | |||
316c6966b7 | |||
6a44695c48 | |||
c46b2d5573 | |||
68b58fb73c | |||
97513467ad | |||
35678c18c5 | |||
5fba08c911 | |||
1149a61986 | |||
7a10872854 | |||
4d1bcd2e19 | |||
8a1b6693a7 | |||
90c89aec76 | |||
b429e24392 | |||
3073b7d7e3 | |||
e02b99bfbc | |||
6d86067cea | |||
ce5d1fd80d | |||
a6755bea71 | |||
4cce99b207 | |||
d1287aa7c9 | |||
bcbd6f7243 | |||
39424839c5 | |||
2d03bd5c89 | |||
4d527a0ac5 | |||
b1020fde64 | |||
ff13b4bb46 | |||
f0e121c064 | |||
89a3f7d004 | |||
e6aa4c9327 | |||
2b2323fae7 | |||
a148e611f3 | |||
b56fd5e745 | |||
24eb4ed963 | |||
0e6400bfea | |||
be308b3392 | |||
62aa4336a8 | |||
b16d1134ea | |||
78f7eb4345 | |||
1615723f10 | |||
f9b46145de | |||
20a4dfd13d | |||
4a6f8d2ef2 | |||
ffdc1aa9c2 | |||
138801c18b | |||
8f3579ba45 | |||
3eecc76717 | |||
0488d36257 | |||
340bf54315 | |||
b33f3d9cc8 | |||
dbaf03430e | |||
f5738804ff | |||
bfa0360764 | |||
ae13fc3b92 | |||
7046944bf6 | |||
0423023d2e | |||
5132f0f876 | |||
7e44de2da9 | |||
08b0075335 | |||
efbab9e37f | |||
8195e6d4ff | |||
700a4cb72c | |||
94b9ebb0bb | |||
fe1e2aa8af | |||
7835f3d873 | |||
4a50c65cad | |||
283c93c57b | |||
1b86a3d5d6 | |||
8b710b57a5 | |||
716584bbae | |||
9dc0bb2a77 | |||
debbcb125b | |||
2d827eaae1 | |||
47d79ac28c | |||
61f2b73255 | |||
9f846d94be | |||
84fbeb5721 | |||
01da8e1792 | |||
6a3a3e5f8d | |||
42c278b4f8 | |||
e49bc83266 | |||
98b7ebec74 | |||
ccb43a3dfb | |||
c92b2620f5 | |||
e2bfeefc8b | |||
e52c964354 | |||
c635487210 | |||
ca6cd8a4d3 | |||
fb09df26c9 | |||
30f4a09a88 | |||
7143ea08e6 | |||
e4e7a112e3 | |||
4c133b957c | |||
28eb7c03fa | |||
7b01a208a2 | |||
db0af3763b | |||
ab9efcea77 | |||
d280577830 | |||
36da29aaa2 | |||
9e1204b645 | |||
ea2f69a8f8 | |||
55a705e777 | |||
cb10289b68 | |||
423776c7a2 | |||
e5cfddfc57 | |||
1564b898db | |||
3b61c6f9b9 | |||
042865c606 | |||
7f662ac2f3 | |||
e9f5d7aefe | |||
609f95ac97 | |||
0181a90d98 | |||
243f335718 | |||
f4990bb5da | |||
980d2a022c | |||
81fdd097c6 | |||
2b4c9657a6 | |||
45d30213b3 | |||
7884ff07bb | |||
bacf2afed1 | |||
67b45fc4e3 | |||
c28f3ab225 | |||
027ca88d83 | |||
9d5b9204fc | |||
39e0ed2962 | |||
3b973e12a4 | |||
d80573bdc5 | |||
4182bfd8b5 | |||
07a5b49454 | |||
16be699190 | |||
e523dd188c | |||
73a2682ed6 | |||
3d9f8c80a5 | |||
754061dba5 | |||
48c520150f | |||
a754196a48 | |||
23fce4e74d | |||
ab05abe787 | |||
67c8febb33 | |||
5d397716de | |||
5a7c46b3ef | |||
ec925491b2 | |||
2d18c1bb6f | |||
2aba32de19 | |||
a13dc847f0 | |||
d66670f6ac | |||
3418943949 | |||
f5c89f68a4 | |||
8fc942fbf4 | |||
83d2c8fc33 | |||
89839096ee | |||
a08d4bc720 | |||
7674ef3950 | |||
72c474f3b1 | |||
1dfc0b2e93 | |||
291573fbc5 | |||
0995658ca6 | |||
53f3764879 | |||
bdd8b59ab9 | |||
c3a8e35a2f | |||
c979be6e25 | |||
b7092cc307 | |||
3aa262efbe | |||
3cc326bca8 | |||
168c34f172 | |||
b3da1d223c | |||
107f2745c8 | |||
6f9002eb01 | |||
12db0637ec | |||
8d169a8bd9 | |||
f47ce9a360 | |||
4816b90378 | |||
01a897dbc2 | |||
45eb8baee8 | |||
4bf6cfc4d8 | |||
fddcb3a835 | |||
5d51621278 | |||
9ffc720f48 | |||
b6b72e389d | |||
5ae593bc00 | |||
44fe477c3c | |||
43bc60610d | |||
c21c1757de | |||
d3197f3430 | |||
3d23770e9d | |||
0fc0a62279 | |||
4da370b458 | |||
aa3e085536 | |||
253b676f7d | |||
9f4f911fd3 | |||
6ebfb5138c | |||
ab8ed8599e | |||
c76fb2eed0 | |||
4d8978ea90 | |||
64540cc870 | |||
5b05884a2b | |||
eef3ef2165 | |||
235296c749 | |||
8d13235b74 | |||
5ef5c70490 | |||
3fe627528e | |||
674eeed763 | |||
4bd91180df | |||
0af4824fa6 | |||
64eb953593 | |||
45704cf20a | |||
b5714afac7 | |||
ff109206fd | |||
49bd028363 | |||
44bf9a890e | |||
b60c6d4144 | |||
ef239e6430 | |||
58cd6007b2 | |||
1dcf6e8962 | |||
db95dfe38d | |||
860c85d012 | |||
6ca1654129 | |||
a2dc594a44 | |||
c6bc8e2ddf | |||
48a234e86f | |||
cf521eba5a | |||
52ebc78aaa | |||
1f7d52c5ce | |||
3251bdc220 | |||
93fee5f0e5 | |||
46c8db7f4b | |||
fc74c0209a | |||
07bfc3da1e | |||
2a4daa5360 | |||
e1a6dede54 | |||
cf40e5047e | |||
17ee076f3d | |||
4d12a98c5d | |||
d5329432fe | |||
8a926aaa73 | |||
5156aeee0f | |||
1690812936 | |||
c693a2c3f4 | |||
d6cac5c765 | |||
2722b9b7ea | |||
014fc6169a | |||
a7a722c9c0 | |||
da581dde70 | |||
17fc775fd3 | |||
eb57c787f3 | |||
97e789323a | |||
290f576641 | |||
9723aa11df | |||
4e04461820 | |||
147ebf1a5e | |||
e22fce02f8 | |||
3b8cb9e525 | |||
beffb72e3b | |||
b5c53d5e40 | |||
477dbc6daf | |||
3aaabdcc9d | |||
d045b0be1a | |||
e2bd96c5de | |||
be9790ef8a | |||
f8ef2b666f | |||
7bc63791c9 | |||
a9909fcf6d | |||
1fa9b3a996 | |||
5019346ab6 | |||
f22f1ebcde | |||
1e328436d8 | |||
cb9a759aa0 | |||
b80c528531 | |||
e03d2c06a8 | |||
501d63b3aa | |||
1c2cdfe06a | |||
118555c97a | |||
6af9fbc94e | |||
3020f9506e | |||
ce9c6a9689 | |||
8f2d573721 | |||
97c31d0a21 | |||
46d28d8082 | |||
d248dd5b1b | |||
474677017f | |||
0813a49ca5 | |||
d0308a8239 | |||
6843c8389b | |||
7b0f89398d | |||
97b867298a | |||
76d5cbcea9 | |||
2b925536d3 | |||
4baa5ae7a2 | |||
3f9d4f7083 | |||
10186a2e67 |
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 2022.11.1
current_version = 2023.5.6
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
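The `parse` setting above is a standard named-group regex for this calendar-style version scheme. As a quick illustration (not part of the diff), the sketch below shows how such a pattern splits a version string into its components; the sample value is taken from the hunk above.

```python
import re

# Same named-group pattern as the bumpversion `parse` setting above.
VERSION_PATTERN = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)")

match = VERSION_PATTERN.fullmatch("2023.5.6")
assert match is not None
# groupdict() yields the named components the version bump operates on.
print(match.groupdict())  # {'major': '2023', 'minor': '5', 'patch': '6'}
```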
@@ -1,8 +1,9 @@
env
static
htmlcov
*.env.yml
**/node_modules
dist/**
build/**
build_docs/**
Dockerfile
authentik/enterprise
@@ -7,8 +7,14 @@ charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[html]
[*.html]
indent_size = 2

[yaml]
[*.{yaml,yml}]
indent_size = 2

[*.go]
indent_style = tab

[Makefile]
indent_style = tab
11 .github/ISSUE_TEMPLATE/bug_report.md (vendored)
@@ -1,10 +1,9 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
title: ""
labels: bug
assignees: ''

assignees: ""
---

**Describe the bug**
@@ -12,6 +11,7 @@ A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:

1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
@@ -27,8 +27,9 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively

**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]

- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]

**Additional context**
Add any other context about the problem here.
5 .github/ISSUE_TEMPLATE/feature_request.md (vendored)
@@ -1,10 +1,9 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
title: ""
labels: enhancement
assignees: ''

assignees: ""
---

**Is your feature request related to a problem? Please describe.**
10 .github/ISSUE_TEMPLATE/question.md (vendored)
@@ -1,10 +1,9 @@
---
name: Question
about: Ask a question about a feature or specific configuration
title: ''
title: ""
labels: question
assignees: ''

assignees: ""
---

**Describe your question/**
@@ -20,8 +19,9 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively

**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]

- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]

**Additional context**
Add any other context about the problem here.
@@ -1,5 +1,5 @@
name: 'Comment usage instructions on PRs'
description: 'Comment usage instructions on PRs'
name: "Comment usage instructions on PRs"
description: "Comment usage instructions on PRs"

inputs:
tag:
@@ -17,7 +17,7 @@ runs:
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
comment-author: "github-actions[bot]"
body-includes: authentik PR Installation instructions
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v2
@@ -38,6 +38,14 @@ runs:
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```

For arm64, use these values:

```shell
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
AUTHENTIK_TAG=${{ inputs.tag }}-arm64
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```

Afterwards, run the upgrade commands from the latest release notes.
</details>
<details>
@@ -54,6 +62,17 @@ runs:
tag: ${{ inputs.tag }}
```

For arm64, use these values:

```yaml
authentik:
outposts:
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
image:
repository: ghcr.io/goauthentik/dev-server
tag: ${{ inputs.tag }}-arm64
```

Afterwards, run the upgrade commands from the latest release notes.
</details>
edit-mode: replace
12 .github/actions/docker-push-variables/action.yml (vendored)
@@ -1,5 +1,5 @@
name: 'Prepare docker environment variables'
description: 'Prepare docker environment variables'
name: "Prepare docker environment variables"
description: "Prepare docker environment variables"

outputs:
shouldBuild:
@@ -17,6 +17,9 @@ outputs:
sha:
description: "sha"
value: ${{ steps.ev.outputs.sha }}
shortHash:
description: "shortHash"
value: ${{ steps.ev.outputs.shortHash }}
version:
description: "version"
value: ${{ steps.ev.outputs.version }}
@@ -48,11 +51,14 @@ runs:
version_family = ".".join(version.split(".")[:-1])
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")

sha = os.environ["GITHUB_SHA"] if not "${{ github.event.pull_request.head.sha }}" else "${{ github.event.pull_request.head.sha }}"

with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print("branchName=%s" % branch_name, file=_output)
print("branchNameContainer=%s" % safe_branch_name, file=_output)
print("timestamp=%s" % int(time()), file=_output)
print("sha=%s" % os.environ["GITHUB_SHA"], file=_output)
print("sha=%s" % sha, file=_output)
print("shortHash=%s" % sha[:7], file=_output)
print("shouldBuild=%s" % should_build, file=_output)
print("version=%s" % version, file=_output)
print("versionFamily=%s" % version_family, file=_output)
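For reference, the embedded Python in this action derives the `sha`, `shortHash`, and `versionFamily` outputs from the commit SHA and version string. The standalone sketch below mirrors that logic under stated assumptions: `PR_HEAD_SHA` is a stand-in for the `github.event.pull_request.head.sha` template value, and the sample version is a made-up example, not taken from the diff.

```python
import os

# Prefer the PR head SHA when running on a pull request, otherwise fall
# back to the push SHA (mirrors the fallback expression in the action).
pr_head_sha = os.environ.get("PR_HEAD_SHA", "")  # stand-in for the PR head context
sha = pr_head_sha if pr_head_sha else os.environ.get("GITHUB_SHA", "0" * 40)

# shortHash is simply the first seven characters of the full SHA.
short_hash = sha[:7]

# versionFamily drops the last version component, e.g. "2023.5.6" -> "2023.5".
version = "2023.5.6"  # made-up example value
version_family = ".".join(version.split(".")[:-1])

print(short_hash, version_family)
```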
20 .github/actions/setup/action.yml (vendored)
@@ -1,5 +1,10 @@
name: 'Setup authentik testing environment'
description: 'Setup authentik testing environment'
name: "Setup authentik testing environment"
description: "Setup authentik testing environment"

inputs:
postgresql_tag:
description: "Optional postgresql image tag"
default: "12"

runs:
using: "composite"
@@ -13,17 +18,18 @@ runs:
- name: Setup python and restore poetry
uses: actions/setup-python@v3
with:
python-version: '3.11'
cache: 'poetry'
python-version: "3.11"
cache: "poetry"
- name: Setup node
uses: actions/setup-node@v3.1.0
uses: actions/setup-node@v3
with:
node-version: '16'
cache: 'npm'
node-version: "20"
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Setup dependencies
shell: bash
run: |
export PSQL_TAG=${{ inputs.postgresql_tag }}
docker-compose -f .github/actions/setup/docker-compose.yml up -d
poetry env use python3.11
poetry install
14 .github/actions/setup/docker-compose.yml (vendored)
@@ -1,23 +1,21 @@
version: '3.7'
version: "3.7"

services:
postgresql:
container_name: postgres
image: library/postgres:12
image: docker.io/library/postgres:${PSQL_TAG:-12}
volumes:
- db-data:/var/lib/postgresql/data
- db-data:/var/lib/postgresql/data
environment:
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
POSTGRES_DB: authentik
ports:
- 5432:5432
- 5432:5432
restart: always
redis:
container_name: redis
image: library/redis
image: docker.io/library/redis
ports:
- 6379:6379
- 6379:6379
restart: always

volumes:
11 .github/codecov.yml (vendored)
@@ -1,3 +1,10 @@
coverage:
precision: 2
round: up
status:
project:
default:
target: auto
# adjust accordingly based on how flaky your tests are
# this allows a 1% drop from the previous base commit coverage
threshold: 1%
notify:
after_n_builds: 3
1 .github/codespell-dictionary.txt (vendored, new file)
@@ -0,0 +1 @@
authentic->authentik
108 .github/dependabot.yml (vendored)
@@ -1,62 +1,50 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "ci:"
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: npm
directory: "/web"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "web:"
- package-ecosystem: npm
directory: "/website"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "website:"
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: docker
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "ci:"
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
- package-ecosystem: npm
directory: "/web"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "web:"
- package-ecosystem: npm
directory: "/website"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "website:"
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
- package-ecosystem: docker
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
36 .github/pull_request_template.md (vendored)
@@ -1,19 +1,39 @@
<!--
👋 Hello there! Welcome.

Please check the [Contributing guidelines](https://github.com/goauthentik/authentik/blob/main/CONTRIBUTING.md#how-can-i-contribute).
Please check the [Contributing guidelines](https://goauthentik.io/developer-docs/#how-can-i-contribute).
-->

# Details
* **Does this resolve an issue?**
Resolves #
## Details

- **Does this resolve an issue?**
Resolves #

## Changes

### New Features
* Adds feature which does x, y, and z.

- Adds feature which does x, y, and z.

### Breaking Changes
* Adds breaking change which causes \<issue\>.

## Additional
Any further notes or comments you want to make.
- Adds breaking change which causes \<issue\>.

## Checklist

- [ ] Local tests pass (`ak test authentik/`)
- [ ] The code has been formatted (`make lint-fix`)

If an API change has been made

- [ ] The API schema has been updated (`make gen-build`)

If changes to the frontend have been made

- [ ] The code has been formatted (`make web`)
- [ ] The translation files have been updated (`make i18n-extract`)

If applicable

- [ ] The documentation has been updated
- [ ] The documentation has been formatted (`make website`)
1 .github/stale.yml (vendored)
@@ -16,3 +16,4 @@ markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
only: issues
4 .github/transifex.yml (vendored)
@@ -6,11 +6,11 @@ git:
source_language: en
source_file: web/src/locales/en.po
# path expression to translation files, must contain <lang> placeholder
translation_files_expression: 'web/src/locales/<lang>.po'
translation_files_expression: "web/src/locales/<lang>.po"
- filter_type: file
# all supported i18n types: https://docs.transifex.com/formats
file_format: PO
source_language: en
source_file: locale/en/LC_MESSAGES/django.po
# path expression to translation files, must contain <lang> placeholder
translation_files_expression: 'locale/<lang>/LC_MESSAGES/django.po'
translation_files_expression: "locale/<lang>/LC_MESSAGES/django.po"
142 .github/workflows/ci-main.yml (vendored)
@@ -23,12 +23,14 @@ jobs:
fail-fast: false
matrix:
job:
- pylint
- black
- isort
- bandit
- pyright
- black
- codespell
- isort
- pending-migrations
- pylint
- pyright
- ruff
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@@ -59,7 +61,7 @@ jobs:
cp authentik/lib/default.yml local.env.yml
cp -R .github ..
cp -R scripts ..
git checkout $(git describe --abbrev=0 --match 'version/*')
git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
rm -rf .github/ scripts/
mv ../.github ../scripts .
- name: Setup authentik env (ensure stable deps are installed)
@@ -79,11 +81,21 @@ jobs:
- name: migrate to latest
run: poetry run python -m lifecycle.migrate
test-unittest:
name: test-unittest - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
psql:
- 11-alpine
- 12-alpine
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
with:
postgresql_tag: ${{ matrix.psql }}
- name: run unittest
run: |
poetry run make test
@@ -94,22 +106,43 @@ jobs:
flags: unit
test-integration:
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Create k8s Kind Cluster
uses: helm/kind-action@v1.4.0
uses: helm/kind-action@v1.7.0
- name: run integration
run: |
poetry run make test-integration
poetry run coverage run manage.py test tests/integration
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v3
with:
flags: integration
test-e2e-provider:
test-e2e:
name: test-e2e (${{ matrix.job.name }})
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
job:
- name: proxy
glob: tests/e2e/test_provider_proxy*
- name: oauth
glob: tests/e2e/test_provider_oauth2* tests/e2e/test_source_oauth*
- name: oauth-oidc
glob: tests/e2e/test_provider_oidc*
- name: saml
glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml*
- name: ldap
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
- name: radius
glob: tests/e2e/test_provider_radius*
- name: flows
glob: tests/e2e/test_flows*
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
@@ -131,36 +164,7 @@ jobs:
npm run build
- name: run e2e
run: |
poetry run make test-e2e-provider
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v3
with:
flags: e2e
test-e2e-rest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
run: |
docker-compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v3
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web/
run: |
npm ci
make -C .. gen-client-ts
npm run build
- name: run e2e
run: |
poetry run make test-e2e-rest
poetry run coverage run manage.py test ${{ matrix.job.glob }}
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v3
@@ -173,8 +177,7 @@ jobs:
- test-migrations-from-stable
- test-unittest
- test-integration
- test-e2e-rest
- test-e2e-provider
- test-e2e
runs-on: ubuntu-latest
steps:
- run: echo mark
@@ -182,13 +185,10 @@ jobs:
needs: ci-core-mark
runs-on: ubuntu-latest
timeout-minutes: 120
strategy:
fail-fast: false
matrix:
arch:
- 'linux/amd64'
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
@@ -205,20 +205,62 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Building Docker Image
uses: docker/build-push-action@v3
- name: Build Docker Image
uses: docker/build-push-action@v4
with:
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: ${{ matrix.arch }}
- name: Comment on PR
if: github.event_name == 'pull_request'
continue-on-error: true
uses: ./.github/actions/comment-pr-instructions
with:
tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}
tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
build-arm64:
needs: ci-core-mark
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
- name: Login to Container Registry
uses: docker/login-action@v2
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v4
with:
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-arm64
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}-arm64
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: linux/arm64
46 .github/workflows/ci-outpost.yml (vendored)
@@ -15,9 +15,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
- uses: actions/setup-go@v4
with:
go-version: "^1.17"
go-version-file: "go.mod"
- name: Prepare and generate API
run: |
# Create folder structure for go embeds
@@ -28,13 +28,16 @@ jobs:
run: make gen-client-go
- name: golangci-lint
uses: golangci/golangci-lint-action@v3
with:
args: --timeout 5000s
skip-pkg-cache: true
test-unittest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
- uses: actions/setup-go@v4
with:
go-version: "^1.17"
go-version-file: "go.mod"
- name: Generate API
run: make gen-client-go
- name: Go unittests
@@ -47,7 +50,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- run: echo mark
build:
build-container:
timeout-minutes: 120
needs:
- ci-outpost-mark
@@ -57,11 +60,12 @@ jobs:
type:
- proxy
- ldap
arch:
- 'linux/amd64'
- radius
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
@@ -80,20 +84,20 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate API
run: make gen-client-go
- name: Building Docker Image
uses: docker/build-push-action@v3
- name: Build Docker Image
uses: docker/build-push-action@v4
with:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
file: ${{ matrix.type }}.Dockerfile
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: ${{ matrix.arch }}
build-outpost-binary:
platforms: linux/amd64,linux/arm64
context: .
build-binary:
timeout-minutes: 120
needs:
- ci-outpost-mark
@@ -104,17 +108,20 @@ jobs:
type:
- proxy
- ldap
- radius
goos: [linux]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version: "^1.17"
- uses: actions/setup-node@v3.5.1
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-go@v4
with:
node-version: '16'
cache: 'npm'
go-version-file: "go.mod"
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Generate API
run: make gen-client-go
@@ -128,8 +135,5 @@ jobs:
set -x
export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }}
export CGO_ENABLED=0
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- uses: actions/upload-artifact@v3
with:
name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
path: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
41 .github/workflows/ci-web.yml (vendored)
@@ -15,10 +15,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
- uses: actions/setup-node@v3.6.0
with:
node-version: '16'
cache: 'npm'
node-version: "20"
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@@ -27,14 +27,30 @@ jobs:
- name: Eslint
working-directory: web/
run: npm run lint
lint-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: TSC
working-directory: web/
run: npm run tsc
lint-prettier:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
- uses: actions/setup-node@v3.6.0
with:
node-version: '16'
cache: 'npm'
node-version: "20"
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@@ -47,10 +63,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
- uses: actions/setup-node@v3.6.0
with:
node-version: '16'
cache: 'npm'
node-version: "20"
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: |
@@ -69,6 +85,7 @@ jobs:
- lint-eslint
- lint-prettier
- lint-lit-analyse
- lint-build
runs-on: ubuntu-latest
steps:
- run: echo mark
@@ -78,10 +95,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
- uses: actions/setup-node@v3.6.0
with:
node-version: '16'
cache: 'npm'
node-version: "20"
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
43 .github/workflows/ci-website.yml (vendored)
@@ -15,19 +15,56 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
- uses: actions/setup-node@v3.6.0
with:
node-version: '16'
cache: 'npm'
node-version: "20"
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: prettier
working-directory: website/
run: npm run prettier-check
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: test
working-directory: website/
run: npm test
build:
runs-on: ubuntu-latest
name: ${{ matrix.job }}
strategy:
fail-fast: false
matrix:
job:
- build
- build-docs-only
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: build
working-directory: website/
run: npm run ${{ matrix.job }}
ci-website-mark:
needs:
- lint-prettier
- test
- build
runs-on: ubuntu-latest
steps:
- run: echo mark
56 .github/workflows/codeql-analysis.yml (vendored)
@@ -2,12 +2,11 @@ name: "CodeQL"

on:
push:
branches: [ main, '*', next, version* ]
branches: [main, "*", next, version*]
pull_request:
# The branches below must be a subset of the branches above
branches: [ main ]
branches: [main]
schedule:
- cron: '30 6 * * 5'
- cron: "30 6 * * 5"

jobs:
analyze:
@@ -21,40 +20,17 @@ jobs:
strategy:
fail-fast: false
matrix:
language: [ 'go', 'javascript', 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more:
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed

language: ["go", "javascript", "python"]
steps:
- name: Checkout repository
uses: actions/checkout@v3

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main

# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2

# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl

# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language

#- run: |
# make bootstrap
# make release

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
- name: Checkout repository
uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
- name: Autobuild
uses: github/codeql-action/autobuild@v2
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
.github/workflows/ghcr-retention.yml (11 lines changed)

@@ -2,7 +2,7 @@ name: ghcr-retention

on:
schedule:
- cron: '0 0 * * *' # every day at midnight
- cron: "0 0 * * *" # every day at midnight
workflow_dispatch:

jobs:
@@ -10,13 +10,18 @@ jobs:
name: Delete old unused container images
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Delete 'dev' containers older than a week
uses: sondrelg/container-retention-policy@v1
uses: snok/container-retention-policy@v2
with:
image-names: dev-server,dev-ldap,dev-proxy
cut-off: One week ago UTC
account-type: org
org-name: goauthentik
untagged-only: false
token: ${{ secrets.GHCR_CLEANUP_TOKEN }}
token: ${{ steps.generate_token.outputs.token }}
skip-tags: gh-next,gh-main
.github/workflows/release-publish.yml (41 lines changed)

@@ -27,10 +27,13 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Building Docker Image
uses: docker/build-push-action@v3
- name: Build Docker Image
uses: docker/build-push-action@v4
with:
push: ${{ github.event_name == 'release' }}
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
tags: |
beryju/authentik:${{ steps.ev.outputs.version }},
beryju/authentik:${{ steps.ev.outputs.versionFamily }},
@@ -39,7 +42,8 @@ jobs:
ghcr.io/goauthentik/server:${{ steps.ev.outputs.versionFamily }},
ghcr.io/goauthentik/server:latest
platforms: linux/amd64,linux/arm64
context: .
build-args: |
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
build-outpost:
runs-on: ubuntu-latest
strategy:
@@ -48,11 +52,12 @@ jobs:
type:
- proxy
- ldap
- radius
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
- uses: actions/setup-go@v4
with:
go-version: "^1.17"
go-version-file: "go.mod"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
@@ -71,8 +76,8 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Building Docker Image
uses: docker/build-push-action@v3
- name: Build Docker Image
uses: docker/build-push-action@v4
with:
push: ${{ github.event_name == 'release' }}
tags: |
@@ -84,6 +89,8 @@ jobs:
ghcr.io/goauthentik/${{ matrix.type }}:latest
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
build-args: |
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
build-outpost-binary:
timeout-minutes: 120
runs-on: ubuntu-latest
@@ -93,17 +100,18 @@ jobs:
type:
- proxy
- ldap
- radius
goos: [linux, darwin]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
- uses: actions/setup-go@v4
with:
go-version: "^1.17"
- uses: actions/setup-node@v3.5.1
go-version-file: "go.mod"
- uses: actions/setup-node@v3.6.0
with:
node-version: '16'
cache: 'npm'
node-version: "20"
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Build web
working-directory: web/
@@ -115,6 +123,7 @@ jobs:
set -x
export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }}
export CGO_ENABLED=0
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- name: Upload binaries to release
uses: svenstaro/upload-release-action@v2
@@ -161,11 +170,9 @@ jobs:
if: ${{ github.event_name == 'release' }}
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: beryjuorg
SENTRY_ORG: authentik-security-inc
SENTRY_PROJECT: authentik
SENTRY_URL: https://sentry.beryju.org
with:
version: authentik@${{ steps.ev.outputs.version }}
environment: beryjuorg-prod
sourcemaps: './web/dist'
url_prefix: '~/static/dist'
sourcemaps: "./web/dist"
url_prefix: "~/static/dist"
.github/workflows/release-tag.yml (11 lines changed)

@@ -3,7 +3,7 @@ name: authentik-on-tag
on:
push:
tags:
- 'version/*'
- "version/*"

jobs:
build:
@@ -22,18 +22,23 @@ jobs:
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test-all
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Extract version number
id: get_version
uses: actions/github-script@v6
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
github-token: ${{ steps.generate_token.outputs.token }}
script: |
return context.payload.ref.replace(/\/refs\/tags\/version\//, '');
- name: Create Release
id: create_release
uses: actions/create-release@v1.1.4
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
with:
tag_name: ${{ github.ref }}
release_name: Release ${{ steps.get_version.outputs.result }}
.github/workflows/translation-advice.yml (new file, 34 lines)

@@ -0,0 +1,34 @@
name: authentik-translation-advice

on:
pull_request:
branches:
- main
paths:
- "!**"
- "locale/**"
- "web/src/locales/**"

jobs:
post-comment:
runs-on: ubuntu-latest
steps:
- name: Find Comment
uses: peter-evans/find-comment@v2
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: "github-actions[bot]"
body-includes: authentik translations instructions
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v3
with:
comment-id: ${{ steps.fc.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
### authentik translations instructions

Thanks for your pull request!

authentik translations are handled using [Transifex](https://explore.transifex.com/authentik/authentik/). Please edit translations over there and they'll be included automatically.
.github/workflows/translation-compile.yml (20 lines changed)

@@ -1,12 +1,9 @@
name: authentik-backend-translate-compile
on:
push:
branches: [ main ]
branches: [main]
paths:
- '/locale/'
pull_request:
paths:
- '/locale/'
- "locale/**"
workflow_dispatch:

env:
@@ -18,16 +15,23 @@ jobs:
compile:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v3
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run compile
run: poetry run ./manage.py compilemessages
run: poetry run ak compilemessages
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
uses: peter-evans/create-pull-request@v5
id: cpr
with:
token: ${{ secrets.GITHUB_TOKEN }}
token: ${{ steps.generate_token.outputs.token }}
branch: compile-backend-translation
commit-message: "core: compile backend translations"
title: "core: compile backend translations"
.github/workflows/web-api-publish.yml (31 lines changed)

@@ -1,19 +1,26 @@
name: authentik-web-api-publish
on:
push:
branches: [ main ]
branches: [main]
paths:
- 'schema.yml'
- "schema.yml"
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
- id: generate_token
uses: tibdex/github-app-token@v1
with:
node-version: '16'
registry-url: 'https://registry.npmjs.org'
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v3
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
registry-url: "https://registry.npmjs.org"
- name: Generate API Client
run: make gen-client-ts
- name: Publish package
@@ -28,14 +35,20 @@ jobs:
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
- uses: peter-evans/create-pull-request@v5
id: cpr
with:
token: ${{ secrets.GITHUB_TOKEN }}
token: ${{ steps.generate_token.outputs.token }}
branch: update-web-api-client
commit-message: "web: bump API Client version"
title: "web: bump API Client version"
body: "web: bump API Client version"
delete-branch: true
signoff: true
team-reviewers: "@goauthentik/core"
author: authentik bot <github-bot@goauthentik.io>
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash
.gitignore (7 lines changed)

@@ -194,11 +194,12 @@ pip-selfcheck.json
/static/
local.env.yml

# Selenium Screenshots
selenium_screenshots/
backups/
media/
*mmdb

.idea/
/gen-*/
data/

# Local Netlify folder
.netlify
.vscode/extensions.json (new file, 21 lines)

@@ -0,0 +1,21 @@
{
    "recommendations": [
        "bashmish.es6-string-css",
        "bpruitt-goddard.mermaid-markdown-syntax-highlighting",
        "dbaeumer.vscode-eslint",
        "EditorConfig.EditorConfig",
        "esbenp.prettier-vscode",
        "github.vscode-github-actions",
        "golang.go",
        "Gruntfuggly.todo-tree",
        "mechatroner.rainbow-csv",
        "ms-python.black-formatter",
        "ms-python.isort",
        "ms-python.pylint",
        "ms-python.python",
        "ms-python.vscode-pylance",
        "redhat.vscode-yaml",
        "Tobermory.es6-string-html",
        "unifiedjs.vscode-mdx",
    ]
}
.vscode/settings.json (17 lines changed)

@@ -14,7 +14,10 @@
"webauthn",
"traefik",
"passwordless",
"kubernetes"
"kubernetes",
"sso",
"slo",
"scim",
],
"python.linting.pylintEnabled": true,
"todo-tree.tree.showCountsInTree": true,
@@ -24,7 +27,11 @@
"!Find sequence",
"!KeyOf scalar",
"!Context scalar",
"!Format sequence"
"!Context sequence",
"!Format sequence",
"!Condition sequence",
"!Env sequence",
"!Env scalar"
],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.importModuleSpecifierEnding": "index",
@@ -40,5 +47,11 @@
"url": "https://github.com/goauthentik/authentik/issues/<num>",
"ignoreCase": false
}
],
"go.testFlags": [
    "-count=1"
],
"github-actions.workflows.pinned.workflows": [
    ".github/workflows/ci-main.yml"
]
}
CODEOWNERS (new file, 2 lines)

@@ -0,0 +1,2 @@
* @goauthentik/core
website/docs/security/** @goauthentik/security
@@ -17,24 +17,24 @@ diverse, inclusive, and healthy community.
Examples of behavior that contributes to a positive environment for our
community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
  overall community
- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
- Focusing on what is best not just for us as individuals, but for the
  overall community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or
  advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
  address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting
- The use of sexualized language or imagery, and sexual attention or
  advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email
  address, without their explicit permission
- Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

@@ -106,7 +106,7 @@ Violating these terms may lead to a permanent ban.
### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within
CONTRIBUTING.md (175 lines removed)

@@ -1,175 +0,0 @@
# Contributing to authentik

:+1::tada: Thanks for taking the time to contribute! :tada::+1:

The following is a set of guidelines for contributing to authentik and its components, which are hosted in the [goauthentik Organization](https://github.com/goauthentik) on GitHub. These are mostly guidelines, not rules. Use your best judgment, and feel free to propose changes to this document in a pull request.

#### Table Of Contents

[Code of Conduct](#code-of-conduct)

[I don't want to read this whole thing, I just have a question!!!](#i-dont-want-to-read-this-whole-thing-i-just-have-a-question)

[What should I know before I get started?](#what-should-i-know-before-i-get-started)
* [The components](#the-components)
* [authentik's structure](#authentiks-structure)

[How Can I Contribute?](#how-can-i-contribute)
* [Reporting Bugs](#reporting-bugs)
* [Suggesting Enhancements](#suggesting-enhancements)
* [Your First Code Contribution](#your-first-code-contribution)
* [Pull Requests](#pull-requests)

[Styleguides](#styleguides)
* [Git Commit Messages](#git-commit-messages)
* [Python Styleguide](#python-styleguide)
* [Documentation Styleguide](#documentation-styleguide)

## Code of Conduct

Basically, don't be a dickhead. This is an open-source non-profit project, made in the free time of volunteers. If there's something you dislike or think can be done better, tell us! We'd love to hear any suggestions for improvement.

## I don't want to read this whole thing I just have a question!!!

Either [create a question on GitHub](https://github.com/goauthentik/authentik/issues/new?assignees=&labels=question&template=question.md&title=) or join [the Discord server](https://goauthentik.io/discord)

## What should I know before I get started?

### The components

authentik consists of a few larger components:

- *authentik* the actual application server, is described below.
- *outpost-proxy* is a Go application based on a forked version of oauth2_proxy, which does identity-aware reverse proxying.
- *outpost-ldap* is a Go LDAP server that uses the *authentik* application server as its backend
- *web* is the web frontend, both for administrating and using authentik. It is written in TypeScript using lit-html and the PatternFly CSS Library.
- *website* is the Website/documentation, which uses docusaurus.

### authentik's structure

authentik is at its very core a Django project. It consists of many individual django applications. These applications are intended to separate concerns, and they may share code between each other.

These are the current packages:
<a id="authentik-packages"/>

```
authentik
├── admin - Administrative tasks and APIs, no models (Version updates, Metrics, system tasks)
├── api - General API Configuration (Routes, Schema and general API utilities)
├── core - Core authentik functionality, central routes, core Models
├── crypto - Cryptography, currently used to generate and hold Certificates and Private Keys
├── events - Event Log, middleware and signals to generate signals
├── flows - Flows, the FlowPlanner and the FlowExecutor, used for all flows for authentication, authorization, etc
├── lib - Generic library of functions, few dependencies on other packages.
├── managed - Handle managed models and their state.
├── outposts - Configure and deploy outposts on kubernetes and docker.
├── policies - General PolicyEngine
│   ├── dummy - A Dummy policy used for testing
│   ├── event_matcher - Match events based on different criteria
│   ├── expiry - Check when a user's password was last set
│   ├── expression - Execute any arbitrary python code
│   ├── hibp - Check a password against HaveIBeenPwned
│   ├── password - Check a password against several rules
│   └── reputation - Check the user's/client's reputation
├── providers
│   ├── ldap - Provide LDAP access to authentik users/groups using an outpost
│   ├── oauth2 - OIDC-compliant OAuth2 provider
│   ├── proxy - Provides an identity-aware proxy using an outpost
│   └── saml - SAML2 Provider
├── recovery - Generate keys to use in case you lock yourself out
├── root - Root django application, contains global settings and routes
├── sources
│   ├── ldap - Sync LDAP users from OpenLDAP or Active Directory into authentik
│   ├── oauth - OAuth1 and OAuth2 Source
│   ├── plex - Plex source
│   └── saml - SAML2 Source
├── stages
│   ├── authenticator_duo - Configure a DUO authenticator
│   ├── authenticator_static - Configure TOTP backup keys
│   ├── authenticator_totp - Configure a TOTP authenticator
│   ├── authenticator_validate - Validate any authenticator
│   ├── authenticator_webauthn - Configure a WebAuthn authenticator
│   ├── captcha - Make the user pass a captcha
│   ├── consent - Let the user decide if they want to consent to an action
│   ├── deny - Static deny, can be used with policies
│   ├── dummy - Dummy stage to test
│   ├── email - Send the user an email and block execution until they click the link
│   ├── identification - Identify a user with any combination of fields
│   ├── invitation - Invitation system to limit flows to certain users
│   ├── password - Password authentication
│   ├── prompt - Arbitrary prompts
│   ├── user_delete - Delete the currently pending user
│   ├── user_login - Login the currently pending user
│   ├── user_logout - Logout the currently pending user
│   └── user_write - Write any currently pending data to the user.
└── tenants - Soft tenancy, configure defaults and branding per domain
```

This django project is running in gunicorn, which spawns multiple workers and threads. Gunicorn is run from a lightweight Go application which reverse-proxies it, handles static files and will eventually gain more functionality as more code is migrated to go.

There are also several background tasks which run in Celery; the root celery application is defined in `authentik.root.celery`.

## How Can I Contribute?

### Reporting Bugs

This section guides you through submitting a bug report for authentik. Following these guidelines helps maintainers and the community understand your report, reproduce the behavior, and find related reports.

Whenever authentik encounters an error, it will be logged as an Event with the type `system_exception`. This event type has a button to directly open a pre-filled GitHub issue form.

This form will have the full stack trace of the error that occurred and shouldn't contain any sensitive data.

### Suggesting Enhancements

This section guides you through submitting an enhancement suggestion for authentik, including completely new features and minor improvements to existing functionality. Following these guidelines helps maintainers and the community understand your suggestion and find related suggestions.

When you are creating an enhancement suggestion, please fill in [the template](https://github.com/goauthentik/authentik/issues/new?assignees=&labels=enhancement&template=feature_request.md&title=), including the steps that you imagine you would take if the feature you're requesting existed.

### Your First Code Contribution

#### Local development

authentik can be run locally, although depending on which part you want to work on, different pre-requisites are required.

This is documented in the [developer docs](https://goauthentik.io/developer-docs/?utm_source=github)

### Pull Requests

The process described here has several goals:

- Maintain authentik's quality
- Fix problems that are important to users
- Engage the community in working toward the best possible authentik
- Enable a sustainable system for authentik's maintainers to review contributions

Please follow these steps to have your contribution considered by the maintainers:

1. Follow the [styleguides](#styleguides)
2. After you submit your pull request, verify that all [status checks](https://help.github.com/articles/about-status-checks/) are passing <details><summary>What if the status checks are failing?</summary>If a status check is failing, and you believe that the failure is unrelated to your change, please leave a comment on the pull request explaining why you believe the failure is unrelated. A maintainer will re-run the status check for you. If we conclude that the failure was a false positive, then we will open an issue to track that problem with our status check suite.</details>
3. Ensure your Code has tests. While it is not always possible to test every single case, the majority of the code should be tested.

While the prerequisites above must be satisfied prior to having your pull request reviewed, the reviewer(s) may ask you to complete additional design work, tests, or other changes before your pull request can be ultimately accepted.

## Styleguides

### Git Commit Messages

* Use the format of `<package>: <verb> <description>`
    - See [here](#authentik-packages) for `package`
    - Example: `providers/saml2: fix parsing of requests`
* Reference issues and pull requests liberally after the first line

### Python Styleguide

All Python code is linted with [black](https://black.readthedocs.io/en/stable/), [PyLint](https://www.pylint.org/) and [isort](https://pycqa.github.io/isort/).

authentik runs on Python 3.9 at the time of writing this.

* Use native type-annotations wherever possible.
* Add meaningful docstrings when possible.
* Ensure any database migrations work properly from the last stable version (this is checked via CI)
* If your code changes central functions, make sure nothing else is broken.

### Documentation Styleguide

* Use [MDX](https://mdxjs.com/) whenever appropriate.
CONTRIBUTING.md (now a symbolic link, 1 line)

@@ -0,0 +1 @@
website/developer-docs/index.md
Dockerfile (40 lines changed)

@@ -1,25 +1,26 @@
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as website-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:20 as website-builder

COPY ./website /work/website/
COPY ./blueprints /work/blueprints/
COPY ./SECURITY.md /work/

ENV NODE_ENV=production
WORKDIR /work/website
RUN npm ci && npm run build-docs-only
RUN npm ci --include=dev && npm run build-docs-only

# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as web-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:20 as web-builder

COPY ./web /work/web/
COPY ./website /work/website/

ENV NODE_ENV=production
WORKDIR /work/web
RUN npm ci && npm run build
RUN npm ci --include=dev && npm run build

# Stage 3: Poetry to requirements.txt export
FROM docker.io/python:3.11.0-slim-bullseye AS poetry-locker
FROM docker.io/python:3.11.3-slim-bullseye AS poetry-locker

WORKDIR /work
COPY ./pyproject.toml /work
@@ -30,7 +31,7 @@ RUN pip install --no-cache-dir poetry && \
    poetry export -f requirements.txt --dev --output requirements-dev.txt

# Stage 4: Build go proxy
FROM docker.io/golang:1.19.3-bullseye AS go-builder
FROM docker.io/golang:1.20.4-bullseye AS go-builder

WORKDIR /work

@@ -45,8 +46,24 @@ COPY ./go.sum /work/go.sum

RUN go build -o /work/authentik ./cmd/server/

# Stage 5: Run
FROM docker.io/python:3.11.0-slim-bullseye AS final-image
# Stage 5: MaxMind GeoIP
FROM ghcr.io/maxmind/geoipupdate:v5.1 as geoip

ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
ENV GEOIPUPDATE_VERBOSE="true"

USER root
RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
    --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
    mkdir -p /usr/share/GeoIP && \
    /bin/sh -c "\
        export GEOIPUPDATE_ACCOUNT_ID=$(cat /run/secrets/GEOIPUPDATE_ACCOUNT_ID); \
        export GEOIPUPDATE_LICENSE_KEY=$(cat /run/secrets/GEOIPUPDATE_LICENSE_KEY); \
        /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0 \
    "

# Stage 6: Run
FROM docker.io/python:3.11.3-slim-bullseye AS final-image

LABEL org.opencontainers.image.url https://goauthentik.io
LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
@@ -59,6 +76,7 @@ ENV GIT_BUILD_HASH=$GIT_BUILD_HASH

COPY --from=poetry-locker /work/requirements.txt /
COPY --from=poetry-locker /work/requirements-dev.txt /
COPY --from=geoip /usr/share/GeoIP /geoip

RUN apt-get update && \
    # Required for installing pip packages
@@ -66,7 +84,7 @@ RUN apt-get update && \
    # Required for runtime
    apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
    # Required for bootstrap & healtcheck
    apt-get install -y --no-install-recommends curl runit && \
    apt-get install -y --no-install-recommends runit && \
    pip install --no-cache-dir -r /requirements.txt && \
    apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev && \
    apt-get autoremove --purge -y && \
@@ -79,13 +97,13 @@ RUN apt-get update && \

COPY ./authentik/ /authentik
COPY ./pyproject.toml /
COPY ./xml /xml
COPY ./schemas /schemas
COPY ./locale /locale
COPY ./tests /tests
COPY ./manage.py /
COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle
COPY --from=go-builder /work/authentik /authentik-proxy
COPY --from=go-builder /work/authentik /bin/authentik
COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=website-builder /work/website/help/ /website/help/
LICENSE (9 lines changed)

@@ -1,6 +1,11 @@
MIT License
Copyright (c) 2023 Jens Langhammer

Copyright (c) 2022 Jens Langhammer
Portions of this software are licensed as follows:
* All content residing under the "website/" directory of this repository is licensed under "Creative Commons: CC BY-SA 4.0 license".
* All content that resides under the "authentik/enterprise/" directory of this repository, if that directory exists, is licensed under the license defined in "authentik/enterprise/LICENSE".
* All client-side JavaScript (when served directly or after being compiled, arranged, augmented, or combined), is licensed under the "MIT Expat" license.
* All third party components incorporated into the authentik are licensed under the original license provided by the owner of the applicable component.
* Content outside of the above mentioned directories or restrictions above is available under the "MIT" license as defined below.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
Makefile (87 lines changed)

@@ -3,18 +3,24 @@ PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.npm_version)
PY_SOURCES = authentik tests scripts lifecycle

CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
    -I .github/codespell-words.txt \
    -S 'web/src/locales/**' \
    authentik \
    internal \
    cmd \
    web/src \
    website/src \
    website/blog \
    website/developer-docs \
    website/docs \
    website/integrations \
    website/src

all: lint-fix lint test gen web

test-integration:
    coverage run manage.py test tests/integration

test-e2e-provider:
    coverage run manage.py test tests/e2e/test_provider*

test-e2e-rest:
    coverage run manage.py test tests/e2e/test_flows* tests/e2e/test_source*

test-go:
    go test -timeout 0 -v -race -cover ./...

@@ -33,28 +39,19 @@ test:
    coverage report

lint-fix:
    isort authentik tests scripts lifecycle
    black authentik tests scripts lifecycle
    codespell -I .github/codespell-words.txt -S 'web/src/locales/**' -w \
        authentik \
        internal \
        cmd \
        web/src \
        website/src \
        website/docs \
        website/developer-docs
    isort authentik $(PY_SOURCES)
    black authentik $(PY_SOURCES)
    ruff authentik $(PY_SOURCES)
    codespell -w $(CODESPELL_ARGS)

lint:
    pylint authentik tests lifecycle
    bandit -r authentik tests lifecycle -x node_modules
    pylint $(PY_SOURCES)
    bandit -r $(PY_SOURCES) -x node_modules
    golangci-lint run -v

migrate:
    python -m lifecycle.migrate

run:
    go run -v ./cmd/server/

i18n-extract: i18n-extract-core web-extract

i18n-extract-core:
@@ -68,15 +65,20 @@ gen-build:
    AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
    AUTHENTIK_DEBUG=true ak spectacular --file schema.yml

gen-changelog:
    git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
    npx prettier --write changelog.md

gen-diff:
    git show $(shell git describe --abbrev=0):schema.yml > old_schema.yml
    git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > old_schema.yml
    docker run \
        --rm -v ${PWD}:/local \
        --user ${UID}:${GID} \
        docker.io/openapitools/openapi-diff:2.1.0-beta.3 \
        docker.io/openapitools/openapi-diff:2.1.0-beta.6 \
        --markdown /local/diff.md \
        /local/old_schema.yml /local/schema.yml
    rm old_schema.yml
    npx prettier --write diff.md

gen-clean:
    rm -rf web/api/src/
@@ -86,7 +88,7 @@ gen-client-ts:
    docker run \
        --rm -v ${PWD}:/local \
        --user ${UID}:${GID} \
        docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
        docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
        -i /local/schema.yml \
        -g typescript-fetch \
        -o /local/gen-ts-api \
@@ -99,20 +101,21 @@ gen-client-ts:
    \cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api

gen-client-go:
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O config.yaml
    mkdir -p templates
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O templates/README.mustache
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O templates/go.mod.mustache
    mkdir -p ./gen-go-api ./gen-go-api/templates
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./gen-go-api/config.yaml
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./gen-go-api/templates/README.mustache
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./gen-go-api/templates/go.mod.mustache
    cp schema.yml ./gen-go-api/
    docker run \
        --rm -v ${PWD}:/local \
        --rm -v ${PWD}/gen-go-api:/local \
        --user ${UID}:${GID} \
        docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
        docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
        -i /local/schema.yml \
        -g go \
        -o /local/gen-go-api \
        -o /local/ \
        -c /local/config.yaml
    go mod edit -replace goauthentik.io/api/v3=./gen-go-api
    rm -rf config.yaml ./templates/
    rm -rf ./gen-go-api/config.yaml ./gen-go-api/templates/

gen-dev-config:
    python -m scripts.generate_config
@@ -126,7 +129,7 @@ gen: gen-build gen-clean gen-client-ts
web-build: web-install
    cd web && npm run build

web: web-lint-fix web-lint
web: web-lint-fix web-lint web-check-compile

web-install:
    cd web && npm ci
@@ -144,6 +147,9 @@ web-lint:
    cd web && npm run lint
    cd web && npm run lit-analyse

web-check-compile:
    cd web && npm run tsc

web-extract:
    cd web && npm run extract

@@ -167,7 +173,6 @@ website-watch:

# These targets are use by GitHub actions to allow usage of matrix
# which makes the YAML File a lot smaller
PY_SOURCES=authentik tests lifecycle
ci--meta-debug:
    python -V
    node --version
@@ -178,6 +183,12 @@ ci-pylint: ci--meta-debug
ci-black: ci--meta-debug
    black --check $(PY_SOURCES)

ci-ruff: ci--meta-debug
    ruff check $(PY_SOURCES)

ci-codespell: ci--meta-debug
    codespell $(CODESPELL_ARGS) -s

ci-isort: ci--meta-debug
    isort --check $(PY_SOURCES)

@@ -195,6 +206,8 @@ install: web-install website-install

dev-reset:
    dropdb -U postgres -h localhost authentik
    # Also remove the test-db if it exists
    dropdb -U postgres -h localhost test_authentik || true
    createdb -U postgres -h localhost authentik
    redis-cli -n 0 flushall
    make migrate
README.md (36 lines changed)

@@ -5,39 +5,43 @@
---

[](https://goauthentik.io/discord)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-main.yml)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-outpost.yml)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-web.yml)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-main.yml)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-outpost.yml)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-web.yml)
[](https://codecov.io/gh/goauthentik/authentik)
[](https://www.transifex.com/beryjuorg/authentik/)
[](https://www.transifex.com/authentik/authentik/)

## What is authentik?

authentik is an open-source Identity Provider focused on flexibility and versatility. You can use authentik in an existing environment to add support for new protocols. authentik is also a great solution for implementing signup/recovery/etc in your application, so you don't have to deal with it.
Authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. Authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them.

## Installation

For small/test setups it is recommended to use docker-compose, see the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github)
For small/test setups it is recommended to use Docker Compose; refer to the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github).

For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github)
For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github).

## Screenshots

Light | Dark
--- | ---
 | 
 | 
| Light | Dark |
| ------------------------------------------------------ | ----------------------------------------------------- |
|  |  |
|  |  |

## Development

See [Development Documentation](https://goauthentik.io/developer-docs/?utm_source=github)
See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github)

## Security

See [SECURITY.md](SECURITY.md)

## Adoption and Contributions

Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).

## Sponsors

This project is proudly sponsored by:
@@ -49,11 +53,3 @@ This project is proudly sponsored by:
</p>

DigitalOcean provides development and testing resources for authentik.

<p>
    <a href="https://www.netlify.com">
        <img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" />
    </a>
</p>

Netlify hosts the [goauthentik.io](https://goauthentik.io) site.
SECURITY.md (40 lines changed)

@@ -1,18 +1,44 @@
# Security Policy
Authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce it's existence and release the fixed version.

## Supported Versions

(.x being the latest patch release for each version)

| Version | Supported |
| ---------- | ------------------ |
| 2022.10.x | :white_check_mark: |
| 2022.11.x | :white_check_mark: |
| Version | Supported |
| --------- | ------------------ |
| 2023.4.x | :white_check_mark: |
| 2023.5.x | :white_check_mark: |

## Reporting a Vulnerability

To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io)
To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io). Be sure to include relevant information like which version you've found the issue in, instructions on how to reproduce the issue, and anything else that might make it easier for us to find the bug.

## Criticality levels

### High

- Authorization bypass
- Circumvention of policies

### Moderate

- Denial-of-Service attacks

### Low

- Unvalidated redirects
- Issues requiring uncommon setups

## Disclosure process

1. Issue is reported via Email as listed above.
2. The authentik Security team will try to reproduce the issue and ask for more information if required.
3. A criticality level is assigned.
4. A fix is created, and if possible tested by the issue reporter.
5. The fix is backported to other supported versions, and if possible a workaround for other versions is created.
6. An announcement is sent out with a fixed release date and criticality level of the issue. The announcement will be sent at least 24 hours before the release of the fix
7. The fixed version is released for the supported versions.

## Getting security notifications

To get security notifications, join the [discord](https://goauthentik.io/discord) server. In the future there will be a mailing list too.
To get security notifications, subscribe to the mailing list [here](https://groups.google.com/g/authentik-security-announcements) or join the [discord](https://goauthentik.io/discord) server.
@@ -2,16 +2,14 @@
from os import environ
from typing import Optional

__version__ = "2022.11.1"
__version__ = "2023.5.6"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"


def get_build_hash(fallback: Optional[str] = None) -> str:
    """Get build hash"""
    build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "")
    if build_hash == "" and fallback:
        return fallback
    return build_hash
    return fallback if build_hash == "" and fallback else build_hash


def get_full_version() -> str:
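The version-helper hunk above collapses the old two-branch fallback into a single conditional expression. As a quick illustration (a minimal sketch using only the standard library, with the environment key taken directly from the diff), the resulting behaviour is:

```python
from os import environ
from typing import Optional

ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"


def get_build_hash(fallback: Optional[str] = None) -> str:
    """Return the git build hash from the environment, or the fallback if unset."""
    build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "")
    # One expression instead of the old two branches: prefer the fallback when
    # the environment produced an empty string and a fallback was supplied.
    return fallback if build_hash == "" and fallback else build_hash
```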
@@ -1,4 +1,7 @@
"""authentik administration metrics"""
from datetime import timedelta

from django.db.models.functions import ExtractHour
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import IntegerField, SerializerMethodField
@@ -21,38 +24,44 @@ class CoordinateSerializer(PassiveSerializer):
class LoginMetricsSerializer(PassiveSerializer):
    """Login Metrics per 1h"""

    logins_per_1h = SerializerMethodField()
    logins_failed_per_1h = SerializerMethodField()
    authorizations_per_1h = SerializerMethodField()
    logins = SerializerMethodField()
    logins_failed = SerializerMethodField()
    authorizations = SerializerMethodField()

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_logins_per_1h(self, _):
        """Get successful logins per hour for the last 24 hours"""
    def get_logins(self, _):
        """Get successful logins per 8 hours for the last 7 days"""
        user = self.context["user"]
        return (
            get_objects_for_user(user, "authentik_events.view_event")
            .filter(action=EventAction.LOGIN)
            .get_events_per_hour()
            get_objects_for_user(user, "authentik_events.view_event").filter(
                action=EventAction.LOGIN
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_logins_failed_per_1h(self, _):
        """Get failed logins per hour for the last 24 hours"""
    def get_logins_failed(self, _):
        """Get failed logins per 8 hours for the last 7 days"""
        user = self.context["user"]
        return (
            get_objects_for_user(user, "authentik_events.view_event")
            .filter(action=EventAction.LOGIN_FAILED)
            .get_events_per_hour()
            get_objects_for_user(user, "authentik_events.view_event").filter(
                action=EventAction.LOGIN_FAILED
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_authorizations_per_1h(self, _):
        """Get successful authorizations per hour for the last 24 hours"""
    def get_authorizations(self, _):
        """Get successful authorizations per 8 hours for the last 7 days"""
        user = self.context["user"]
        return (
            get_objects_for_user(user, "authentik_events.view_event")
            .filter(action=EventAction.AUTHORIZE_APPLICATION)
            .get_events_per_hour()
            get_objects_for_user(user, "authentik_events.view_event").filter(
                action=EventAction.AUTHORIZE_APPLICATION
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )
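The metrics serializers above switch from 24 hourly data points to 7 days of 8-hour buckets (the "3 data points per day" comment). The sketch below is illustrative only and does not use authentik's own queryset helpers; it just shows the bucketing arithmetic implied by a `get_events_per(timedelta(days=7), ExtractHour, 7 * 3)` call:

```python
from datetime import datetime, timedelta


def bucket_events(timestamps: list[datetime], now: datetime) -> list[int]:
    """Illustrative only: count events into 7 days * 3 buckets of 8 hours each."""
    span = timedelta(days=7)
    bucket = timedelta(hours=8)
    counts = [0] * (7 * 3)
    for ts in timestamps:
        age = now - ts
        if timedelta(0) <= age < span:
            # age / bucket is < 21 whenever age < span, so the index is in range
            counts[int(age / bucket)] += 1
    return counts  # counts[0] is the most recent 8-hour window
```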
@@ -1,5 +1,4 @@
"""authentik administration overview"""
import os
import platform
from datetime import datetime
from sys import version as python_version
@@ -8,7 +7,6 @@ from typing import TypedDict
from django.utils.timezone import now
from drf_spectacular.utils import extend_schema
from gunicorn import version_info as gunicorn_version
from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
from rest_framework.fields import SerializerMethodField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
@@ -16,6 +14,7 @@ from rest_framework.response import Response
from rest_framework.views import APIView

from authentik.core.api.utils import PassiveSerializer
from authentik.lib.utils.reflection import get_env
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost

@@ -34,7 +33,6 @@ class RuntimeDict(TypedDict):
class SystemSerializer(PassiveSerializer):
    """Get system information."""

    env = SerializerMethodField()
    http_headers = SerializerMethodField()
    http_host = SerializerMethodField()
    http_is_secure = SerializerMethodField()
@@ -43,10 +41,6 @@ class SystemSerializer(PassiveSerializer):
    server_time = SerializerMethodField()
    embedded_outpost_host = SerializerMethodField()

    def get_env(self, request: Request) -> dict[str, str]:
        """Get Environment"""
        return os.environ.copy()

    def get_http_headers(self, request: Request) -> dict[str, str]:
        """Get HTTP Request headers"""
        headers = {}
@@ -69,7 +63,7 @@ class SystemSerializer(PassiveSerializer):
        return {
            "python_version": python_version,
            "gunicorn_version": ".".join(str(x) for x in gunicorn_version),
            "environment": "kubernetes" if SERVICE_HOST_ENV_NAME in os.environ else "compose",
            "environment": get_env(),
            "architecture": platform.machine(),
            "platform": platform.platform(),
            "uname": " ".join(platform.uname()),
@@ -97,8 +91,14 @@ class SystemView(APIView):
    permission_classes = [IsAdminUser]
    pagination_class = None
    filter_backends = []
    serializer_class = SystemSerializer

    @extend_schema(responses={200: SystemSerializer(many=False)})
    def get(self, request: Request) -> Response:
        """Get system information."""
        return Response(SystemSerializer(request).data)

    @extend_schema(responses={200: SystemSerializer(many=False)})
    def post(self, request: Request) -> Response:
        """Get system information."""
        return Response(SystemSerializer(request).data)
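The system API above stops exposing a raw copy of `os.environ` and replaces the hard-coded kubernetes/compose check with `get_env()` from `authentik.lib.utils.reflection`. A hypothetical sketch of what such a helper could look like is shown below; the real implementation may differ, and the extra Docker marker check is an assumption:

```python
# Hypothetical sketch only; authentik's real get_env() lives in
# authentik.lib.utils.reflection and may use different rules.
import os


def get_env() -> str:
    """Guess the deployment environment from well-known markers."""
    if "KUBERNETES_SERVICE_HOST" in os.environ:  # present in any Kubernetes pod
        return "kubernetes"
    if os.path.exists("/.dockerenv"):  # common marker inside Docker containers
        return "compose"
    return "custom"
```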
@@ -7,7 +7,13 @@ from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
from rest_framework.decorators import action
from rest_framework.fields import CharField, ChoiceField, DateTimeField, ListField
from rest_framework.fields import (
    CharField,
    ChoiceField,
    DateTimeField,
    ListField,
    SerializerMethodField,
)
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
@@ -26,6 +32,7 @@ class TaskSerializer(PassiveSerializer):
    task_name = CharField()
    task_description = CharField()
    task_finish_timestamp = DateTimeField(source="finish_time")
    task_duration = SerializerMethodField()

    status = ChoiceField(
        source="result.status.name",
@@ -33,13 +40,18 @@ class TaskSerializer(PassiveSerializer):
    )
    messages = ListField(source="result.messages")

    def to_representation(self, instance):
    def get_task_duration(self, instance: TaskInfo) -> int:
        """Get the duration a task took to run"""
        return max(instance.finish_timestamp - instance.start_timestamp, 0)

    def to_representation(self, instance: TaskInfo):
        """When a new version of authentik adds fields to TaskInfo,
        the API will fail with an AttributeError, as the classes
        are pickled in cache. In that case, just delete the info"""
        try:
            return super().to_representation(instance)
        except AttributeError:  # pragma: no cover
        # pylint: disable=broad-except
        except Exception:  # pragma: no cover
            if isinstance(self.instance, list):
                for inst in self.instance:
                    inst.delete()
@@ -68,7 +80,6 @@ class TaskViewSet(ViewSet):
            ),
        ],
    )
    # pylint: disable=invalid-name
    def retrieve(self, request: Request, pk=None) -> Response:
        """Get a single system task"""
        task = TaskInfo.by_name(pk)
@@ -99,7 +110,6 @@ class TaskViewSet(ViewSet):
        ],
    )
    @action(detail=True, methods=["post"])
    # pylint: disable=invalid-name
    def retry(self, request: Request, pk=None) -> Response:
        """Retry task"""
        task = TaskInfo.by_name(pk)
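The new `task_duration` field above is simply the finish timestamp minus the start timestamp, clamped at zero. A minimal standalone sketch of that calculation (plain floats instead of authentik's `TaskInfo` object):

```python
def task_duration(start_timestamp: float, finish_timestamp: float) -> int:
    """Elapsed task runtime in whole seconds, clamped so clock skew or a
    missing start time can never produce a negative duration."""
    return max(int(finish_timestamp - start_timestamp), 0)
```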
@@ -8,7 +8,6 @@ from authentik.root.monitoring import monitoring_set


@receiver(monitoring_set)
# pylint: disable=unused-argument
def monitoring_set_workers(sender, **kwargs):
    """Set worker gauge"""
    count = len(CELERY_APP.control.ping(timeout=0.5))
@@ -16,8 +15,7 @@ def monitoring_set_workers(sender, **kwargs):


@receiver(monitoring_set)
# pylint: disable=unused-argument
def monitoring_set_tasks(sender, **kwargs):
    """Set task gauges"""
    for task in TaskInfo.all().values():
        task.set_prom_metrics()
        task.update_metrics()
@@ -9,6 +9,7 @@ from authentik.blueprints.tests import reconcile_app
from authentik.core.models import Group, User
from authentik.core.tasks import clean_expired_models
from authentik.events.monitored_tasks import TaskResultStatus
from authentik.lib.generators import generate_id


class TestAdminAPI(TestCase):
@@ -16,8 +17,8 @@ class TestAdminAPI(TestCase):

    def setUp(self) -> None:
        super().setUp()
        self.user = User.objects.create(username="test-user")
        self.group = Group.objects.create(name="superusers", is_superuser=True)
        self.user = User.objects.create(username=generate_id())
        self.group = Group.objects.create(name=generate_id(), is_superuser=True)
        self.group.users.add(self.user)
        self.group.save()
        self.client.force_login(self.user)
authentik/admin/urls.py (new file, 22 lines)
@@ -0,0 +1,22 @@
"""API URLs"""
from django.urls import path

from authentik.admin.api.meta import AppsViewSet
from authentik.admin.api.metrics import AdministrationMetricsViewSet
from authentik.admin.api.system import SystemView
from authentik.admin.api.tasks import TaskViewSet
from authentik.admin.api.version import VersionView
from authentik.admin.api.workers import WorkerView

api_urlpatterns = [
    ("admin/system_tasks", TaskViewSet, "admin_system_tasks"),
    ("admin/apps", AppsViewSet, "apps"),
    path(
        "admin/metrics/",
        AdministrationMetricsViewSet.as_view(),
        name="admin_metrics",
    ),
    path("admin/version/", VersionView.as_view(), name="admin_version"),
    path("admin/workers/", WorkerView.as_view(), name="admin_workers"),
    path("admin/system/", SystemView.as_view(), name="admin_system"),
]
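This new per-app module mixes two kinds of entries: plain tuples that the shared DRF router registers as ViewSets (prefix, ViewSet class, basename), and ready-made path() objects that are appended to the URL patterns as-is; the api v3 URL module later in this diff iterates over installed authentik apps and mounts whatever `api_urlpatterns` it finds. A minimal sketch of what another app could expose under the same convention; the module path and the Example* classes are hypothetical, used only for illustration:

"""Hypothetical authentik example app API URLs (illustration only, not part of this change)"""
from django.urls import path

from authentik.example.api import ExampleDetailView, ExampleViewSet  # hypothetical imports

api_urlpatterns = [
    # tuple entries are registered on the central router, roughly:
    # router.register("example/items", ExampleViewSet, basename="example-items")
    ("example/items", ExampleViewSet, "example-items"),
    # path() entries are mounted directly, outside the router
    path("example/detail/", ExampleDetailView.as_view(), name="example-detail"),
]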
@@ -1,4 +1,5 @@
"""API Authentication"""
from hmac import compare_digest
from typing import Any, Optional

from django.conf import settings
@@ -11,7 +12,6 @@ from authentik.core.middleware import CTX_AUTH_VIA
from authentik.core.models import Token, TokenIntents, User
from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
from authentik.providers.oauth2.models import RefreshToken

LOGGER = get_logger()

@@ -33,6 +33,18 @@ def validate_auth(header: bytes) -> Optional[str]:

def bearer_auth(raw_header: bytes) -> Optional[User]:
    """raw_header in the Format of `Bearer ....`"""
    user = auth_user_lookup(raw_header)
    if not user:
        return None
    if not user.is_active:
        raise AuthenticationFailed("Token invalid/expired")
    return user


def auth_user_lookup(raw_header: bytes) -> Optional[User]:
    """raw_header in the Format of `Bearer ....`"""
    from authentik.providers.oauth2.models import AccessToken

    auth_credentials = validate_auth(raw_header)
    if not auth_credentials:
        return None
@@ -44,8 +56,8 @@ def bearer_auth(raw_header: bytes) -> Optional[User]:
        CTX_AUTH_VIA.set("api_token")
        return key_token.user
    # then try to auth via JWT
    jwt_token = RefreshToken.filter_not_expired(
        refresh_token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
    jwt_token = AccessToken.filter_not_expired(
        token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
    ).first()
    if jwt_token:
        # Double-check scopes, since they are saved in a single string
@@ -67,7 +79,7 @@ def token_secret_key(value: str) -> Optional[User]:
    and return the service account for the managed outpost"""
    from authentik.outposts.apps import MANAGED_OUTPOST

    if value != settings.SECRET_KEY:
    if not compare_digest(value, settings.SECRET_KEY):
        return None
    outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
    if not outposts:
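The switch from `value != settings.SECRET_KEY` to `compare_digest` replaces a short-circuiting string comparison with a constant-time one, so the check does not leak how many leading characters of the secret matched. A minimal standalone sketch of the same idea, with a made-up secret value:

from hmac import compare_digest

SECRET_KEY = "example-secret-key"  # placeholder value for illustration only


def is_valid_secret(candidate: str) -> bool:
    # compare_digest examines every character regardless of where the first
    # mismatch occurs, unlike ==/!= which can return on the first differing byte
    return compare_digest(candidate, SECRET_KEY)


print(is_valid_secret("example-secret-key"))  # True
print(is_valid_secret("example-secret-kez"))  # False, in roughly the same time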
@@ -1,9 +1,15 @@
"""API Authorization"""
from django.conf import settings
from django.db.models import Model
from django.db.models.query import QuerySet
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.authentication import get_authorization_header
from rest_framework.filters import BaseFilterBackend
from rest_framework.permissions import BasePermission
from rest_framework.request import Request
from rest_framework_guardian.filters import ObjectPermissionsFilter

from authentik.api.authentication import validate_auth


class OwnerFilter(BaseFilterBackend):
@@ -17,6 +23,20 @@ class OwnerFilter(BaseFilterBackend):
        return queryset.filter(**{self.owner_key: request.user})


class SecretKeyFilter(DjangoFilterBackend):
    """Allow access to all objects when authenticated with secret key as token.

    Replaces both DjangoFilterBackend and ObjectPermissionsFilter"""

    def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
        auth_header = get_authorization_header(request)
        token = validate_auth(auth_header)
        if token and token == settings.SECRET_KEY:
            return queryset
        queryset = ObjectPermissionsFilter().filter_queryset(request, queryset, view)
        return super().filter_queryset(request, queryset, view)


class OwnerPermissions(BasePermission):
    """Authorize requests by an object's owner matching the requesting user"""
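SecretKeyFilter sits in the regular DRF filter chain: when the request authenticated with the secret key it short-circuits and returns the unfiltered queryset, otherwise it applies object-level permissions and then the normal DjangoFilterBackend behaviour via super(). A rough sketch of how such a backend gets attached to a ViewSet, assuming the module path authentik.api.authorization for the class above; the serializer and ViewSet here are hypothetical and only reuse the Application model that appears elsewhere in this diff:

from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet

from authentik.api.authorization import SecretKeyFilter
from authentik.core.models import Application


class ApplicationExampleSerializer(ModelSerializer):
    """Minimal serializer, defined here only to keep the sketch self-contained"""

    class Meta:
        model = Application
        fields = ["pk", "name", "slug"]


class ApplicationExampleViewSet(ModelViewSet):
    """Hypothetical ViewSet showing where the filter backend plugs in"""

    queryset = Application.objects.all()
    serializer_class = ApplicationExampleSerializer
    # DRF calls each backend's filter_queryset() in order; SecretKeyFilter
    # returns everything for secret-key requests and otherwise defers to
    # object permissions plus the usual DjangoFilterBackend filtering
    filter_backends = [SecretKeyFilter]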
@ -7,82 +7,13 @@ API Browser - {{ tenant.branding_title }}
|
||||
{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
<script type="module" src="{% static 'dist/rapidoc-min.js' %}"></script>
|
||||
<script>
|
||||
function getCookie(name) {
|
||||
let cookieValue = "";
|
||||
if (document.cookie && document.cookie !== "") {
|
||||
const cookies = document.cookie.split(";");
|
||||
for (let i = 0; i < cookies.length; i++) {
|
||||
const cookie = cookies[i].trim();
|
||||
// Does this cookie string begin with the name we want?
|
||||
if (cookie.substring(0, name.length + 1) === name + "=") {
|
||||
cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return cookieValue;
|
||||
}
|
||||
window.addEventListener('DOMContentLoaded', (event) => {
|
||||
const rapidocEl = document.querySelector('rapi-doc');
|
||||
rapidocEl.addEventListener('before-try', (e) => {
|
||||
e.detail.request.headers.append('X-authentik-CSRF', getCookie("authentik_csrf"));
|
||||
});
|
||||
});
|
||||
</script>
|
||||
<style>
|
||||
img.logo {
|
||||
width: 100%;
|
||||
padding: 1rem 0.5rem 1.5rem 0.5rem;
|
||||
min-height: 48px;
|
||||
}
|
||||
</style>
|
||||
<script src="{% static 'dist/standalone/api-browser/index.js' %}?version={{ version }}" type="module"></script>
|
||||
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
|
||||
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
|
||||
<link rel="icon" href="{{ tenant.branding_favicon }}">
|
||||
<link rel="shortcut icon" href="{{ tenant.branding_favicon }}">
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
<rapi-doc
|
||||
spec-url="{{ path }}"
|
||||
heading-text=""
|
||||
theme="light"
|
||||
render-style="read"
|
||||
default-schema-tab="schema"
|
||||
primary-color="#fd4b2d"
|
||||
nav-bg-color="#212427"
|
||||
bg-color="#000000"
|
||||
text-color="#000000"
|
||||
nav-text-color="#ffffff"
|
||||
nav-hover-bg-color="#3c3f42"
|
||||
nav-accent-color="#4f5255"
|
||||
nav-hover-text-color="#ffffff"
|
||||
use-path-in-nav-bar="true"
|
||||
nav-item-spacing="relaxed"
|
||||
allow-server-selection="false"
|
||||
show-header="false"
|
||||
allow-spec-url-load="false"
|
||||
allow-spec-file-load="false">
|
||||
<div slot="nav-logo">
|
||||
<img class="logo" src="{% static 'dist/assets/icons/icon_left_brand.png' %}" />
|
||||
</div>
|
||||
</rapi-doc>
|
||||
<script>
|
||||
const rapidoc = document.querySelector("rapi-doc");
|
||||
const matcher = window.matchMedia("(prefers-color-scheme: light)");
|
||||
const changer = (ev) => {
|
||||
const style = getComputedStyle(document.documentElement);
|
||||
let bg, text = "";
|
||||
if (matcher.matches) {
|
||||
bg = style.getPropertyValue('--pf-global--BackgroundColor--light-300');
|
||||
text = style.getPropertyValue('--pf-global--Color--300');
|
||||
} else {
|
||||
bg = style.getPropertyValue('--ak-dark-background');
|
||||
text = style.getPropertyValue('--ak-dark-foreground');
|
||||
}
|
||||
rapidoc.attributes.getNamedItem("bg-color").value = bg.trim();
|
||||
rapidoc.attributes.getNamedItem("text-color").value = text.trim();
|
||||
rapidoc.requestUpdate();
|
||||
};
|
||||
matcher.addEventListener("change", changer);
|
||||
window.addEventListener("load", changer);
|
||||
</script>
|
||||
<ak-api-browser schemaPath="{{ path }}"></ak-api-browser>
|
||||
{% endblock %}
|
||||
|
@ -1,18 +1,19 @@
|
||||
"""Test API Authentication"""
|
||||
import json
|
||||
from base64 import b64encode
|
||||
|
||||
from django.conf import settings
|
||||
from django.test import TestCase
|
||||
from guardian.shortcuts import get_anonymous_user
|
||||
from django.utils import timezone
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
|
||||
from authentik.api.authentication import bearer_auth
|
||||
from authentik.blueprints.tests import reconcile_app
|
||||
from authentik.core.models import USER_ATTRIBUTE_SA, Token, TokenIntents
|
||||
from authentik.core.tests.utils import create_test_flow
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
|
||||
from authentik.providers.oauth2.models import OAuth2Provider, RefreshToken
|
||||
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
|
||||
|
||||
|
||||
class TestAPIAuth(TestCase):
|
||||
@ -36,9 +37,18 @@ class TestAPIAuth(TestCase):
|
||||
|
||||
def test_bearer_valid(self):
|
||||
"""Test valid token"""
|
||||
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=get_anonymous_user())
|
||||
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=create_test_admin_user())
|
||||
self.assertEqual(bearer_auth(f"Bearer {token.key}".encode()), token.user)
|
||||
|
||||
def test_bearer_valid_deactivated(self):
|
||||
"""Test valid token"""
|
||||
user = create_test_admin_user()
|
||||
user.is_active = False
|
||||
user.save()
|
||||
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=user)
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
bearer_auth(f"Bearer {token.key}".encode())
|
||||
|
||||
def test_managed_outpost(self):
|
||||
"""Test managed outpost"""
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
@ -55,24 +65,28 @@ class TestAPIAuth(TestCase):
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
|
||||
)
|
||||
refresh = RefreshToken.objects.create(
|
||||
user=get_anonymous_user(),
|
||||
refresh = AccessToken.objects.create(
|
||||
user=create_test_admin_user(),
|
||||
provider=provider,
|
||||
refresh_token=generate_id(),
|
||||
token=generate_id(),
|
||||
auth_time=timezone.now(),
|
||||
_scope=SCOPE_AUTHENTIK_API,
|
||||
_id_token=json.dumps({}),
|
||||
)
|
||||
self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)
|
||||
self.assertEqual(bearer_auth(f"Bearer {refresh.token}".encode()), refresh.user)
|
||||
|
||||
def test_jwt_missing_scope(self):
|
||||
"""Test valid JWT"""
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
|
||||
)
|
||||
refresh = RefreshToken.objects.create(
|
||||
user=get_anonymous_user(),
|
||||
refresh = AccessToken.objects.create(
|
||||
user=create_test_admin_user(),
|
||||
provider=provider,
|
||||
refresh_token=generate_id(),
|
||||
token=generate_id(),
|
||||
auth_time=timezone.now(),
|
||||
_scope="",
|
||||
_id_token=json.dumps({}),
|
||||
)
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)
|
||||
self.assertEqual(bearer_auth(f"Bearer {refresh.token}".encode()), refresh.user)
|
||||
|
@ -4,6 +4,7 @@ from guardian.shortcuts import assign_perm
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import Application, User
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
|
||||
class TestAPIDecorators(APITestCase):
|
||||
@ -16,7 +17,7 @@ class TestAPIDecorators(APITestCase):
|
||||
def test_obj_perm_denied(self):
|
||||
"""Test object perm denied"""
|
||||
self.client.force_login(self.user)
|
||||
app = Application.objects.create(name="denied", slug="denied")
|
||||
app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
|
||||
)
|
||||
@ -25,7 +26,7 @@ class TestAPIDecorators(APITestCase):
|
||||
def test_other_perm_denied(self):
|
||||
"""Test other perm denied"""
|
||||
self.client.force_login(self.user)
|
||||
app = Application.objects.create(name="denied", slug="denied")
|
||||
app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
assign_perm("authentik_core.view_application", self.user, app)
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
|
||||
|
@@ -1,5 +1,5 @@
"""core Configs API"""
from os import path
from pathlib import Path

from django.conf import settings
from django.db import models
@@ -29,6 +29,7 @@ class Capabilities(models.TextChoices):
    CAN_GEO_IP = "can_geo_ip"
    CAN_IMPERSONATE = "can_impersonate"
    CAN_DEBUG = "can_debug"
    IS_ENTERPRISE = "is_enterprise"


class ErrorReportingConfigSerializer(PassiveSerializer):
@@ -62,14 +63,16 @@ class ConfigView(APIView):
        """Get all capabilities this server instance supports"""
        caps = []
        deb_test = settings.DEBUG or settings.TEST
        if path.ismount(settings.MEDIA_ROOT) or deb_test:
        if Path(settings.MEDIA_ROOT).is_mount() or deb_test:
            caps.append(Capabilities.CAN_SAVE_MEDIA)
        if GEOIP_READER.enabled:
            caps.append(Capabilities.CAN_GEO_IP)
        if CONFIG.y_bool("impersonation"):
            caps.append(Capabilities.CAN_IMPERSONATE)
        if settings.DEBUG:
        if settings.DEBUG:  # pragma: no cover
            caps.append(Capabilities.CAN_DEBUG)
        if "authentik.enterprise" in settings.INSTALLED_APPS:
            caps.append(Capabilities.IS_ENTERPRISE)
        return caps

    def get_config(self) -> ConfigSerializer:
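Clients can read these capability flags from the config endpoint (registered later in this diff as `root/config/`) and enable or hide features accordingly. A rough sketch with the requests library; the host, the token, the `/api/v3/` mount point, and the exact shape of the `capabilities` field are assumptions rather than facts taken from this diff:

import requests

AUTHENTIK_URL = "https://authentik.example.com"  # placeholder host
TOKEN = "replace-with-an-api-token"              # placeholder credential

response = requests.get(
    f"{AUTHENTIK_URL}/api/v3/root/config/",
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=10,
)
response.raise_for_status()
config = response.json()

# treating "capabilities" as a list of strings is an assumption about the serializer output
if "can_impersonate" in config.get("capabilities", []):
    print("impersonation is enabled on this instance")
if "is_enterprise" in config.get("capabilities", []):
    print("enterprise features are installed")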
@ -1,258 +1,50 @@
|
||||
"""api v3 urls"""
|
||||
from importlib import import_module
|
||||
|
||||
from django.urls import path
|
||||
from django.urls.resolvers import URLPattern
|
||||
from django.views.decorators.cache import cache_page
|
||||
from drf_spectacular.views import SpectacularAPIView
|
||||
from rest_framework import routers
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.admin.api.meta import AppsViewSet
|
||||
from authentik.admin.api.metrics import AdministrationMetricsViewSet
|
||||
from authentik.admin.api.system import SystemView
|
||||
from authentik.admin.api.tasks import TaskViewSet
|
||||
from authentik.admin.api.version import VersionView
|
||||
from authentik.admin.api.workers import WorkerView
|
||||
from authentik.api.v3.config import ConfigView
|
||||
from authentik.api.views import APIBrowserView
|
||||
from authentik.blueprints.api import BlueprintInstanceViewSet
|
||||
from authentik.core.api.applications import ApplicationViewSet
|
||||
from authentik.core.api.authenticated_sessions import AuthenticatedSessionViewSet
|
||||
from authentik.core.api.devices import AdminDeviceViewSet, DeviceViewSet
|
||||
from authentik.core.api.groups import GroupViewSet
|
||||
from authentik.core.api.propertymappings import PropertyMappingViewSet
|
||||
from authentik.core.api.providers import ProviderViewSet
|
||||
from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
|
||||
from authentik.core.api.tokens import TokenViewSet
|
||||
from authentik.core.api.users import UserViewSet
|
||||
from authentik.crypto.api import CertificateKeyPairViewSet
|
||||
from authentik.events.api.events import EventViewSet
|
||||
from authentik.events.api.notification_mappings import NotificationWebhookMappingViewSet
|
||||
from authentik.events.api.notification_rules import NotificationRuleViewSet
|
||||
from authentik.events.api.notification_transports import NotificationTransportViewSet
|
||||
from authentik.events.api.notifications import NotificationViewSet
|
||||
from authentik.flows.api.bindings import FlowStageBindingViewSet
|
||||
from authentik.flows.api.flows import FlowViewSet
|
||||
from authentik.flows.api.stages import StageViewSet
|
||||
from authentik.flows.views.executor import FlowExecutorView
|
||||
from authentik.flows.views.inspector import FlowInspectorView
|
||||
from authentik.outposts.api.outposts import OutpostViewSet
|
||||
from authentik.outposts.api.service_connections import (
|
||||
DockerServiceConnectionViewSet,
|
||||
KubernetesServiceConnectionViewSet,
|
||||
ServiceConnectionViewSet,
|
||||
)
|
||||
from authentik.policies.api.bindings import PolicyBindingViewSet
|
||||
from authentik.policies.api.policies import PolicyViewSet
|
||||
from authentik.policies.dummy.api import DummyPolicyViewSet
|
||||
from authentik.policies.event_matcher.api import EventMatcherPolicyViewSet
|
||||
from authentik.policies.expiry.api import PasswordExpiryPolicyViewSet
|
||||
from authentik.policies.expression.api import ExpressionPolicyViewSet
|
||||
from authentik.policies.hibp.api import HaveIBeenPwendPolicyViewSet
|
||||
from authentik.policies.password.api import PasswordPolicyViewSet
|
||||
from authentik.policies.reputation.api import ReputationPolicyViewSet, ReputationViewSet
|
||||
from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet
|
||||
from authentik.providers.oauth2.api.provider import OAuth2ProviderViewSet
|
||||
from authentik.providers.oauth2.api.scope import ScopeMappingViewSet
|
||||
from authentik.providers.oauth2.api.tokens import AuthorizationCodeViewSet, RefreshTokenViewSet
|
||||
from authentik.providers.proxy.api import ProxyOutpostConfigViewSet, ProxyProviderViewSet
|
||||
from authentik.providers.saml.api import SAMLPropertyMappingViewSet, SAMLProviderViewSet
|
||||
from authentik.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
|
||||
from authentik.sources.oauth.api.source import OAuthSourceViewSet
|
||||
from authentik.sources.oauth.api.source_connection import UserOAuthSourceConnectionViewSet
|
||||
from authentik.sources.plex.api.source import PlexSourceViewSet
|
||||
from authentik.sources.plex.api.source_connection import PlexSourceConnectionViewSet
|
||||
from authentik.sources.saml.api.source import SAMLSourceViewSet
|
||||
from authentik.sources.saml.api.source_connection import UserSAMLSourceConnectionViewSet
|
||||
from authentik.stages.authenticator_duo.api import (
|
||||
AuthenticatorDuoStageViewSet,
|
||||
DuoAdminDeviceViewSet,
|
||||
DuoDeviceViewSet,
|
||||
)
|
||||
from authentik.stages.authenticator_sms.api import (
|
||||
AuthenticatorSMSStageViewSet,
|
||||
SMSAdminDeviceViewSet,
|
||||
SMSDeviceViewSet,
|
||||
)
|
||||
from authentik.stages.authenticator_static.api import (
|
||||
AuthenticatorStaticStageViewSet,
|
||||
StaticAdminDeviceViewSet,
|
||||
StaticDeviceViewSet,
|
||||
)
|
||||
from authentik.stages.authenticator_totp.api import (
|
||||
AuthenticatorTOTPStageViewSet,
|
||||
TOTPAdminDeviceViewSet,
|
||||
TOTPDeviceViewSet,
|
||||
)
|
||||
from authentik.stages.authenticator_validate.api import AuthenticatorValidateStageViewSet
|
||||
from authentik.stages.authenticator_webauthn.api import (
|
||||
AuthenticateWebAuthnStageViewSet,
|
||||
WebAuthnAdminDeviceViewSet,
|
||||
WebAuthnDeviceViewSet,
|
||||
)
|
||||
from authentik.stages.captcha.api import CaptchaStageViewSet
|
||||
from authentik.stages.consent.api import ConsentStageViewSet, UserConsentViewSet
|
||||
from authentik.stages.deny.api import DenyStageViewSet
|
||||
from authentik.stages.dummy.api import DummyStageViewSet
|
||||
from authentik.stages.email.api import EmailStageViewSet
|
||||
from authentik.stages.identification.api import IdentificationStageViewSet
|
||||
from authentik.stages.invitation.api import InvitationStageViewSet, InvitationViewSet
|
||||
from authentik.stages.password.api import PasswordStageViewSet
|
||||
from authentik.stages.prompt.api import PromptStageViewSet, PromptViewSet
|
||||
from authentik.stages.user_delete.api import UserDeleteStageViewSet
|
||||
from authentik.stages.user_login.api import UserLoginStageViewSet
|
||||
from authentik.stages.user_logout.api import UserLogoutStageViewSet
|
||||
from authentik.stages.user_write.api import UserWriteStageViewSet
|
||||
from authentik.tenants.api import TenantViewSet
|
||||
from authentik.lib.utils.reflection import get_apps
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
router = routers.DefaultRouter()
|
||||
router.include_format_suffixes = False
|
||||
|
||||
router.register("admin/system_tasks", TaskViewSet, basename="admin_system_tasks")
|
||||
router.register("admin/apps", AppsViewSet, basename="apps")
|
||||
_other_urls = []
|
||||
for _authentik_app in get_apps():
|
||||
try:
|
||||
api_urls = import_module(f"{_authentik_app.name}.urls")
|
||||
except (ModuleNotFoundError, ImportError):
|
||||
continue
|
||||
if not hasattr(api_urls, "api_urlpatterns"):
|
||||
continue
|
||||
urls: list = getattr(api_urls, "api_urlpatterns")
|
||||
for url in urls:
|
||||
if isinstance(url, URLPattern):
|
||||
_other_urls.append(url)
|
||||
else:
|
||||
router.register(*url)
|
||||
LOGGER.debug(
|
||||
"Mounted API URLs",
|
||||
app_name=_authentik_app.name,
|
||||
)
|
||||
|
||||
router.register("core/authenticated_sessions", AuthenticatedSessionViewSet)
|
||||
router.register("core/applications", ApplicationViewSet)
|
||||
router.register("core/groups", GroupViewSet)
|
||||
router.register("core/users", UserViewSet)
|
||||
router.register("core/user_consent", UserConsentViewSet)
|
||||
router.register("core/tokens", TokenViewSet)
|
||||
router.register("core/tenants", TenantViewSet)
|
||||
|
||||
router.register("outposts/instances", OutpostViewSet)
|
||||
router.register("outposts/service_connections/all", ServiceConnectionViewSet)
|
||||
router.register("outposts/service_connections/docker", DockerServiceConnectionViewSet)
|
||||
router.register("outposts/service_connections/kubernetes", KubernetesServiceConnectionViewSet)
|
||||
router.register("outposts/proxy", ProxyOutpostConfigViewSet)
|
||||
router.register("outposts/ldap", LDAPOutpostConfigViewSet)
|
||||
|
||||
router.register("flows/instances", FlowViewSet)
|
||||
router.register("flows/bindings", FlowStageBindingViewSet)
|
||||
|
||||
router.register("crypto/certificatekeypairs", CertificateKeyPairViewSet)
|
||||
|
||||
router.register("events/events", EventViewSet)
|
||||
router.register("events/notifications", NotificationViewSet)
|
||||
router.register("events/transports", NotificationTransportViewSet)
|
||||
router.register("events/rules", NotificationRuleViewSet)
|
||||
|
||||
router.register("managed/blueprints", BlueprintInstanceViewSet)
|
||||
|
||||
router.register("sources/all", SourceViewSet)
|
||||
router.register("sources/user_connections/all", UserSourceConnectionViewSet)
|
||||
router.register("sources/user_connections/oauth", UserOAuthSourceConnectionViewSet)
|
||||
router.register("sources/user_connections/plex", PlexSourceConnectionViewSet)
|
||||
router.register("sources/user_connections/saml", UserSAMLSourceConnectionViewSet)
|
||||
router.register("sources/ldap", LDAPSourceViewSet)
|
||||
router.register("sources/saml", SAMLSourceViewSet)
|
||||
router.register("sources/oauth", OAuthSourceViewSet)
|
||||
router.register("sources/plex", PlexSourceViewSet)
|
||||
|
||||
router.register("policies/all", PolicyViewSet)
|
||||
router.register("policies/bindings", PolicyBindingViewSet)
|
||||
router.register("policies/expression", ExpressionPolicyViewSet)
|
||||
router.register("policies/event_matcher", EventMatcherPolicyViewSet)
|
||||
router.register("policies/haveibeenpwned", HaveIBeenPwendPolicyViewSet)
|
||||
router.register("policies/password_expiry", PasswordExpiryPolicyViewSet)
|
||||
router.register("policies/password", PasswordPolicyViewSet)
|
||||
router.register("policies/reputation/scores", ReputationViewSet)
|
||||
router.register("policies/reputation", ReputationPolicyViewSet)
|
||||
|
||||
router.register("providers/all", ProviderViewSet)
|
||||
router.register("providers/ldap", LDAPProviderViewSet)
|
||||
router.register("providers/proxy", ProxyProviderViewSet)
|
||||
router.register("providers/oauth2", OAuth2ProviderViewSet)
|
||||
router.register("providers/saml", SAMLProviderViewSet)
|
||||
|
||||
router.register("oauth2/authorization_codes", AuthorizationCodeViewSet)
|
||||
router.register("oauth2/refresh_tokens", RefreshTokenViewSet)
|
||||
|
||||
router.register("propertymappings/all", PropertyMappingViewSet)
|
||||
router.register("propertymappings/ldap", LDAPPropertyMappingViewSet)
|
||||
router.register("propertymappings/saml", SAMLPropertyMappingViewSet)
|
||||
router.register("propertymappings/scope", ScopeMappingViewSet)
|
||||
router.register("propertymappings/notification", NotificationWebhookMappingViewSet)
|
||||
|
||||
router.register("authenticators/all", DeviceViewSet, basename="device")
|
||||
router.register("authenticators/duo", DuoDeviceViewSet)
|
||||
router.register("authenticators/sms", SMSDeviceViewSet)
|
||||
router.register("authenticators/static", StaticDeviceViewSet)
|
||||
router.register("authenticators/totp", TOTPDeviceViewSet)
|
||||
router.register("authenticators/webauthn", WebAuthnDeviceViewSet)
|
||||
router.register(
|
||||
"authenticators/admin/all",
|
||||
AdminDeviceViewSet,
|
||||
basename="admin-device",
|
||||
)
|
||||
router.register(
|
||||
"authenticators/admin/duo",
|
||||
DuoAdminDeviceViewSet,
|
||||
basename="admin-duodevice",
|
||||
)
|
||||
router.register(
|
||||
"authenticators/admin/sms",
|
||||
SMSAdminDeviceViewSet,
|
||||
basename="admin-smsdevice",
|
||||
)
|
||||
router.register(
|
||||
"authenticators/admin/static",
|
||||
StaticAdminDeviceViewSet,
|
||||
basename="admin-staticdevice",
|
||||
)
|
||||
router.register("authenticators/admin/totp", TOTPAdminDeviceViewSet, basename="admin-totpdevice")
|
||||
router.register(
|
||||
"authenticators/admin/webauthn",
|
||||
WebAuthnAdminDeviceViewSet,
|
||||
basename="admin-webauthndevice",
|
||||
)
|
||||
|
||||
router.register("stages/all", StageViewSet)
|
||||
router.register("stages/authenticator/duo", AuthenticatorDuoStageViewSet)
|
||||
router.register("stages/authenticator/sms", AuthenticatorSMSStageViewSet)
|
||||
router.register("stages/authenticator/static", AuthenticatorStaticStageViewSet)
|
||||
router.register("stages/authenticator/totp", AuthenticatorTOTPStageViewSet)
|
||||
router.register("stages/authenticator/validate", AuthenticatorValidateStageViewSet)
|
||||
router.register("stages/authenticator/webauthn", AuthenticateWebAuthnStageViewSet)
|
||||
router.register("stages/captcha", CaptchaStageViewSet)
|
||||
router.register("stages/consent", ConsentStageViewSet)
|
||||
router.register("stages/deny", DenyStageViewSet)
|
||||
router.register("stages/email", EmailStageViewSet)
|
||||
router.register("stages/identification", IdentificationStageViewSet)
|
||||
router.register("stages/invitation/invitations", InvitationViewSet)
|
||||
router.register("stages/invitation/stages", InvitationStageViewSet)
|
||||
router.register("stages/password", PasswordStageViewSet)
|
||||
router.register("stages/prompt/prompts", PromptViewSet)
|
||||
router.register("stages/prompt/stages", PromptStageViewSet)
|
||||
router.register("stages/user_delete", UserDeleteStageViewSet)
|
||||
router.register("stages/user_login", UserLoginStageViewSet)
|
||||
router.register("stages/user_logout", UserLogoutStageViewSet)
|
||||
router.register("stages/user_write", UserWriteStageViewSet)
|
||||
|
||||
router.register("stages/dummy", DummyStageViewSet)
|
||||
router.register("policies/dummy", DummyPolicyViewSet)
|
||||
|
||||
urlpatterns = (
|
||||
[
|
||||
path("", APIBrowserView.as_view(), name="schema-browser"),
|
||||
]
|
||||
+ router.urls
|
||||
+ _other_urls
|
||||
+ [
|
||||
path(
|
||||
"admin/metrics/",
|
||||
AdministrationMetricsViewSet.as_view(),
|
||||
name="admin_metrics",
|
||||
),
|
||||
path("admin/version/", VersionView.as_view(), name="admin_version"),
|
||||
path("admin/workers/", WorkerView.as_view(), name="admin_workers"),
|
||||
path("admin/system/", SystemView.as_view(), name="admin_system"),
|
||||
path("root/config/", ConfigView.as_view(), name="config"),
|
||||
path(
|
||||
"flows/executor/<slug:flow_slug>/",
|
||||
FlowExecutorView.as_view(),
|
||||
name="flow-executor",
|
||||
),
|
||||
path(
|
||||
"flows/inspector/<slug:flow_slug>/",
|
||||
FlowInspectorView.as_view(),
|
||||
name="flow-inspector",
|
||||
),
|
||||
path("schema/", cache_page(86400)(SpectacularAPIView.as_view()), name="schema"),
|
||||
]
|
||||
)
|
||||
|
@@ -1,4 +1,5 @@
"""Serializer mixin for managed models"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
@@ -10,7 +11,9 @@ from rest_framework.serializers import ListSerializer, ModelSerializer
from rest_framework.viewsets import ModelViewSet

from authentik.api.decorators import permission_required
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer
@@ -33,15 +36,31 @@ class BlueprintInstanceSerializer(ModelSerializer):
    """Info about a single blueprint instance file"""

    def validate_path(self, path: str) -> str:
        """Ensure the path specified is retrievable"""
        try:
            BlueprintInstance(path=path).retrieve()
        except BlueprintRetrievalFailed as exc:
            raise ValidationError(exc) from exc
        """Ensure the path (if set) specified is retrievable"""
        if path == "" or path.startswith(OCI_PREFIX):
            return path
        files: list[dict] = blueprints_find_dict.delay().get()
        if path not in [file["path"] for file in files]:
            raise ValidationError(_("Blueprint file does not exist"))
        return path

    class Meta:
    def validate_content(self, content: str) -> str:
        """Ensure content (if set) is a valid blueprint"""
        if content == "":
            return content
        context = self.instance.context if self.instance else {}
        valid, logs = Importer(content, context).validate()
        if not valid:
            text_logs = "\n".join([x["event"] for x in logs])
            raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs}))
        return content

    def validate(self, attrs: dict) -> dict:
        if attrs.get("path", "") == "" and attrs.get("content", "") == "":
            raise ValidationError(_("Either path or content must be set."))
        return super().validate(attrs)

    class Meta:
        model = BlueprintInstance
        fields = [
            "pk",
@@ -54,6 +73,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
            "enabled",
            "managed_models",
            "metadata",
            "content",
        ]
        extra_kwargs = {
            "status": {"read_only": True},
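With the new `content` field, a blueprint instance can be created entirely through the API without a file on disk; the serializer only requires that either `path` or `content` is set, and `validate_content()` runs the Importer against the inline YAML. A rough sketch of such a request; the host, the token, and the `/api/v3/` mount point are placeholders, while the `managed/blueprints` router prefix comes from the URL registrations shown earlier in this diff:

import requests

AUTHENTIK_URL = "https://authentik.example.com"  # placeholder host
TOKEN = "replace-with-an-api-token"              # placeholder credential

inline_blueprint = """
version: 1
metadata:
  name: example-inline-blueprint
entries: []
"""

response = requests.post(
    f"{AUTHENTIK_URL}/api/v3/managed/blueprints/",
    headers={"Authorization": f"Bearer {TOKEN}"},
    json={
        "name": "example-inline-blueprint",
        # path stays empty, so validation falls through to validate_content()
        "content": inline_blueprint,
    },
    timeout=10,
)
response.raise_for_status()
print(response.json())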
@@ -55,11 +55,12 @@ class AuthentikBlueprintsConfig(ManagedAppConfig):
        """Load v1 tasks"""
        self.import_module("authentik.blueprints.v1.tasks")

    def reconcile_blueprints_discover(self):
    def reconcile_blueprints_discovery(self):
        """Run blueprint discovery"""
        from authentik.blueprints.v1.tasks import blueprints_discover
        from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints

        blueprints_discover.delay()
        blueprints_discovery.delay()
        clear_failed_blueprints.delay()

    def import_models(self):
        super().import_models()
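The rename matters because ManagedAppConfig discovers these hooks by method-name prefix, so reconcile_blueprints_discovery is picked up on startup just like its predecessor; the prefix-scanning itself is not shown in this diff. A toy illustration of that pattern, independent of authentik's actual implementation:

class ReconcilingAppConfigSketch:
    """Toy version of the prefix-scan pattern, not the real ManagedAppConfig"""

    def reconcile(self):
        # call every method whose name starts with the reconcile_ prefix
        for name in dir(self):
            if not name.startswith("reconcile_"):
                continue
            getattr(self, name)()


class BlueprintsConfigSketch(ReconcilingAppConfigSketch):
    def reconcile_blueprints_discovery(self):
        print("would queue blueprints_discovery and clear_failed_blueprints")


BlueprintsConfigSketch().reconcile()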
@@ -19,10 +19,8 @@ class Command(BaseCommand):
        for blueprint_path in options.get("blueprints", []):
            content = BlueprintInstance(path=blueprint_path).retrieve()
            importer = Importer(content)
            valid, logs = importer.validate()
            valid, _ = importer.validate()
            if not valid:
                for log in logs:
                    getattr(LOGGER, log.pop("log_level"))(**log)
                self.stderr.write("blueprint invalid")
                sys_exit(1)
            importer.apply()
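The command now branches only on the boolean result and no longer prints the validation logs itself. The same validate-then-apply flow can be reproduced directly against the Importer the command wraps; a minimal sketch, assuming a Django environment with authentik loaded and a placeholder blueprint path relative to the configured blueprints directory:

from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.importer import Importer

# read the raw YAML through the same retrieval logic the command uses
content = BlueprintInstance(path="example.yaml").retrieve()  # placeholder path

importer = Importer(content)
valid, _ = importer.validate()
if not valid:
    raise SystemExit("blueprint invalid")
importer.apply()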
@ -1,12 +1,17 @@
|
||||
"""Generate JSON Schema for blueprints"""
|
||||
from json import dumps, loads
|
||||
from pathlib import Path
|
||||
from json import dumps
|
||||
from typing import Any
|
||||
|
||||
from django.core.management.base import BaseCommand, no_translations
|
||||
from django.db.models import Model
|
||||
from drf_jsonschema_serializer.convert import field_to_converter
|
||||
from rest_framework.fields import Field, JSONField, UUIDField
|
||||
from rest_framework.serializers import Serializer
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.blueprints.v1.importer import is_model_allowed
|
||||
from authentik.blueprints.v1.meta.registry import registry
|
||||
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed
|
||||
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
|
||||
from authentik.lib.models import SerializerModel
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
@ -16,21 +21,146 @@ class Command(BaseCommand):
|
||||
|
||||
schema: dict
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.schema = {
|
||||
"$schema": "http://json-schema.org/draft-07/schema",
|
||||
"$id": "https://goauthentik.io/blueprints/schema.json",
|
||||
"type": "object",
|
||||
"title": "authentik Blueprint schema",
|
||||
"required": ["version", "entries"],
|
||||
"properties": {
|
||||
"version": {
|
||||
"$id": "#/properties/version",
|
||||
"type": "integer",
|
||||
"title": "Blueprint version",
|
||||
"default": 1,
|
||||
},
|
||||
"metadata": {
|
||||
"$id": "#/properties/metadata",
|
||||
"type": "object",
|
||||
"required": ["name"],
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"labels": {"type": "object", "additionalProperties": {"type": "string"}},
|
||||
},
|
||||
},
|
||||
"context": {
|
||||
"$id": "#/properties/context",
|
||||
"type": "object",
|
||||
"additionalProperties": True,
|
||||
},
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"oneOf": [],
|
||||
},
|
||||
},
|
||||
},
|
||||
"$defs": {},
|
||||
}
|
||||
|
||||
@no_translations
|
||||
def handle(self, *args, **options):
|
||||
"""Generate JSON Schema for blueprints"""
|
||||
path = Path(__file__).parent.joinpath("./schema_template.json")
|
||||
with open(path, "r", encoding="utf-8") as _template_file:
|
||||
self.schema = loads(_template_file.read())
|
||||
self.set_model_allowed()
|
||||
self.stdout.write(dumps(self.schema, indent=4))
|
||||
self.build()
|
||||
self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default))
|
||||
|
||||
def set_model_allowed(self):
|
||||
"""Set model enum"""
|
||||
model_names = []
|
||||
@staticmethod
|
||||
def json_default(value: Any) -> Any:
|
||||
"""Helper that handles gettext_lazy strings that JSON doesn't handle"""
|
||||
return str(value)
|
||||
|
||||
def build(self):
|
||||
"""Build all models into the schema"""
|
||||
for model in registry.get_models():
|
||||
if not is_model_allowed(model):
|
||||
if issubclass(model, BaseMetaModel):
|
||||
serializer_class = model.serializer()
|
||||
else:
|
||||
if model._meta.abstract:
|
||||
continue
|
||||
if not is_model_allowed(model):
|
||||
continue
|
||||
model_instance: Model = model()
|
||||
if not isinstance(model_instance, SerializerModel):
|
||||
continue
|
||||
serializer_class = model_instance.serializer
|
||||
serializer = serializer_class(
|
||||
context={
|
||||
SERIALIZER_CONTEXT_BLUEPRINT: False,
|
||||
}
|
||||
)
|
||||
model_path = f"{model._meta.app_label}.{model._meta.model_name}"
|
||||
self.schema["properties"]["entries"]["items"]["oneOf"].append(
|
||||
self.template_entry(model_path, serializer)
|
||||
)
|
||||
|
||||
def template_entry(self, model_path: str, serializer: Serializer) -> dict:
|
||||
"""Template entry for a single model"""
|
||||
model_schema = self.to_jsonschema(serializer)
|
||||
model_schema["required"] = []
|
||||
def_name = f"model_{model_path}"
|
||||
def_path = f"#/$defs/{def_name}"
|
||||
self.schema["$defs"][def_name] = model_schema
|
||||
return {
|
||||
"type": "object",
|
||||
"required": ["model", "identifiers"],
|
||||
"properties": {
|
||||
"model": {"const": model_path},
|
||||
"id": {"type": "string"},
|
||||
"state": {
|
||||
"type": "string",
|
||||
"enum": ["absent", "present", "created"],
|
||||
"default": "present",
|
||||
},
|
||||
"conditions": {"type": "array", "items": {"type": "boolean"}},
|
||||
"attrs": {"$ref": def_path},
|
||||
"identifiers": {"$ref": def_path},
|
||||
},
|
||||
}
|
||||
|
||||
def field_to_jsonschema(self, field: Field) -> dict:
|
||||
"""Convert a single field to json schema"""
|
||||
if isinstance(field, Serializer):
|
||||
result = self.to_jsonschema(field)
|
||||
else:
|
||||
try:
|
||||
converter = field_to_converter[field]
|
||||
result = converter.convert(field)
|
||||
except KeyError:
|
||||
if isinstance(field, JSONField):
|
||||
result = {"type": "object", "additionalProperties": True}
|
||||
elif isinstance(field, UUIDField):
|
||||
result = {"type": "string", "format": "uuid"}
|
||||
else:
|
||||
raise
|
||||
if field.label:
|
||||
result["title"] = field.label
|
||||
if field.help_text:
|
||||
result["description"] = field.help_text
|
||||
return self.clean_result(result)
|
||||
|
||||
def clean_result(self, result: dict) -> dict:
|
||||
"""Remove enumNames from result, recursively"""
|
||||
result.pop("enumNames", None)
|
||||
for key, value in result.items():
|
||||
if isinstance(value, dict):
|
||||
result[key] = self.clean_result(value)
|
||||
return result
|
||||
|
||||
def to_jsonschema(self, serializer: Serializer) -> dict:
|
||||
"""Convert serializer to json schema"""
|
||||
properties = {}
|
||||
required = []
|
||||
for name, field in serializer.fields.items():
|
||||
if field.read_only:
|
||||
continue
|
||||
model_names.append(f"{model._meta.app_label}.{model._meta.model_name}")
|
||||
model_names.sort()
|
||||
self.schema["properties"]["entries"]["items"]["properties"]["model"]["enum"] = model_names
|
||||
sub_schema = self.field_to_jsonschema(field)
|
||||
if field.required:
|
||||
required.append(name)
|
||||
properties[name] = sub_schema
|
||||
|
||||
result = {"type": "object", "properties": properties}
|
||||
if required:
|
||||
result["required"] = required
|
||||
return result
|
||||
|
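Once the command above has printed the generated schema, any blueprint YAML can be checked against it with a standard JSON Schema validator. A rough sketch using the jsonschema and PyYAML packages; the file names are placeholders, and blueprints that use the custom YAML tags shown later in this diff (!Find, !Format, !Context, !KeyOf, !Env) need the blueprint loader rather than yaml.safe_load:

from json import loads

import yaml
from jsonschema import Draft7Validator

# schema.json: the command's stdout redirected to a file (placeholder name)
with open("schema.json", "r", encoding="utf-8") as schema_file:
    schema = loads(schema_file.read())

# blueprint.yaml: a blueprint without custom tags (placeholder name)
with open("blueprint.yaml", "r", encoding="utf-8") as blueprint_file:
    blueprint = yaml.safe_load(blueprint_file)

# the generated schema declares draft-07, matching this validator
validator = Draft7Validator(schema)
errors = sorted(validator.iter_errors(blueprint), key=lambda err: list(err.path))
for error in errors:
    print(f"{list(error.path)}: {error.message}")
if not errors:
    print("blueprint matches the generated schema")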
@ -1,99 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema",
|
||||
"$id": "http://example.com/example.json",
|
||||
"type": "object",
|
||||
"title": "authentik Blueprint schema",
|
||||
"default": {},
|
||||
"required": [
|
||||
"version",
|
||||
"entries"
|
||||
],
|
||||
"properties": {
|
||||
"version": {
|
||||
"$id": "#/properties/version",
|
||||
"type": "integer",
|
||||
"title": "Blueprint version",
|
||||
"default": 1
|
||||
},
|
||||
"metadata": {
|
||||
"$id": "#/properties/metadata",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name"
|
||||
],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"labels": {
|
||||
"type": "object"
|
||||
}
|
||||
}
|
||||
},
|
||||
"context": {
|
||||
"$id": "#/properties/context",
|
||||
"type": "object",
|
||||
"additionalProperties": true
|
||||
},
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$id": "#entry",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"model"
|
||||
],
|
||||
"properties": {
|
||||
"model": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"placeholder"
|
||||
]
|
||||
},
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"state": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"absent",
|
||||
"present",
|
||||
"created"
|
||||
],
|
||||
"default": "present"
|
||||
},
|
||||
"attrs": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "Commonly available field, may not exist on all models"
|
||||
}
|
||||
},
|
||||
"default": {},
|
||||
"additionalProperties": true
|
||||
},
|
||||
"identifiers": {
|
||||
"type": "object",
|
||||
"default": {},
|
||||
"properties": {
|
||||
"pk": {
|
||||
"description": "Commonly available field, may not exist on all models",
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "number"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -6,7 +6,6 @@ from pathlib import Path
|
||||
import django.contrib.postgres.fields
|
||||
from dacite.core import from_dict
|
||||
from django.apps.registry import Apps
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
from yaml import load
|
||||
@ -15,7 +14,7 @@ from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM
|
||||
from authentik.lib.config import CONFIG
|
||||
|
||||
|
||||
def check_blueprint_v1_file(BlueprintInstance: type["BlueprintInstance"], path: Path):
|
||||
def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
|
||||
"""Check if blueprint should be imported"""
|
||||
from authentik.blueprints.models import BlueprintInstanceStatus
|
||||
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
|
||||
@ -46,7 +45,7 @@ def check_blueprint_v1_file(BlueprintInstance: type["BlueprintInstance"], path:
|
||||
enabled=True,
|
||||
managed_models=[],
|
||||
last_applied_hash="",
|
||||
metadata=metadata,
|
||||
metadata=metadata or {},
|
||||
)
|
||||
instance.save()
|
||||
|
||||
@ -71,7 +70,6 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEdit
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [("authentik_flows", "0001_initial")]
|
||||
@ -86,7 +84,12 @@ class Migration(migrations.Migration):
|
||||
"managed",
|
||||
models.TextField(
|
||||
default=None,
|
||||
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
|
||||
help_text=(
|
||||
"Objects which are managed by authentik. These objects are created and"
|
||||
" updated automatically. This is flag only indicates that an object can"
|
||||
" be overwritten by migrations. You can still modify the objects via"
|
||||
" the API, but expect changes to be overwritten in a later update."
|
||||
),
|
||||
null=True,
|
||||
unique=True,
|
||||
verbose_name="Managed by authentik",
|
||||
|
@ -0,0 +1,22 @@
|
||||
# Generated by Django 4.1.5 on 2023-01-10 19:48
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("authentik_blueprints", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="blueprintinstance",
|
||||
name="content",
|
||||
field=models.TextField(blank=True, default=""),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="blueprintinstance",
|
||||
name="path",
|
||||
field=models.TextField(blank=True, default=""),
|
||||
),
|
||||
]
|
@ -0,0 +1,31 @@
|
||||
# Generated by Django 4.1.7 on 2023-04-28 10:49
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
from authentik.lib.migrations import fallback_names
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("authentik_blueprints", "0002_blueprintinstance_content"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(fallback_names("authentik_blueprints", "blueprintinstance", "name")),
|
||||
migrations.AlterField(
|
||||
model_name="blueprintinstance",
|
||||
name="name",
|
||||
field=models.TextField(unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="blueprintinstance",
|
||||
name="managed",
|
||||
field=models.TextField(
|
||||
default=None,
|
||||
help_text="Objects that are managed by authentik. These objects are created and updated automatically. This flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
|
||||
null=True,
|
||||
unique=True,
|
||||
verbose_name="Managed by authentik",
|
||||
),
|
||||
),
|
||||
]
|
@ -1,58 +1,43 @@
|
||||
"""blueprint models"""
|
||||
from pathlib import Path
|
||||
from urllib.parse import urlparse
|
||||
from uuid import uuid4
|
||||
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from opencontainers.distribution.reggie import (
|
||||
NewClient,
|
||||
WithDebug,
|
||||
WithDefaultName,
|
||||
WithDigest,
|
||||
WithReference,
|
||||
WithUserAgent,
|
||||
WithUsernamePassword,
|
||||
)
|
||||
from requests.exceptions import RequestException
|
||||
from rest_framework.serializers import Serializer
|
||||
from structlog import get_logger
|
||||
|
||||
from authentik.blueprints.v1.oci import OCI_PREFIX, BlueprintOCIClient, OCIException
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.models import CreatedUpdatedModel, SerializerModel
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.lib.utils.http import authentik_user_agent
|
||||
|
||||
OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class BlueprintRetrievalFailed(SentryIgnoredException):
|
||||
"""Error raised when we're unable to fetch the blueprint contents, whether it be HTTP files
|
||||
"""Error raised when we are unable to fetch the blueprint contents, whether it be HTTP files
|
||||
not being accessible or local files not being readable"""
|
||||
|
||||
|
||||
class ManagedModel(models.Model):
|
||||
"""Model which can be managed by authentik exclusively"""
|
||||
"""Model that can be managed by authentik exclusively"""
|
||||
|
||||
managed = models.TextField(
|
||||
default=None,
|
||||
null=True,
|
||||
verbose_name=_("Managed by authentik"),
|
||||
help_text=_(
|
||||
(
|
||||
"Objects which are managed by authentik. These objects are created and updated "
|
||||
"automatically. This is flag only indicates that an object can be overwritten by "
|
||||
"migrations. You can still modify the objects via the API, but expect changes "
|
||||
"to be overwritten in a later update."
|
||||
)
|
||||
"Objects that are managed by authentik. These objects are created and updated "
|
||||
"automatically. This flag only indicates that an object can be overwritten by "
|
||||
"migrations. You can still modify the objects via the API, but expect changes "
|
||||
"to be overwritten in a later update."
|
||||
),
|
||||
unique=True,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
|
||||
abstract = True
|
||||
|
||||
|
||||
@ -72,9 +57,10 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
|
||||
instance_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
|
||||
|
||||
name = models.TextField()
|
||||
name = models.TextField(unique=True)
|
||||
metadata = models.JSONField(default=dict)
|
||||
path = models.TextField()
|
||||
path = models.TextField(default="", blank=True)
|
||||
content = models.TextField(default="", blank=True)
|
||||
context = models.JSONField(default=dict)
|
||||
last_applied = models.DateTimeField(auto_now=True)
|
||||
last_applied_hash = models.TextField()
|
||||
@ -86,61 +72,32 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
|
||||
def retrieve_oci(self) -> str:
|
||||
"""Get blueprint from an OCI registry"""
|
||||
url = urlparse(self.path)
|
||||
ref = "latest"
|
||||
path = url.path[1:]
|
||||
if ":" in url.path:
|
||||
path, _, ref = path.partition(":")
|
||||
client = NewClient(
|
||||
f"{url.scheme}://{url.hostname}",
|
||||
WithUserAgent(authentik_user_agent()),
|
||||
WithUsernamePassword(url.username, url.password),
|
||||
WithDefaultName(path),
|
||||
WithDebug(True),
|
||||
)
|
||||
LOGGER.info("Fetching OCI manifests for blueprint", instance=self)
|
||||
manifest_request = client.NewRequest(
|
||||
"GET",
|
||||
"/v2/<name>/manifests/<reference>",
|
||||
WithReference(ref),
|
||||
).SetHeader("Accept", "application/vnd.oci.image.manifest.v1+json")
|
||||
client = BlueprintOCIClient(self.path.replace(OCI_PREFIX, "https://"))
|
||||
try:
|
||||
manifest_response = client.Do(manifest_request)
|
||||
manifest_response.raise_for_status()
|
||||
except RequestException as exc:
|
||||
manifests = client.fetch_manifests()
|
||||
return client.fetch_blobs(manifests)
|
||||
except OCIException as exc:
|
||||
raise BlueprintRetrievalFailed(exc) from exc
|
||||
manifest = manifest_response.json()
|
||||
if "errors" in manifest:
|
||||
raise BlueprintRetrievalFailed(manifest["errors"])
|
||||
|
||||
blob = None
|
||||
for layer in manifest.get("layers", []):
|
||||
if layer.get("mediaType", "") == OCI_MEDIA_TYPE:
|
||||
blob = layer.get("digest")
|
||||
LOGGER.debug("Found layer with matching media type", instance=self, blob=blob)
|
||||
if not blob:
|
||||
raise BlueprintRetrievalFailed("Blob not found")
|
||||
|
||||
blob_request = client.NewRequest(
|
||||
"GET",
|
||||
"/v2/<name>/blobs/<digest>",
|
||||
WithDigest(blob),
|
||||
)
|
||||
def retrieve_file(self) -> str:
|
||||
"""Get blueprint from path"""
|
||||
try:
|
||||
blob_response = client.Do(blob_request)
|
||||
blob_response.raise_for_status()
|
||||
return blob_response.text
|
||||
except RequestException as exc:
|
||||
base = Path(CONFIG.y("blueprints_dir"))
|
||||
full_path = base.joinpath(Path(self.path)).resolve()
|
||||
if not str(full_path).startswith(str(base.resolve())):
|
||||
raise BlueprintRetrievalFailed("Invalid blueprint path")
|
||||
with full_path.open("r", encoding="utf-8") as _file:
|
||||
return _file.read()
|
||||
except (IOError, OSError) as exc:
|
||||
raise BlueprintRetrievalFailed(exc) from exc
|
||||
|
||||
def retrieve(self) -> str:
|
||||
"""Retrieve blueprint contents"""
|
||||
full_path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
|
||||
if full_path.exists():
|
||||
LOGGER.debug("Blueprint path exists locally", instance=self)
|
||||
with full_path.open("r", encoding="utf-8") as _file:
|
||||
return _file.read()
|
||||
return self.retrieve_oci()
|
||||
if self.path.startswith(OCI_PREFIX):
|
||||
return self.retrieve_oci()
|
||||
if self.path != "":
|
||||
return self.retrieve_file()
|
||||
return self.content
|
||||
|
||||
@property
|
||||
def serializer(self) -> Serializer:
|
||||
@ -152,7 +109,6 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
return f"Blueprint Instance {self.name}"
|
||||
|
||||
class Meta:
|
||||
|
||||
verbose_name = _("Blueprint Instance")
|
||||
verbose_name_plural = _("Blueprint Instances")
|
||||
unique_together = (
|
||||
|
@@ -5,8 +5,13 @@ from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
    "blueprints_v1_discover": {
        "task": "authentik.blueprints.v1.tasks.blueprints_discover",
        "task": "authentik.blueprints.v1.tasks.blueprints_discovery",
        "schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"),
        "options": {"queue": "authentik_scheduled"},
    },
    "blueprints_v1_cleanup": {
        "task": "authentik.blueprints.v1.tasks.clear_failed_blueprints",
        "schedule": crontab(minute=fqdn_rand("blueprints_v1_cleanup"), hour="*"),
        "options": {"queue": "authentik_scheduled"},
    },
}
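Both beat entries derive their minute from fqdn_rand, so separate authentik instances do not all fire the discovery and cleanup tasks in the same minute of every hour. A minimal sketch of the idea only, assuming (this is not taken from the diff) that fqdn_rand hashes the host's FQDN together with the seed string down to a 0-59 value:

from hashlib import sha256
from socket import getfqdn


def fqdn_rand_sketch(seed: str, modulo: int = 60) -> int:
    """Deterministic per-host value, sketching what fqdn_rand is used for here"""
    digest = sha256(f"{getfqdn()}{seed}".encode()).hexdigest()
    return int(digest, 16) % modulo


# every host gets its own, but stable, minute for each schedule entry
print(fqdn_rand_sketch("blueprints_v1_discover"))
print(fqdn_rand_sketch("blueprints_v1_cleanup"))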
@@ -1,6 +1,5 @@
"""Blueprint helpers"""
from functools import wraps
from pathlib import Path
from typing import Callable

from django.apps import apps
@@ -45,13 +44,3 @@ def reconcile_app(app_name: str):
        return wrapper

    return wrapper_outer


def load_yaml_fixture(path: str, **kwargs) -> str:
    """Load yaml fixture, optionally formatting it with kwargs"""
    with open(Path(__file__).resolve().parent / Path(path), "r", encoding="utf-8") as _fixture:
        fixture = _fixture.read()
    try:
        return fixture % kwargs
    except TypeError:
        return fixture
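The fixtures that follow use Python percent-style placeholders such as %(uid)s, which is what the `fixture % kwargs` line above (removed from this helper, but the same pattern is used wherever these fixtures are rendered) relies on. A standalone sketch of that substitution with made-up values; the tests themselves use generate_id() for the unique part:

from uuid import uuid4

raw_fixture = """
version: 1
entries:
  - model: authentik_core.application
    identifiers:
      slug: %(uid)s-app
    attrs:
      name: %(uid)s-app
"""

uid = uuid4().hex  # stand-in for generate_id()
# percent-formatting fills every %(...)s placeholder in the YAML before it is parsed
rendered = raw_fixture % {"uid": uid}
assert f"{uid}-app" in rendered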
authentik/blueprints/tests/fixtures/conditional_fields.yaml (vendored, new file, 41 lines)
@ -0,0 +1,41 @@
|
||||
version: 1
|
||||
metadata:
|
||||
name: test conditional fields
|
||||
labels:
|
||||
blueprints.goauthentik.io/description: |
|
||||
Some models have conditional fields that are only allowed in blueprint contexts
|
||||
- Token (key)
|
||||
- Application (icon)
|
||||
- Source (icon)
|
||||
- Flow (background)
|
||||
entries:
|
||||
- model: authentik_core.token
|
||||
identifiers:
|
||||
identifier: %(uid)s-token
|
||||
attrs:
|
||||
key: %(uid)s
|
||||
user: %(user)s
|
||||
intent: api
|
||||
- model: authentik_core.application
|
||||
identifiers:
|
||||
slug: %(uid)s-app
|
||||
attrs:
|
||||
name: %(uid)s-app
|
||||
icon: https://goauthentik.io/img/icon.png
|
||||
- model: authentik_sources_oauth.oauthsource
|
||||
identifiers:
|
||||
slug: %(uid)s-source
|
||||
attrs:
|
||||
name: %(uid)s-source
|
||||
provider_type: azuread
|
||||
consumer_key: %(uid)s
|
||||
consumer_secret: %(uid)s
|
||||
icon: https://goauthentik.io/img/icon.png
|
||||
- model: authentik_flows.flow
|
||||
identifiers:
|
||||
slug: %(uid)s-flow
|
||||
attrs:
|
||||
name: %(uid)s-flow
|
||||
title: %(uid)s-flow
|
||||
designation: authentication
|
||||
background: https://goauthentik.io/img/icon.png
|
authentik/blueprints/tests/fixtures/conditions_fulfilled.yaml (vendored, new file, 21 lines)
@ -0,0 +1,21 @@
|
||||
version: 1
|
||||
entries:
|
||||
- identifiers:
|
||||
name: "%(id1)s"
|
||||
slug: "%(id1)s"
|
||||
model: authentik_flows.flow
|
||||
conditions:
|
||||
- true
|
||||
attrs:
|
||||
designation: stage_configuration
|
||||
title: foo
|
||||
- identifiers:
|
||||
name: "%(id2)s"
|
||||
slug: "%(id2)s"
|
||||
model: authentik_flows.flow
|
||||
conditions:
|
||||
- true
|
||||
- true
|
||||
attrs:
|
||||
designation: stage_configuration
|
||||
title: foo
|
authentik/blueprints/tests/fixtures/conditions_not_fulfilled.yaml (vendored, new file, 21 lines)
@ -0,0 +1,21 @@
|
||||
version: 1
|
||||
entries:
|
||||
- identifiers:
|
||||
name: "%(id1)s"
|
||||
slug: "%(id1)s"
|
||||
model: authentik_flows.flow
|
||||
conditions:
|
||||
- false
|
||||
attrs:
|
||||
designation: stage_configuration
|
||||
title: foo
|
||||
- identifiers:
|
||||
name: "%(id2)s"
|
||||
slug: "%(id2)s"
|
||||
model: authentik_flows.flow
|
||||
conditions:
|
||||
- true
|
||||
- false
|
||||
attrs:
|
||||
designation: stage_configuration
|
||||
title: foo
|
@@ -1,7 +1,13 @@
version: 1
entries:
    - identifiers:
          name: "%(id)s"
          slug: "%(id)s"
      model: authentik_flows.flow
      state: absent
    - identifiers:
          name: "%(id)s"
          slug: "%(id)s"
      model: authentik_flows.flow
      state: absent
    - identifiers:
          name: "%(id)s"
          expression: |
              return True
      model: authentik_policies_expression.expressionpolicy
      state: absent
@ -1,10 +1,10 @@
|
||||
version: 1
|
||||
entries:
|
||||
- identifiers:
|
||||
name: "%(id)s"
|
||||
slug: "%(id)s"
|
||||
model: authentik_flows.flow
|
||||
state: created
|
||||
attrs:
|
||||
designation: stage_configuration
|
||||
title: foo
|
||||
- identifiers:
|
||||
name: "%(id)s"
|
||||
slug: "%(id)s"
|
||||
model: authentik_flows.flow
|
||||
state: created
|
||||
attrs:
|
||||
designation: stage_configuration
|
||||
title: foo
|
||||
|
@ -1,10 +1,10 @@
|
||||
version: 1
|
||||
entries:
|
||||
- identifiers:
|
||||
name: "%(id)s"
|
||||
slug: "%(id)s"
|
||||
model: authentik_flows.flow
|
||||
state: present
|
||||
attrs:
|
||||
designation: stage_configuration
|
||||
title: foo
|
||||
- identifiers:
|
||||
name: "%(id)s"
|
||||
slug: "%(id)s"
|
||||
model: authentik_flows.flow
|
||||
state: present
|
||||
attrs:
|
||||
designation: stage_configuration
|
||||
title: foo
|
||||
|
@ -1,12 +1,13 @@
|
||||
version: 1
|
||||
entries:
|
||||
- identifiers:
|
||||
pk: cb954fd4-65a5-4ad9-b1ee-180ee9559cf4
|
||||
model: authentik_stages_prompt.prompt
|
||||
attrs:
|
||||
field_key: username
|
||||
label: Username
|
||||
type: username
|
||||
required: true
|
||||
placeholder: Username
|
||||
order: 0
|
||||
- identifiers:
|
||||
pk: cb954fd4-65a5-4ad9-b1ee-180ee9559cf4
|
||||
model: authentik_stages_prompt.prompt
|
||||
attrs:
|
||||
name: qwerweqrq
|
||||
field_key: username
|
||||
label: Username
|
||||
type: username
|
||||
required: true
|
||||
placeholder: Username
|
||||
order: 0
|
||||
|
authentik/blueprints/tests/fixtures/tags.yaml (152 changes, vendored)
@ -1,10 +1,150 @@
|
||||
version: 1
|
||||
context:
|
||||
foo: bar
|
||||
policy_property: name
|
||||
policy_property_value: foo-bar-baz-qux
|
||||
sequence:
|
||||
- foo
|
||||
- bar
|
||||
mapping:
|
||||
key1: value
|
||||
key2: 2
|
||||
entries:
|
||||
- attrs:
|
||||
expression: return True
|
||||
identifiers:
|
||||
name: !Format [foo-%s-%s, !Context foo, !Context bar]
|
||||
id: default-source-enrollment-if-username
|
||||
model: authentik_policies_expression.expressionpolicy
|
||||
- model: !Format ["%s", authentik_sources_oauth.oauthsource]
|
||||
state: !Format ["%s", present]
|
||||
identifiers:
|
||||
slug: test
|
||||
attrs:
|
||||
name: test
|
||||
provider_type: github
|
||||
consumer_key: !Env foo
|
||||
consumer_secret: !Env [bar, baz]
|
||||
authentication_flow:
|
||||
!Find [
|
||||
authentik_flows.Flow,
|
||||
[slug, default-source-authentication],
|
||||
]
|
||||
enrollment_flow:
|
||||
!Find [!Format ["%s", authentik_flows.Flow], [slug, default-source-enrollment]]
|
||||
- attrs:
|
||||
expression: return True
|
||||
identifiers:
|
||||
name: !Format [foo-%s-%s-%s, !Context foo, !Context bar, qux]
|
||||
id: policy
|
||||
model: authentik_policies_expression.expressionpolicy
|
||||
- attrs:
|
||||
attributes:
|
||||
policy_pk1:
|
||||
!Format [
|
||||
"%s-%s",
|
||||
!Find [
|
||||
authentik_policies_expression.expressionpolicy,
|
||||
[
|
||||
!Context policy_property,
|
||||
!Context policy_property_value,
|
||||
],
|
||||
[expression, return True],
|
||||
],
|
||||
suffix,
|
||||
]
|
||||
policy_pk2: !Format ["%s-%s", !KeyOf policy, suffix]
|
||||
boolAnd:
|
||||
!Condition [AND, !Context foo, !Format ["%s", "a_string"], 1]
|
||||
boolNand:
|
||||
!Condition [NAND, !Context foo, !Format ["%s", "a_string"], 1]
|
||||
boolOr:
|
||||
!Condition [
|
||||
OR,
|
||||
!Context foo,
|
||||
!Format ["%s", "a_string"],
|
||||
null,
|
||||
]
|
||||
boolNor:
|
||||
!Condition [
|
||||
NOR,
|
||||
!Context foo,
|
||||
!Format ["%s", "a_string"],
|
||||
null,
|
||||
]
|
||||
boolXor:
|
||||
!Condition [XOR, !Context foo, !Format ["%s", "a_string"], 1]
|
||||
boolXnor:
|
||||
!Condition [XNOR, !Context foo, !Format ["%s", "a_string"], 1]
|
||||
boolComplex:
|
||||
!Condition [
|
||||
XNOR,
|
||||
!Condition [AND, !Context non_existing],
|
||||
!Condition [NOR, a string],
|
||||
!Condition [XOR, null],
|
||||
]
|
||||
if_true_complex:
|
||||
!If [
|
||||
true,
|
||||
{
|
||||
dictionary:
|
||||
{
|
||||
with: { keys: "and_values" },
|
||||
and_nested_custom_tags:
|
||||
!Format ["foo-%s", !Context foo],
|
||||
},
|
||||
},
|
||||
null,
|
||||
]
|
||||
if_false_complex:
|
||||
!If [
|
||||
!Condition [AND, false],
|
||||
null,
|
||||
[list, with, items, !Format ["foo-%s", !Context foo]],
|
||||
]
|
||||
if_true_simple: !If [!Context foo, true, text]
|
||||
if_false_simple: !If [null, false, 2]
|
||||
enumerate_mapping_to_mapping: !Enumerate [
|
||||
!Context mapping,
|
||||
MAP,
|
||||
[!Format ["prefix-%s", !Index 0], !Format ["other-prefix-%s", !Value 0]]
|
||||
]
|
||||
enumerate_mapping_to_sequence: !Enumerate [
|
||||
!Context mapping,
|
||||
SEQ,
|
||||
!Format ["prefixed-pair-%s-%s", !Index 0, !Value 0]
|
||||
]
|
||||
enumerate_sequence_to_sequence: !Enumerate [
|
||||
!Context sequence,
|
||||
SEQ,
|
||||
!Format ["prefixed-items-%s-%s", !Index 0, !Value 0]
|
||||
]
|
||||
enumerate_sequence_to_mapping: !Enumerate [
|
||||
!Context sequence,
|
||||
MAP,
|
||||
[!Format ["index: %d", !Index 0], !Value 0]
|
||||
]
|
||||
nested_complex_enumeration: !Enumerate [
|
||||
!Context sequence,
|
||||
MAP,
|
||||
[
|
||||
!Index 0,
|
||||
!Enumerate [
|
||||
!Context mapping,
|
||||
MAP,
|
||||
[
|
||||
!Format ["%s", !Index 0],
|
||||
[
|
||||
!Enumerate [!Value 2, SEQ, !Format ["prefixed-%s", !Value 0]],
|
||||
{
|
||||
outer_value: !Value 1,
|
||||
outer_index: !Index 1,
|
||||
middle_value: !Value 0,
|
||||
middle_index: !Index 0
|
||||
}
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
identifiers:
|
||||
name: test
|
||||
conditions:
|
||||
- !Condition [AND, true, true, text]
|
||||
- true
|
||||
- text
|
||||
model: authentik_core.group
|
||||
|
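Note (illustrative, not part of the diff): the boolean !Condition modes used in the fixture above map onto the comparator table added to v1/common.py further down (all/any plus an XOR reduction). A standalone sketch of how those modes evaluate:

# Sketch of the boolean modes exercised by the !Condition tags above,
# mirroring the comparator table introduced in v1/common.py later in this diff.
from functools import reduce
from operator import ixor

COMPARATORS = {
    "AND": all,
    "NAND": lambda args: not all(args),
    "OR": any,
    "NOR": lambda args: not any(args),
    "XOR": lambda args: reduce(ixor, args) if len(args) > 1 else args[0],
    "XNOR": lambda args: not (reduce(ixor, args) if len(args) > 1 else args[0]),
}

# Arguments are coerced to booleans before comparison, as in the tag's resolve().
args = tuple(bool(x) for x in ("bar", "a_string", 1))
print(COMPARATORS["AND"](args))   # True
print(COMPARATORS["XNOR"](args))  # False (XOR of an odd number of truthy values is True)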
@ -1,34 +1,15 @@
|
||||
"""authentik managed models tests"""
|
||||
from typing import Callable, Type
|
||||
|
||||
from django.apps import apps
|
||||
from django.test import TestCase
|
||||
|
||||
from authentik.blueprints.v1.importer import is_model_allowed
|
||||
from authentik.lib.models import SerializerModel
|
||||
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
|
||||
class TestModels(TestCase):
|
||||
"""Test Models"""
|
||||
|
||||
|
||||
def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
|
||||
"""Test serializer"""
|
||||
|
||||
def tester(self: TestModels):
|
||||
if test_model._meta.abstract:
|
||||
return
|
||||
model_class = test_model()
|
||||
self.assertTrue(isinstance(model_class, SerializerModel))
|
||||
self.assertIsNotNone(model_class.serializer)
|
||||
|
||||
return tester
|
||||
|
||||
|
||||
for app in apps.get_app_configs():
|
||||
if not app.label.startswith("authentik"):
|
||||
continue
|
||||
for model in app.get_models():
|
||||
if not is_model_allowed(model):
|
||||
continue
|
||||
setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model))
|
||||
def test_retrieve_file(self):
|
||||
"""Test retrieve_file"""
|
||||
instance = BlueprintInstance.objects.create(name=generate_id(), path="../etc/hosts")
|
||||
with self.assertRaises(BlueprintRetrievalFailed):
|
||||
instance.retrieve()
|
||||
|
@ -2,7 +2,8 @@
|
||||
from django.test import TransactionTestCase
|
||||
from requests_mock import Mocker
|
||||
|
||||
from authentik.blueprints.models import OCI_MEDIA_TYPE, BlueprintInstance, BlueprintRetrievalFailed
|
||||
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
|
||||
from authentik.blueprints.v1.oci import OCI_MEDIA_TYPE
|
||||
|
||||
|
||||
class TestBlueprintOCI(TransactionTestCase):
|
||||
@ -26,8 +27,31 @@ class TestBlueprintOCI(TransactionTestCase):
|
||||
|
||||
self.assertEqual(
|
||||
BlueprintInstance(
|
||||
path="https://ghcr.io/goauthentik/blueprints/test:latest"
|
||||
).retrieve_oci(),
|
||||
path="oci://ghcr.io/goauthentik/blueprints/test:latest"
|
||||
).retrieve(),
|
||||
"foo",
|
||||
)
|
||||
|
||||
def test_successful_port(self):
|
||||
"""Successful retrieval with custom port"""
|
||||
with Mocker() as mocker:
|
||||
mocker.get(
|
||||
"https://ghcr.io:1234/v2/goauthentik/blueprints/test/manifests/latest",
|
||||
json={
|
||||
"layers": [
|
||||
{
|
||||
"mediaType": OCI_MEDIA_TYPE,
|
||||
"digest": "foo",
|
||||
}
|
||||
]
|
||||
},
|
||||
)
|
||||
mocker.get("https://ghcr.io:1234/v2/goauthentik/blueprints/test/blobs/foo", text="foo")
|
||||
|
||||
self.assertEqual(
|
||||
BlueprintInstance(
|
||||
path="oci://ghcr.io:1234/goauthentik/blueprints/test:latest"
|
||||
).retrieve(),
|
||||
"foo",
|
||||
)
|
||||
|
||||
@ -40,7 +64,7 @@ class TestBlueprintOCI(TransactionTestCase):
|
||||
|
||||
with self.assertRaises(BlueprintRetrievalFailed):
|
||||
BlueprintInstance(
|
||||
path="https://ghcr.io/goauthentik/blueprints/test:latest"
|
||||
path="oci://ghcr.io/goauthentik/blueprints/test:latest"
|
||||
).retrieve_oci()
|
||||
|
||||
def test_manifests_error_response(self):
|
||||
@ -53,7 +77,7 @@ class TestBlueprintOCI(TransactionTestCase):
|
||||
|
||||
with self.assertRaises(BlueprintRetrievalFailed):
|
||||
BlueprintInstance(
|
||||
path="https://ghcr.io/goauthentik/blueprints/test:latest"
|
||||
path="oci://ghcr.io/goauthentik/blueprints/test:latest"
|
||||
).retrieve_oci()
|
||||
|
||||
def test_no_matching_blob(self):
|
||||
@ -72,7 +96,7 @@ class TestBlueprintOCI(TransactionTestCase):
|
||||
)
|
||||
with self.assertRaises(BlueprintRetrievalFailed):
|
||||
BlueprintInstance(
|
||||
path="https://ghcr.io/goauthentik/blueprints/test:latest"
|
||||
path="oci://ghcr.io/goauthentik/blueprints/test:latest"
|
||||
).retrieve_oci()
|
||||
|
||||
def test_blob_error(self):
|
||||
@ -93,5 +117,5 @@ class TestBlueprintOCI(TransactionTestCase):
|
||||
|
||||
with self.assertRaises(BlueprintRetrievalFailed):
|
||||
BlueprintInstance(
|
||||
path="https://ghcr.io/goauthentik/blueprints/test:latest"
|
||||
path="oci://ghcr.io/goauthentik/blueprints/test:latest"
|
||||
).retrieve_oci()
|
||||
|
@@ -13,7 +13,7 @@ from authentik.tenants.models import Tenant
class TestPackaged(TransactionTestCase):
    """Empty class, test methods are added dynamically"""

    @apply_blueprint("default/90-default-tenant.yaml")
    @apply_blueprint("default/default-tenant.yaml")
    def test_decorator_static(self):
        """Test @apply_blueprint decorator"""
        self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())
authentik/blueprints/tests/test_serializer_models.py (34 changes, new file)
@ -0,0 +1,34 @@
|
||||
"""authentik managed models tests"""
|
||||
from typing import Callable, Type
|
||||
|
||||
from django.apps import apps
|
||||
from django.test import TestCase
|
||||
|
||||
from authentik.blueprints.v1.importer import is_model_allowed
|
||||
from authentik.lib.models import SerializerModel
|
||||
|
||||
|
||||
class TestModels(TestCase):
|
||||
"""Test Models"""
|
||||
|
||||
|
||||
def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
|
||||
"""Test serializer"""
|
||||
|
||||
def tester(self: TestModels):
|
||||
if test_model._meta.abstract: # pragma: no cover
|
||||
return
|
||||
model_class = test_model()
|
||||
self.assertTrue(isinstance(model_class, SerializerModel))
|
||||
self.assertIsNotNone(model_class.serializer)
|
||||
|
||||
return tester
|
||||
|
||||
|
||||
for app in apps.get_app_configs():
|
||||
if not app.label.startswith("authentik"):
|
||||
continue
|
||||
for model in app.get_models():
|
||||
if not is_model_allowed(model):
|
||||
continue
|
||||
setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model))
|
@ -1,13 +1,17 @@
|
||||
"""Test blueprints v1"""
|
||||
from os import environ
|
||||
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from authentik.blueprints.tests import load_yaml_fixture
|
||||
from authentik.blueprints.v1.exporter import FlowExporter
|
||||
from authentik.blueprints.v1.importer import Importer, transaction_rollback
|
||||
from authentik.core.models import Group
|
||||
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
from authentik.policies.expression.models import ExpressionPolicy
|
||||
from authentik.policies.models import PolicyBinding
|
||||
from authentik.sources.oauth.models import OAuthSource
|
||||
from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage
|
||||
from authentik.stages.user_login.models import UserLoginStage
|
||||
|
||||
@ -20,12 +24,61 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
importer = Importer('{"version": 3}')
|
||||
self.assertFalse(importer.validate()[0])
|
||||
importer = Importer(
|
||||
(
|
||||
'{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
|
||||
'"model": "authentik_core.User"}]}'
|
||||
)
|
||||
'{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
|
||||
'"model": "authentik_core.User"}]}'
|
||||
)
|
||||
self.assertFalse(importer.validate()[0])
|
||||
importer = Importer(
|
||||
'{"version": 1, "entries": [{"attrs": {"name": "test"}, '
|
||||
'"identifiers": {}, '
|
||||
'"model": "authentik_core.Group"}]}'
|
||||
)
|
||||
self.assertFalse(importer.validate()[0])
|
||||
|
||||
def test_validated_import_dict_identifiers(self):
|
||||
"""Test importing blueprints with dict identifiers."""
|
||||
Group.objects.filter(name__istartswith="test").delete()
|
||||
|
||||
Group.objects.create(
|
||||
name="test1",
|
||||
attributes={
|
||||
"key": ["value"],
|
||||
"other_key": ["a_value", "other_value"],
|
||||
},
|
||||
)
|
||||
Group.objects.create(
|
||||
name="test2",
|
||||
attributes={
|
||||
"key": ["value"],
|
||||
"other_key": ["diff_value", "other_diff_value"],
|
||||
},
|
||||
)
|
||||
|
||||
importer = Importer(
|
||||
'{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
|
||||
'{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
|
||||
'["other_value"]}}, "model": "authentik_core.Group"}]}'
|
||||
)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
self.assertTrue(
|
||||
Group.objects.filter(
|
||||
name="test2",
|
||||
attributes={
|
||||
"key": ["value"],
|
||||
"other_key": ["diff_value", "other_diff_value"],
|
||||
},
|
||||
)
|
||||
)
|
||||
self.assertTrue(
|
||||
Group.objects.filter(
|
||||
name="test999",
|
||||
# All attributes used as identifiers are kept and merged with the
|
||||
# new attributes declared in the blueprint
|
||||
attributes={"key": ["updated_value"], "other_key": ["other_value"]},
|
||||
)
|
||||
)
|
||||
self.assertFalse(Group.objects.filter(name="test1"))
|
||||
|
||||
def test_export_validate_import(self):
|
||||
"""Test export and validate it"""
|
||||
@ -60,25 +113,113 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
"""Test export and import it twice"""
|
||||
count_initial = Prompt.objects.filter(field_key="username").count()
|
||||
|
||||
importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
|
||||
importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
|
||||
count_before = Prompt.objects.filter(field_key="username").count()
|
||||
self.assertEqual(count_initial + 1, count_before)
|
||||
|
||||
importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
|
||||
importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
|
||||
self.assertTrue(importer.apply())
|
||||
|
||||
self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)
|
||||
|
||||
def test_import_yaml_tags(self):
|
||||
"""Test some yaml tags"""
|
||||
ExpressionPolicy.objects.filter(name="foo-foo-bar").delete()
|
||||
importer = Importer(load_yaml_fixture("fixtures/tags.yaml"), {"bar": "baz"})
|
||||
ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
|
||||
Group.objects.filter(name="test").delete()
|
||||
environ["foo"] = generate_id()
|
||||
importer = Importer(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
self.assertTrue(ExpressionPolicy.objects.filter(name="foo-foo-bar"))
|
||||
policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()
|
||||
self.assertTrue(policy)
|
||||
self.assertTrue(
|
||||
Group.objects.filter(
|
||||
attributes={
|
||||
"policy_pk1": str(policy.pk) + "-suffix",
|
||||
"policy_pk2": str(policy.pk) + "-suffix",
|
||||
"boolAnd": True,
|
||||
"boolNand": False,
|
||||
"boolOr": True,
|
||||
"boolNor": False,
|
||||
"boolXor": True,
|
||||
"boolXnor": False,
|
||||
"boolComplex": True,
|
||||
"if_true_complex": {
|
||||
"dictionary": {
|
||||
"with": {"keys": "and_values"},
|
||||
"and_nested_custom_tags": "foo-bar",
|
||||
}
|
||||
},
|
||||
"if_false_complex": ["list", "with", "items", "foo-bar"],
|
||||
"if_true_simple": True,
|
||||
"if_false_simple": 2,
|
||||
"enumerate_mapping_to_mapping": {
|
||||
"prefix-key1": "other-prefix-value",
|
||||
"prefix-key2": "other-prefix-2",
|
||||
},
|
||||
"enumerate_mapping_to_sequence": [
|
||||
"prefixed-pair-key1-value",
|
||||
"prefixed-pair-key2-2",
|
||||
],
|
||||
"enumerate_sequence_to_sequence": [
|
||||
"prefixed-items-0-foo",
|
||||
"prefixed-items-1-bar",
|
||||
],
|
||||
"enumerate_sequence_to_mapping": {"index: 0": "foo", "index: 1": "bar"},
|
||||
"nested_complex_enumeration": {
|
||||
"0": {
|
||||
"key1": [
|
||||
["prefixed-f", "prefixed-o", "prefixed-o"],
|
||||
{
|
||||
"outer_value": "foo",
|
||||
"outer_index": 0,
|
||||
"middle_value": "value",
|
||||
"middle_index": "key1",
|
||||
},
|
||||
],
|
||||
"key2": [
|
||||
["prefixed-f", "prefixed-o", "prefixed-o"],
|
||||
{
|
||||
"outer_value": "foo",
|
||||
"outer_index": 0,
|
||||
"middle_value": 2,
|
||||
"middle_index": "key2",
|
||||
},
|
||||
],
|
||||
},
|
||||
"1": {
|
||||
"key1": [
|
||||
["prefixed-b", "prefixed-a", "prefixed-r"],
|
||||
{
|
||||
"outer_value": "bar",
|
||||
"outer_index": 1,
|
||||
"middle_value": "value",
|
||||
"middle_index": "key1",
|
||||
},
|
||||
],
|
||||
"key2": [
|
||||
["prefixed-b", "prefixed-a", "prefixed-r"],
|
||||
{
|
||||
"outer_value": "bar",
|
||||
"outer_index": 1,
|
||||
"middle_value": 2,
|
||||
"middle_index": "key2",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
)
|
||||
)
|
||||
self.assertTrue(
|
||||
OAuthSource.objects.filter(
|
||||
slug="test",
|
||||
consumer_key=environ["foo"],
|
||||
)
|
||||
)
|
||||
|
||||
def test_export_validate_import_policies(self):
|
||||
"""Test export and validate it"""
|
||||
@ -115,15 +256,21 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
with transaction_rollback():
|
||||
# First stage fields
|
||||
username_prompt = Prompt.objects.create(
|
||||
field_key="username", label="Username", order=0, type=FieldTypes.TEXT
|
||||
name=generate_id(),
|
||||
field_key="username",
|
||||
label="Username",
|
||||
order=0,
|
||||
type=FieldTypes.TEXT,
|
||||
)
|
||||
password = Prompt.objects.create(
|
||||
name=generate_id(),
|
||||
field_key="password",
|
||||
label="Password",
|
||||
order=1,
|
||||
type=FieldTypes.PASSWORD,
|
||||
)
|
||||
password_repeat = Prompt.objects.create(
|
||||
name=generate_id(),
|
||||
field_key="password_repeat",
|
||||
label="Password (repeat)",
|
||||
order=2,
|
||||
|
@ -43,3 +43,39 @@ class TestBlueprintsV1API(APITestCase):
|
||||
"6871c0003f5c07be5c3316d9d4a08444bd8fed1b3f03294e51e44522"
|
||||
),
|
||||
)
|
||||
|
||||
def test_api_oci(self):
|
||||
"""Test validation with OCI path"""
|
||||
res = self.client.post(
|
||||
reverse("authentik_api:blueprintinstance-list"),
|
||||
data={"name": "foo", "path": "oci://foo/bar"},
|
||||
)
|
||||
self.assertEqual(res.status_code, 201)
|
||||
|
||||
def test_api_blank(self):
|
||||
"""Test blank"""
|
||||
res = self.client.post(
|
||||
reverse("authentik_api:blueprintinstance-list"),
|
||||
data={
|
||||
"name": "foo",
|
||||
},
|
||||
)
|
||||
self.assertEqual(res.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
res.content.decode(), {"non_field_errors": ["Either path or content must be set."]}
|
||||
)
|
||||
|
||||
def test_api_content(self):
|
||||
"""Test blank"""
|
||||
res = self.client.post(
|
||||
reverse("authentik_api:blueprintinstance-list"),
|
||||
data={
|
||||
"name": "foo",
|
||||
"content": '{"version": 3}',
|
||||
},
|
||||
)
|
||||
self.assertEqual(res.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
res.content.decode(),
|
||||
{"content": ["Failed to validate blueprint: Invalid blueprint version"]},
|
||||
)
|
||||
|
authentik/blueprints/tests/test_v1_conditional_fields.py (47 changes, new file)
@ -0,0 +1,47 @@
|
||||
"""Test blueprints v1"""
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.core.models import Application, Token
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
from authentik.sources.oauth.models import OAuthSource
|
||||
|
||||
|
||||
class TestBlueprintsV1ConditionalFields(TransactionTestCase):
|
||||
"""Test Blueprints conditional fields"""
|
||||
|
||||
def setUp(self) -> None:
|
||||
user = create_test_admin_user()
|
||||
self.uid = generate_id()
|
||||
import_yaml = load_fixture("fixtures/conditional_fields.yaml", uid=self.uid, user=user.pk)
|
||||
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
|
||||
def test_token(self):
|
||||
"""Test token"""
|
||||
token = Token.objects.filter(identifier=f"{self.uid}-token").first()
|
||||
self.assertIsNotNone(token)
|
||||
self.assertEqual(token.key, self.uid)
|
||||
|
||||
def test_application(self):
|
||||
"""Test application"""
|
||||
app = Application.objects.filter(slug=f"{self.uid}-app").first()
|
||||
self.assertIsNotNone(app)
|
||||
self.assertEqual(app.meta_icon, "https://goauthentik.io/img/icon.png")
|
||||
|
||||
def test_source(self):
|
||||
"""Test source"""
|
||||
source = OAuthSource.objects.filter(slug=f"{self.uid}-source").first()
|
||||
self.assertIsNotNone(source)
|
||||
self.assertEqual(source.icon, "https://goauthentik.io/img/icon.png")
|
||||
|
||||
def test_flow(self):
|
||||
"""Test flow"""
|
||||
flow = Flow.objects.filter(slug=f"{self.uid}-flow").first()
|
||||
self.assertIsNotNone(flow)
|
||||
self.assertEqual(flow.background, "https://goauthentik.io/img/icon.png")
|
authentik/blueprints/tests/test_v1_conditions.py (43 changes, new file)
@ -0,0 +1,43 @@
|
||||
"""Test blueprints v1"""
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
|
||||
|
||||
class TestBlueprintsV1Conditions(TransactionTestCase):
|
||||
"""Test Blueprints conditions attribute"""
|
||||
|
||||
def test_conditions_fulfilled(self):
|
||||
"""Test conditions fulfilled"""
|
||||
flow_slug1 = generate_id()
|
||||
flow_slug2 = generate_id()
|
||||
import_yaml = load_fixture(
|
||||
"fixtures/conditions_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
|
||||
)
|
||||
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
# Ensure objects exist
|
||||
flow: Flow = Flow.objects.filter(slug=flow_slug1).first()
|
||||
self.assertEqual(flow.slug, flow_slug1)
|
||||
flow: Flow = Flow.objects.filter(slug=flow_slug2).first()
|
||||
self.assertEqual(flow.slug, flow_slug2)
|
||||
|
||||
def test_conditions_not_fulfilled(self):
|
||||
"""Test conditions not fulfilled"""
|
||||
flow_slug1 = generate_id()
|
||||
flow_slug2 = generate_id()
|
||||
import_yaml = load_fixture(
|
||||
"fixtures/conditions_not_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
|
||||
)
|
||||
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
# Ensure objects do not exist
|
||||
self.assertFalse(Flow.objects.filter(slug=flow_slug1))
|
||||
self.assertFalse(Flow.objects.filter(slug=flow_slug2))
|
@ -1,10 +1,10 @@
|
||||
"""Test blueprints v1"""
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from authentik.blueprints.tests import load_yaml_fixture
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
|
||||
|
||||
class TestBlueprintsV1State(TransactionTestCase):
|
||||
@ -13,7 +13,7 @@ class TestBlueprintsV1State(TransactionTestCase):
|
||||
def test_state_present(self):
|
||||
"""Test state present"""
|
||||
flow_slug = generate_id()
|
||||
import_yaml = load_yaml_fixture("fixtures/state_present.yaml", id=flow_slug)
|
||||
import_yaml = load_fixture("fixtures/state_present.yaml", id=flow_slug)
|
||||
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
@ -39,7 +39,7 @@ class TestBlueprintsV1State(TransactionTestCase):
|
||||
def test_state_created(self):
|
||||
"""Test state created"""
|
||||
flow_slug = generate_id()
|
||||
import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)
|
||||
import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)
|
||||
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
@ -65,7 +65,7 @@ class TestBlueprintsV1State(TransactionTestCase):
|
||||
def test_state_absent(self):
|
||||
"""Test state absent"""
|
||||
flow_slug = generate_id()
|
||||
import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)
|
||||
import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)
|
||||
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
@ -74,7 +74,7 @@ class TestBlueprintsV1State(TransactionTestCase):
|
||||
flow: Flow = Flow.objects.filter(slug=flow_slug).first()
|
||||
self.assertEqual(flow.slug, flow_slug)
|
||||
|
||||
import_yaml = load_yaml_fixture("fixtures/state_absent.yaml", id=flow_slug)
|
||||
import_yaml = load_fixture("fixtures/state_absent.yaml", id=flow_slug)
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
|
@ -6,7 +6,7 @@ from django.test import TransactionTestCase
|
||||
from yaml import dump
|
||||
|
||||
from authentik.blueprints.models import BlueprintInstance, BlueprintInstanceStatus
|
||||
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discover, blueprints_find
|
||||
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discovery, blueprints_find
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
@ -53,7 +53,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
|
||||
file.seek(0)
|
||||
file_hash = sha512(file.read().encode()).hexdigest()
|
||||
file.flush()
|
||||
blueprints_discover() # pylint: disable=no-value-for-parameter
|
||||
blueprints_discovery() # pylint: disable=no-value-for-parameter
|
||||
instance = BlueprintInstance.objects.filter(name=blueprint_id).first()
|
||||
self.assertEqual(instance.last_applied_hash, file_hash)
|
||||
self.assertEqual(
|
||||
@ -67,25 +67,8 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
|
||||
@CONFIG.patch("blueprints_dir", TMP)
|
||||
def test_valid_updated(self):
|
||||
"""Test valid file"""
|
||||
BlueprintInstance.objects.filter(name="foo").delete()
|
||||
with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
|
||||
file.write(
|
||||
dump(
|
||||
{
|
||||
"version": 1,
|
||||
"entries": [],
|
||||
}
|
||||
)
|
||||
)
|
||||
file.flush()
|
||||
blueprints_discover() # pylint: disable=no-value-for-parameter
|
||||
self.assertEqual(
|
||||
BlueprintInstance.objects.first().last_applied_hash,
|
||||
(
|
||||
"e52bb445b03cd36057258dc9f0ce0fbed8278498ee1470e45315293e5f026d1b"
|
||||
"d1f9b3526871c0003f5c07be5c3316d9d4a08444bd8fed1b3f03294e51e44522"
|
||||
),
|
||||
)
|
||||
self.assertEqual(BlueprintInstance.objects.first().metadata, {})
|
||||
file.write(
|
||||
dump(
|
||||
{
|
||||
@ -98,19 +81,45 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
|
||||
)
|
||||
)
|
||||
file.flush()
|
||||
blueprints_discover() # pylint: disable=no-value-for-parameter
|
||||
blueprints_discovery() # pylint: disable=no-value-for-parameter
|
||||
blueprint = BlueprintInstance.objects.filter(name="foo").first()
|
||||
self.assertEqual(
|
||||
BlueprintInstance.objects.first().last_applied_hash,
|
||||
blueprint.last_applied_hash,
|
||||
(
|
||||
"fc62fea96067da8592bdf90927246d0ca150b045447df93b0652a0e20a8bc327"
|
||||
"681510b5db37ea98759c61f9a98dd2381f46a3b5a2da69dfb45158897f14e824"
|
||||
"b86ec439b3857350714f070d2833490e736d9155d3d97b2cac13f3b352223e5a"
|
||||
"1adbf8ec56fa616d46090cc4773ff9e46c4e509fde96b97de87dd21fa329ca1a"
|
||||
),
|
||||
)
|
||||
self.assertEqual(blueprint.metadata, {"labels": {}, "name": "foo"})
|
||||
file.write(
|
||||
dump(
|
||||
{
|
||||
"version": 1,
|
||||
"entries": [],
|
||||
"metadata": {
|
||||
"name": "foo",
|
||||
"labels": {
|
||||
"foo": "bar",
|
||||
},
|
||||
},
|
||||
}
|
||||
)
|
||||
)
|
||||
file.flush()
|
||||
blueprints_discovery() # pylint: disable=no-value-for-parameter
|
||||
blueprint.refresh_from_db()
|
||||
self.assertEqual(
|
||||
blueprint.last_applied_hash,
|
||||
(
|
||||
"87b68b10131d2c9751ed308bba38f04734b9e2cdf8532ed617bc52979b063c49"
|
||||
"2564f33f3d20ab9d5f0fd9e6eb77a13942e060199f147789cb7afab9690e72b5"
|
||||
),
|
||||
)
|
||||
self.assertEqual(
|
||||
BlueprintInstance.objects.first().metadata,
|
||||
blueprint.metadata,
|
||||
{
|
||||
"name": "foo",
|
||||
"labels": {},
|
||||
"labels": {"foo": "bar"},
|
||||
},
|
||||
)
|
||||
|
||||
|
authentik/blueprints/urls.py (6 changes, new file)
@@ -0,0 +1,6 @@
"""API URLs"""
from authentik.blueprints.api import BlueprintInstanceViewSet

api_urlpatterns = [
    ("managed/blueprints", BlueprintInstanceViewSet),
]
@ -1,10 +1,15 @@
|
||||
"""transfer common classes"""
|
||||
from collections import OrderedDict
|
||||
from copy import copy
|
||||
from dataclasses import asdict, dataclass, field, is_dataclass
|
||||
from enum import Enum
|
||||
from typing import Any, Optional
|
||||
from functools import reduce
|
||||
from operator import ixor
|
||||
from os import getenv
|
||||
from typing import Any, Iterable, Literal, Mapping, Optional, Union
|
||||
from uuid import UUID
|
||||
|
||||
from deepmerge import always_merger
|
||||
from django.apps import apps
|
||||
from django.db.models import Model, Q
|
||||
from rest_framework.fields import Field
|
||||
@ -53,16 +58,21 @@ class BlueprintEntryDesiredState(Enum):
|
||||
class BlueprintEntry:
|
||||
"""Single entry of a blueprint"""
|
||||
|
||||
model: str
|
||||
state: BlueprintEntryDesiredState = field(default=BlueprintEntryDesiredState.PRESENT)
|
||||
model: Union[str, "YAMLTag"]
|
||||
state: Union[BlueprintEntryDesiredState, "YAMLTag"] = field(
|
||||
default=BlueprintEntryDesiredState.PRESENT
|
||||
)
|
||||
conditions: list[Any] = field(default_factory=list)
|
||||
identifiers: dict[str, Any] = field(default_factory=dict)
|
||||
attrs: Optional[dict[str, Any]] = field(default_factory=dict)
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
id: Optional[str] = None
|
||||
|
||||
_state: BlueprintEntryState = field(default_factory=BlueprintEntryState)
|
||||
|
||||
def __post_init__(self, *args, **kwargs) -> None:
|
||||
self.__tag_contexts: list["YAMLTagContext"] = []
|
||||
|
||||
@staticmethod
|
||||
def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry":
|
||||
"""Convert a SerializerModel instance to a blueprint Entry"""
|
||||
@ -79,17 +89,46 @@ class BlueprintEntry:
|
||||
attrs=all_attrs,
|
||||
)
|
||||
|
||||
def _get_tag_context(
|
||||
self,
|
||||
depth: int = 0,
|
||||
context_tag_type: Optional[type["YAMLTagContext"] | tuple["YAMLTagContext", ...]] = None,
|
||||
) -> "YAMLTagContext":
|
||||
"""Get a YAMLTagContext object located at a certain depth in the tag tree"""
|
||||
if depth < 0:
|
||||
raise ValueError("depth must be a positive number or zero")
|
||||
|
||||
if context_tag_type:
|
||||
contexts = [x for x in self.__tag_contexts if isinstance(x, context_tag_type)]
|
||||
else:
|
||||
contexts = self.__tag_contexts
|
||||
|
||||
try:
|
||||
return contexts[-(depth + 1)]
|
||||
except IndexError:
|
||||
raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}")
|
||||
|
||||
def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any:
|
||||
"""Check if we have any special tags that need handling"""
|
||||
val = copy(value)
|
||||
|
||||
if isinstance(value, YAMLTagContext):
|
||||
self.__tag_contexts.append(value)
|
||||
|
||||
if isinstance(value, YAMLTag):
|
||||
return value.resolve(self, blueprint)
|
||||
val = value.resolve(self, blueprint)
|
||||
|
||||
if isinstance(value, dict):
|
||||
for key, inner_value in value.items():
|
||||
value[key] = self.tag_resolver(inner_value, blueprint)
|
||||
val[key] = self.tag_resolver(inner_value, blueprint)
|
||||
if isinstance(value, list):
|
||||
for idx, inner_value in enumerate(value):
|
||||
value[idx] = self.tag_resolver(inner_value, blueprint)
|
||||
return value
|
||||
val[idx] = self.tag_resolver(inner_value, blueprint)
|
||||
|
||||
if isinstance(value, YAMLTagContext):
|
||||
self.__tag_contexts.pop()
|
||||
|
||||
return val
|
||||
|
||||
def get_attrs(self, blueprint: "Blueprint") -> dict[str, Any]:
|
||||
"""Get attributes of this entry, with all yaml tags resolved"""
|
||||
@ -99,6 +138,18 @@ class BlueprintEntry:
|
||||
"""Get attributes of this entry, with all yaml tags resolved"""
|
||||
return self.tag_resolver(self.identifiers, blueprint)
|
||||
|
||||
def get_state(self, blueprint: "Blueprint") -> BlueprintEntryDesiredState:
|
||||
"""Get the blueprint state, with yaml tags resolved if present"""
|
||||
return BlueprintEntryDesiredState(self.tag_resolver(self.state, blueprint))
|
||||
|
||||
def get_model(self, blueprint: "Blueprint") -> str:
|
||||
"""Get the blueprint model, with yaml tags resolved if present"""
|
||||
return str(self.tag_resolver(self.model, blueprint))
|
||||
|
||||
def check_all_conditions_match(self, blueprint: "Blueprint") -> bool:
|
||||
"""Check all conditions of this entry match (evaluate to True)"""
|
||||
return all(self.tag_resolver(self.conditions, blueprint))
|
||||
|
||||
|
||||
@dataclass
|
||||
class BlueprintMetadata:
|
||||
@ -127,12 +178,19 @@ class YAMLTag:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class YAMLTagContext:
|
||||
"""Base class for all YAML Tag Contexts"""
|
||||
|
||||
def get_context(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
"""Implement yaml tag context logic"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class KeyOf(YAMLTag):
|
||||
"""Reference another object by their ID"""
|
||||
|
||||
id_from: str
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
|
||||
super().__init__()
|
||||
self.id_from = node.value
|
||||
@ -153,13 +211,31 @@ class KeyOf(YAMLTag):
|
||||
)
|
||||
|
||||
|
||||
class Env(YAMLTag):
|
||||
"""Lookup environment variable with optional default"""
|
||||
|
||||
key: str
|
||||
default: Optional[Any]
|
||||
|
||||
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.default = None
|
||||
if isinstance(node, ScalarNode):
|
||||
self.key = node.value
|
||||
if isinstance(node, SequenceNode):
|
||||
self.key = node.value[0].value
|
||||
self.default = node.value[1].value
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
return getenv(self.key, self.default)
|
||||
|
||||
|
||||
class Context(YAMLTag):
|
||||
"""Lookup key from instance context"""
|
||||
|
||||
key: str
|
||||
default: Optional[Any]
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.default = None
|
||||
@ -182,17 +258,23 @@ class Format(YAMLTag):
|
||||
format_string: str
|
||||
args: list[Any]
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.format_string = node.value[0].value
|
||||
self.args = []
|
||||
for raw_node in node.value[1:]:
|
||||
self.args.append(raw_node.value)
|
||||
self.args.append(loader.construct_object(raw_node))
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
args = []
|
||||
for arg in self.args:
|
||||
if isinstance(arg, YAMLTag):
|
||||
args.append(arg.resolve(entry, blueprint))
|
||||
else:
|
||||
args.append(arg)
|
||||
|
||||
try:
|
||||
return self.format_string % tuple(self.args)
|
||||
return self.format_string % tuple(args)
|
||||
except TypeError as exc:
|
||||
raise EntryInvalidError(exc)
|
||||
|
||||
@ -200,15 +282,12 @@ class Format(YAMLTag):
|
||||
class Find(YAMLTag):
|
||||
"""Find any object"""
|
||||
|
||||
model_name: str
|
||||
model_name: str | YAMLTag
|
||||
conditions: list[list]
|
||||
|
||||
model_class: type[Model]
|
||||
|
||||
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.model_name = node.value[0].value
|
||||
self.model_class = apps.get_model(*self.model_name.split("."))
|
||||
self.model_name = loader.construct_object(node.value[0])
|
||||
self.conditions = []
|
||||
for raw_node in node.value[1:]:
|
||||
values = []
|
||||
@ -217,15 +296,227 @@ class Find(YAMLTag):
|
||||
self.conditions.append(values)
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
if isinstance(self.model_name, YAMLTag):
|
||||
model_name = self.model_name.resolve(entry, blueprint)
|
||||
else:
|
||||
model_name = self.model_name
|
||||
|
||||
model_class = apps.get_model(*model_name.split("."))
|
||||
|
||||
query = Q()
|
||||
for cond in self.conditions:
|
||||
query &= Q(**{cond[0]: cond[1]})
|
||||
instance = self.model_class.objects.filter(query).first()
|
||||
if isinstance(cond[0], YAMLTag):
|
||||
query_key = cond[0].resolve(entry, blueprint)
|
||||
else:
|
||||
query_key = cond[0]
|
||||
if isinstance(cond[1], YAMLTag):
|
||||
query_value = cond[1].resolve(entry, blueprint)
|
||||
else:
|
||||
query_value = cond[1]
|
||||
query &= Q(**{query_key: query_value})
|
||||
instance = model_class.objects.filter(query).first()
|
||||
if instance:
|
||||
return instance.pk
|
||||
return None
|
||||
|
||||
|
||||
class Condition(YAMLTag):
|
||||
"""Convert all values to a single boolean"""
|
||||
|
||||
mode: Literal["AND", "NAND", "OR", "NOR", "XOR", "XNOR"]
|
||||
args: list[Any]
|
||||
|
||||
_COMPARATORS = {
|
||||
# Using all and any here instead of from operator import iand, ior
|
||||
# to improve performance
|
||||
"AND": all,
|
||||
"NAND": lambda args: not all(args),
|
||||
"OR": any,
|
||||
"NOR": lambda args: not any(args),
|
||||
"XOR": lambda args: reduce(ixor, args) if len(args) > 1 else args[0],
|
||||
"XNOR": lambda args: not (reduce(ixor, args) if len(args) > 1 else args[0]),
|
||||
}
|
||||
|
||||
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.mode = node.value[0].value
|
||||
self.args = []
|
||||
for raw_node in node.value[1:]:
|
||||
self.args.append(loader.construct_object(raw_node))
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
args = []
|
||||
for arg in self.args:
|
||||
if isinstance(arg, YAMLTag):
|
||||
args.append(arg.resolve(entry, blueprint))
|
||||
else:
|
||||
args.append(arg)
|
||||
|
||||
if not args:
|
||||
raise EntryInvalidError("At least one value is required after mode selection.")
|
||||
|
||||
try:
|
||||
comparator = self._COMPARATORS[self.mode.upper()]
|
||||
return comparator(tuple(bool(x) for x in args))
|
||||
except (TypeError, KeyError) as exc:
|
||||
raise EntryInvalidError(exc)
|
||||
|
||||
|
||||
class If(YAMLTag):
|
||||
"""Select YAML to use based on condition"""
|
||||
|
||||
condition: Any
|
||||
when_true: Any
|
||||
when_false: Any
|
||||
|
||||
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.condition = loader.construct_object(node.value[0])
|
||||
self.when_true = loader.construct_object(node.value[1])
|
||||
self.when_false = loader.construct_object(node.value[2])
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
if isinstance(self.condition, YAMLTag):
|
||||
condition = self.condition.resolve(entry, blueprint)
|
||||
else:
|
||||
condition = self.condition
|
||||
|
||||
try:
|
||||
return entry.tag_resolver(
|
||||
self.when_true if condition else self.when_false,
|
||||
blueprint,
|
||||
)
|
||||
except TypeError as exc:
|
||||
raise EntryInvalidError(exc)
|
||||
|
||||
|
||||
class Enumerate(YAMLTag, YAMLTagContext):
|
||||
"""Iterate over an iterable."""
|
||||
|
||||
iterable: YAMLTag | Iterable
|
||||
item_body: Any
|
||||
output_body: Literal["SEQ", "MAP"]
|
||||
|
||||
_OUTPUT_BODIES = {
|
||||
"SEQ": (list, lambda a, b: [*a, b]),
|
||||
"MAP": (
|
||||
dict,
|
||||
lambda a, b: always_merger.merge(
|
||||
a, {b[0]: b[1]} if isinstance(b, (tuple, list)) else b
|
||||
),
|
||||
),
|
||||
}
|
||||
|
||||
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.iterable = loader.construct_object(node.value[0])
|
||||
self.output_body = node.value[1].value
|
||||
self.item_body = loader.construct_object(node.value[2])
|
||||
self.__current_context: tuple[Any, Any] = tuple()
|
||||
|
||||
def get_context(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
return self.__current_context
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
if isinstance(self.iterable, EnumeratedItem) and self.iterable.depth == 0:
|
||||
raise EntryInvalidError(
|
||||
f"{self.__class__.__name__} tag's iterable references this tag's context. "
|
||||
"This is a noop. Check you are setting depth bigger than 0."
|
||||
)
|
||||
|
||||
if isinstance(self.iterable, YAMLTag):
|
||||
iterable = self.iterable.resolve(entry, blueprint)
|
||||
else:
|
||||
iterable = self.iterable
|
||||
|
||||
if not isinstance(iterable, Iterable):
|
||||
raise EntryInvalidError(
|
||||
f"{self.__class__.__name__}'s iterable must be an iterable "
|
||||
"such as a sequence or a mapping"
|
||||
)
|
||||
|
||||
if isinstance(iterable, Mapping):
|
||||
iterable = tuple(iterable.items())
|
||||
else:
|
||||
iterable = tuple(enumerate(iterable))
|
||||
|
||||
try:
|
||||
output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()]
|
||||
except KeyError as exc:
|
||||
raise EntryInvalidError(exc)
|
||||
|
||||
result = output_class()
|
||||
|
||||
self.__current_context = tuple()
|
||||
|
||||
try:
|
||||
for item in iterable:
|
||||
self.__current_context = item
|
||||
resolved_body = entry.tag_resolver(self.item_body, blueprint)
|
||||
result = add_fn(result, resolved_body)
|
||||
if not isinstance(result, output_class):
|
||||
raise EntryInvalidError(
|
||||
f"Invalid {self.__class__.__name__} item found: {resolved_body}"
|
||||
)
|
||||
finally:
|
||||
self.__current_context = tuple()
|
||||
|
||||
return result
|
||||
|
||||
|
||||
class EnumeratedItem(YAMLTag):
|
||||
"""Get the current item value and index provided by an Enumerate tag context"""
|
||||
|
||||
depth: int
|
||||
|
||||
_SUPPORTED_CONTEXT_TAGS = (Enumerate,)
|
||||
|
||||
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
|
||||
super().__init__()
|
||||
self.depth = int(node.value)
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
try:
|
||||
context_tag: Enumerate = entry._get_tag_context(
|
||||
depth=self.depth,
|
||||
context_tag_type=EnumeratedItem._SUPPORTED_CONTEXT_TAGS,
|
||||
)
|
||||
except ValueError as exc:
|
||||
if self.depth == 0:
|
||||
raise EntryInvalidError(
|
||||
f"{self.__class__.__name__} tags are only usable "
|
||||
f"inside an {Enumerate.__name__} tag"
|
||||
)
|
||||
|
||||
raise EntryInvalidError(f"{self.__class__.__name__} tag: {exc}")
|
||||
|
||||
return context_tag.get_context(entry, blueprint)
|
||||
|
||||
|
||||
class Index(EnumeratedItem):
|
||||
"""Get the current item index provided by an Enumerate tag context"""
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
context = super().resolve(entry, blueprint)
|
||||
|
||||
try:
|
||||
return context[0]
|
||||
except IndexError: # pragma: no cover
|
||||
raise EntryInvalidError(f"Empty/invalid context: {context}")
|
||||
|
||||
|
||||
class Value(EnumeratedItem):
|
||||
"""Get the current item value provided by an Enumerate tag context"""
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
context = super().resolve(entry, blueprint)
|
||||
|
||||
try:
|
||||
return context[1]
|
||||
except IndexError: # pragma: no cover
|
||||
raise EntryInvalidError(f"Empty/invalid context: {context}")
|
||||
|
||||
|
||||
class BlueprintDumper(SafeDumper):
|
||||
"""Dump dataclasses to yaml"""
|
||||
|
||||
@ -266,6 +557,12 @@ class BlueprintLoader(SafeLoader):
|
||||
self.add_constructor("!Find", Find)
|
||||
self.add_constructor("!Context", Context)
|
||||
self.add_constructor("!Format", Format)
|
||||
self.add_constructor("!Condition", Condition)
|
||||
self.add_constructor("!If", If)
|
||||
self.add_constructor("!Env", Env)
|
||||
self.add_constructor("!Enumerate", Enumerate)
|
||||
self.add_constructor("!Value", Value)
|
||||
self.add_constructor("!Index", Index)
|
||||
|
||||
|
||||
class EntryInvalidError(SentryIgnoredException):
|
||||
|
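Note (illustrative, not part of the diff): BlueprintLoader above wires each custom tag into PyYAML via add_constructor, with the tag class itself acting as the constructor. A minimal standalone sketch of that pattern using a toy !Upper tag (not an authentik tag):

# Minimal sketch of registering a custom tag on a PyYAML SafeLoader,
# in the same style as the !Env/!Condition/!Enumerate constructors above.
from yaml import SafeLoader, ScalarNode, load


class UpperTag:
    """Toy tag that upper-cases its scalar value (hypothetical, for illustration only)."""

    def __init__(self, loader: "ToyLoader", node: ScalarNode) -> None:
        self.value = node.value

    def resolve(self) -> str:
        return self.value.upper()


class ToyLoader(SafeLoader):
    """SafeLoader subclass that knows about the custom tag."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # add_constructor registers the tag on the loader class, as BlueprintLoader does above.
        self.add_constructor("!Upper", UpperTag)


doc = load("name: !Upper foo", Loader=ToyLoader)
print(doc["name"].resolve())  # FOO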
@ -7,6 +7,7 @@ from dacite.config import Config
|
||||
from dacite.core import from_dict
|
||||
from dacite.exceptions import DaciteError
|
||||
from deepmerge import always_merger
|
||||
from django.core.exceptions import FieldError
|
||||
from django.db import transaction
|
||||
from django.db.models import Model
|
||||
from django.db.models.query_utils import Q
|
||||
@ -34,11 +35,15 @@ from authentik.core.models import (
|
||||
Source,
|
||||
UserSourceConnection,
|
||||
)
|
||||
from authentik.flows.models import Stage
|
||||
from authentik.flows.models import FlowToken, Stage
|
||||
from authentik.lib.models import SerializerModel
|
||||
from authentik.outposts.models import OutpostServiceConnection
|
||||
from authentik.policies.models import Policy, PolicyBindingModel
|
||||
|
||||
# Context set when the serializer is created in a blueprint context
|
||||
# Update website/developer-docs/blueprints/v1/models.md when used
|
||||
SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"
|
||||
|
||||
|
||||
def is_model_allowed(model: type[Model]) -> bool:
|
||||
"""Check if model is allowed"""
|
||||
@ -60,6 +65,8 @@ def is_model_allowed(model: type[Model]) -> bool:
|
||||
PolicyBindingModel,
|
||||
# Classes that have other dependencies
|
||||
AuthenticatedSession,
|
||||
# Classes which are only internally managed
|
||||
FlowToken,
|
||||
)
|
||||
return model not in excluded_models and issubclass(model, (SerializerModel, BaseMetaModel))
|
||||
|
||||
@ -132,23 +139,35 @@ class Importer:
|
||||
main_query = Q(pk=attrs["pk"])
|
||||
sub_query = Q()
|
||||
for identifier, value in attrs.items():
|
||||
if isinstance(value, dict):
|
||||
continue
|
||||
if identifier == "pk":
|
||||
continue
|
||||
sub_query &= Q(**{identifier: value})
|
||||
if isinstance(value, dict):
|
||||
sub_query &= Q(**{f"{identifier}__contains": value})
|
||||
else:
|
||||
sub_query &= Q(**{identifier: value})
|
||||
|
||||
return main_query | sub_query
|
||||
|
||||
# pylint: disable-msg=too-many-locals
|
||||
def _validate_single(self, entry: BlueprintEntry) -> Optional[BaseSerializer]:
|
||||
"""Validate a single entry"""
|
||||
model_app_label, model_name = entry.model.split(".")
|
||||
if not entry.check_all_conditions_match(self.__import):
|
||||
self.logger.debug("One or more conditions of this entry are not fulfilled, skipping")
|
||||
return None
|
||||
|
||||
model_app_label, model_name = entry.get_model(self.__import).split(".")
|
||||
model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
|
||||
# Don't use isinstance since we don't want to check for inheritance
|
||||
if not is_model_allowed(model):
|
||||
raise EntryInvalidError(f"Model {model} not allowed")
|
||||
if issubclass(model, BaseMetaModel):
|
||||
serializer_class: type[Serializer] = model.serializer()
|
||||
serializer = serializer_class(data=entry.get_attrs(self.__import))
|
||||
serializer = serializer_class(
|
||||
data=entry.get_attrs(self.__import),
|
||||
context={
|
||||
SERIALIZER_CONTEXT_BLUEPRINT: entry,
|
||||
},
|
||||
)
|
||||
try:
|
||||
serializer.is_valid(raise_exception=True)
|
||||
except ValidationError as exc:
|
||||
@ -156,8 +175,6 @@ class Importer:
|
||||
f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
|
||||
) from exc
|
||||
return serializer
|
||||
if entry.identifiers == {}:
|
||||
raise EntryInvalidError("No identifiers")
|
||||
|
||||
# If we try to validate without referencing a possible instance
|
||||
# we'll get a duplicate error, hence we load the model here and return
|
||||
@ -169,12 +186,20 @@ class Importer:
|
||||
if isinstance(value, dict) and "pk" in value:
|
||||
del updated_identifiers[key]
|
||||
updated_identifiers[f"{key}"] = value["pk"]
|
||||
existing_models = model.objects.filter(self.__query_from_identifier(updated_identifiers))
|
||||
|
||||
query = self.__query_from_identifier(updated_identifiers)
|
||||
if not query:
|
||||
raise EntryInvalidError("No or invalid identifiers")
|
||||
|
||||
try:
|
||||
existing_models = model.objects.filter(query)
|
||||
except FieldError as exc:
|
||||
raise EntryInvalidError(f"Invalid identifier field: {exc}") from exc
|
||||
|
||||
serializer_kwargs = {}
|
||||
model_instance = existing_models.first()
|
||||
if not isinstance(model(), BaseMetaModel) and model_instance:
|
||||
if entry.state == BlueprintEntryDesiredState.CREATED:
|
||||
if entry.get_state(self.__import) == BlueprintEntryDesiredState.CREATED:
|
||||
self.logger.debug("instance exists, skipping")
|
||||
return None
|
||||
self.logger.debug(
|
||||
@ -198,10 +223,15 @@ class Importer:
|
||||
full_data = self.__update_pks_for_attrs(entry.get_attrs(self.__import))
|
||||
except ValueError as exc:
|
||||
raise EntryInvalidError(exc) from exc
|
||||
full_data.update(updated_identifiers)
|
||||
always_merger.merge(full_data, updated_identifiers)
|
||||
serializer_kwargs["data"] = full_data
|
||||
|
||||
serializer: Serializer = model().serializer(**serializer_kwargs)
|
||||
serializer: Serializer = model().serializer(
|
||||
context={
|
||||
SERIALIZER_CONTEXT_BLUEPRINT: entry,
|
||||
},
|
||||
**serializer_kwargs,
|
||||
)
|
||||
try:
|
||||
serializer.is_valid(raise_exception=True)
|
||||
except ValidationError as exc:
|
||||
@ -219,15 +249,14 @@ class Importer:
|
||||
raise IntegrityError
|
||||
except IntegrityError:
|
||||
return False
|
||||
else:
|
||||
self.logger.debug("Committing changes")
|
||||
self.logger.debug("Committing changes")
|
||||
return True
|
||||
|
||||
def _apply_models(self) -> bool:
|
||||
"""Apply (create/update) models yaml"""
|
||||
self.__pk_map = {}
|
||||
for entry in self.__import.entries:
|
||||
model_app_label, model_name = entry.model.split(".")
|
||||
model_app_label, model_name = entry.get_model(self.__import).split(".")
|
||||
try:
|
||||
model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
|
||||
except LookupError:
|
||||
@ -244,7 +273,8 @@ class Importer:
|
||||
if not serializer:
|
||||
continue
|
||||
|
||||
if entry.state in [
|
||||
state = entry.get_state(self.__import)
|
||||
if state in [
|
||||
BlueprintEntryDesiredState.PRESENT,
|
||||
BlueprintEntryDesiredState.CREATED,
|
||||
]:
|
||||
@ -253,9 +283,9 @@ class Importer:
|
||||
self.__pk_map[entry.identifiers["pk"]] = model.pk
|
||||
entry._state = BlueprintEntryState(model)
|
||||
self.logger.debug("updated model", model=model)
|
||||
elif entry.state == BlueprintEntryDesiredState.ABSENT:
|
||||
elif state == BlueprintEntryDesiredState.ABSENT:
|
||||
instance: Optional[Model] = serializer.instance
|
||||
if instance:
|
||||
if instance.pk:
|
||||
instance.delete()
|
||||
self.logger.debug("deleted model", mode=instance)
|
||||
continue
|
||||
@ -269,7 +299,7 @@ class Importer:
|
||||
orig_import = deepcopy(self.__import)
|
||||
if self.__import.version != 1:
|
||||
self.logger.warning("Invalid blueprint version")
|
||||
return False, []
|
||||
return False, [{"event": "Invalid blueprint version"}]
|
||||
with (
|
||||
transaction_rollback(),
|
||||
capture_logs() as logs,
|
||||
|
@@ -3,3 +3,4 @@
LABEL_AUTHENTIK_SYSTEM = "blueprints.goauthentik.io/system"
LABEL_AUTHENTIK_INSTANTIATE = "blueprints.goauthentik.io/instantiate"
LABEL_AUTHENTIK_GENERATED = "blueprints.goauthentik.io/generated"
LABEL_AUTHENTIK_DESCRIPTION = "blueprints.goauthentik.io/description"
@@ -56,5 +56,4 @@ class MetaApplyBlueprint(BaseMetaModel):
        return ApplyBlueprintMetaSerializer

    class Meta:

        abstract = True
@@ -14,7 +14,6 @@ class BaseMetaModel(Model):
        raise NotImplementedError

    class Meta:

        abstract = True

authentik/blueprints/v1/oci.py (104 changes, new file)
@ -0,0 +1,104 @@
|
||||
"""OCI Client"""
|
||||
from typing import Any
|
||||
from urllib.parse import ParseResult, urlparse
|
||||
|
||||
from opencontainers.distribution.reggie import (
|
||||
NewClient,
|
||||
WithDebug,
|
||||
WithDefaultName,
|
||||
WithDigest,
|
||||
WithReference,
|
||||
WithUserAgent,
|
||||
WithUsernamePassword,
|
||||
)
|
||||
from requests.exceptions import RequestException
|
||||
from structlog import get_logger
|
||||
from structlog.stdlib import BoundLogger
|
||||
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.lib.utils.http import authentik_user_agent
|
||||
|
||||
OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
|
||||
OCI_PREFIX = "oci://"
|
||||
|
||||
|
||||
class OCIException(SentryIgnoredException):
|
||||
"""OCI-related errors"""
|
||||
|
||||
|
||||
class BlueprintOCIClient:
|
||||
"""Blueprint OCI Client"""
|
||||
|
||||
url: ParseResult
|
||||
sanitized_url: str
|
||||
logger: BoundLogger
|
||||
ref: str
|
||||
client: NewClient
|
||||
|
||||
def __init__(self, url: str) -> None:
|
||||
self._parse_url(url)
|
||||
self.logger = get_logger().bind(url=self.sanitized_url)
|
||||
|
||||
self.ref = "latest"
|
||||
# Remove the leading slash of the path to convert it to an image name
|
||||
path = self.url.path[1:]
|
||||
if ":" in path:
|
||||
# if there's a colon in the path, use everything after it as a ref
|
||||
path, _, self.ref = path.partition(":")
|
||||
base_url = f"https://{self.url.hostname}"
|
||||
if self.url.port:
|
||||
base_url += f":{self.url.port}"
|
||||
self.client = NewClient(
|
||||
base_url,
|
||||
WithUserAgent(authentik_user_agent()),
|
||||
WithUsernamePassword(self.url.username, self.url.password),
|
||||
WithDefaultName(path),
|
||||
WithDebug(True),
|
||||
)
|
||||
|
||||
def _parse_url(self, url: str):
|
||||
self.url = urlparse(url)
|
||||
netloc = self.url.netloc
|
||||
if "@" in netloc:
|
||||
netloc = netloc[netloc.index("@") + 1 :]
|
||||
self.sanitized_url = self.url._replace(netloc=netloc).geturl()
|
||||
|
||||
def fetch_manifests(self) -> dict[str, Any]:
|
||||
"""Fetch manifests for ref"""
|
||||
self.logger.info("Fetching OCI manifests for blueprint")
|
||||
manifest_request = self.client.NewRequest(
|
||||
"GET",
|
||||
"/v2/<name>/manifests/<reference>",
|
||||
WithReference(self.ref),
|
||||
).SetHeader("Accept", "application/vnd.oci.image.manifest.v1+json")
|
||||
try:
|
||||
manifest_response = self.client.Do(manifest_request)
|
||||
manifest_response.raise_for_status()
|
||||
except RequestException as exc:
|
||||
raise OCIException(exc) from exc
|
||||
manifest = manifest_response.json()
|
||||
if "errors" in manifest:
|
||||
raise OCIException(manifest["errors"])
|
||||
return manifest
|
||||
|
||||
def fetch_blobs(self, manifest: dict[str, Any]):
|
||||
"""Fetch blob based on manifest info"""
|
||||
blob = None
|
||||
for layer in manifest.get("layers", []):
|
||||
if layer.get("mediaType", "") == OCI_MEDIA_TYPE:
|
||||
blob = layer.get("digest")
|
||||
self.logger.debug("Found layer with matching media type", blob=blob)
|
||||
if not blob:
|
||||
raise OCIException("Blob not found")
|
||||
|
||||
blob_request = self.client.NewRequest(
|
||||
"GET",
|
||||
"/v2/<name>/blobs/<digest>",
|
||||
WithDigest(blob),
|
||||
)
|
||||
try:
|
||||
blob_response = self.client.Do(blob_request)
|
||||
blob_response.raise_for_status()
|
||||
return blob_response.text
|
||||
except RequestException as exc:
|
||||
raise OCIException(exc) from exc
|
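A minimal usage sketch of the new client (not part of the diff above; the registry URL is a made-up placeholder):

# Hedged sketch: pull a blueprint document from an OCI registry with the new client.
from authentik.blueprints.v1.oci import BlueprintOCIClient, OCIException

try:
    client = BlueprintOCIClient("oci://ghcr.io/example-org/example-blueprint:latest")  # placeholder URL
    manifest = client.fetch_manifests()            # resolves the ref ("latest" here) to an image manifest
    blueprint_yaml = client.fetch_blobs(manifest)  # returns the blueprint YAML as text
except OCIException as exc:
    print(f"fetching blueprint failed: {exc}")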
@@ -10,6 +10,13 @@ from django.utils.text import slugify
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from structlog.stdlib import get_logger
from watchdog.events import (
    FileCreatedEvent,
    FileModifiedEvent,
    FileSystemEvent,
    FileSystemEventHandler,
)
from watchdog.observers import Observer
from yaml import load
from yaml.error import YAMLError

@@ -21,6 +28,7 @@ from authentik.blueprints.models import (
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, EntryInvalidError
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.events.monitored_tasks import (
    MonitoredTask,
    TaskResult,
@@ -32,6 +40,7 @@ from authentik.lib.config import CONFIG
from authentik.root.celery import CELERY_APP

LOGGER = get_logger()
_file_watcher_started = False


@dataclass
@@ -45,6 +54,39 @@ class BlueprintFile:
    meta: Optional[BlueprintMetadata] = field(default=None)


def start_blueprint_watcher():
    """Start blueprint watcher, if it's not running already."""
    # This function might be called twice since it's called on celery startup
    # pylint: disable=global-statement
    global _file_watcher_started
    if _file_watcher_started:
        return
    observer = Observer()
    observer.schedule(BlueprintEventHandler(), CONFIG.y("blueprints_dir"), recursive=True)
    observer.start()
    _file_watcher_started = True


class BlueprintEventHandler(FileSystemEventHandler):
    """Event handler for blueprint events"""

    def on_any_event(self, event: FileSystemEvent):
        if not isinstance(event, (FileCreatedEvent, FileModifiedEvent)):
            return
        if event.is_directory:
            return
        if isinstance(event, FileCreatedEvent):
            LOGGER.debug("new blueprint file created, starting discovery")
            blueprints_discovery.delay()
        if isinstance(event, FileModifiedEvent):
            path = Path(event.src_path)
            root = Path(CONFIG.y("blueprints_dir")).absolute()
            rel_path = str(path.relative_to(root))
            for instance in BlueprintInstance.objects.filter(path=rel_path):
                LOGGER.debug("modified blueprint file, starting apply", instance=instance)
                apply_blueprint.delay(instance.pk.hex)


@CELERY_APP.task(
    throws=(DatabaseError, ProgrammingError, InternalError),
)
@@ -60,8 +102,10 @@ def blueprints_find():
    """Find blueprints and return valid ones"""
    blueprints = []
    root = Path(CONFIG.y("blueprints_dir"))
    for file in root.glob("**/*.yaml"):
        path = Path(file)
    for path in root.rglob("**/*.yaml"):
        # Check if any part in the path starts with a dot and assume a hidden file
        if any(part for part in path.parts if part.startswith(".")):
            continue
        LOGGER.debug("found blueprint", path=str(path))
        with open(path, "r", encoding="utf-8") as blueprint_file:
            try:
@@ -82,7 +126,7 @@ def blueprints_find():
            )
            blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
            blueprints.append(blueprint)
            LOGGER.info(
            LOGGER.debug(
                "parsed & loaded blueprint",
                hash=file_hash,
                path=str(path),
@@ -94,7 +138,7 @@ def blueprints_find():
    throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
)
@prefill_task
def blueprints_discover(self: MonitoredTask):
def blueprints_discovery(self: MonitoredTask):
    """Find blueprints and check if they need to be created in the database"""
    count = 0
    for blueprint in blueprints_find():
@@ -179,3 +223,14 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
    finally:
        if instance:
            instance.save()


@CELERY_APP.task()
def clear_failed_blueprints():
    """Remove blueprints which couldn't be fetched"""
    # Exclude OCI blueprints as those might be temporarily unavailable
    for blueprint in BlueprintInstance.objects.exclude(path__startswith=OCI_PREFIX):
        try:
            blueprint.retrieve()
        except BlueprintRetrievalFailed:
            blueprint.delete()
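For reference, a standalone sketch (not from the diff; the directory path is a placeholder) of the hidden-file filter that blueprints_find now applies while walking the blueprint directory:

# Hedged sketch: walk a directory with rglob and skip any YAML file that has a
# dot-prefixed component anywhere in its path, mirroring the new filter above.
from pathlib import Path

root = Path("/blueprints")  # placeholder directory
for path in root.rglob("**/*.yaml"):
    if any(part for part in path.parts if part.startswith(".")):
        continue  # e.g. /blueprints/.hidden/foo.yaml is ignored
    print(path)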
@@ -1,15 +1,17 @@
"""Application API Views"""
from datetime import timedelta
from typing import Optional

from django.core.cache import cache
from django.db.models import QuerySet
from django.db.models.functions import ExtractHour
from django.http.response import HttpResponseBadRequest
from django.shortcuts import get_object_or_404
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
from guardian.shortcuts import get_objects_for_user
from rest_framework.decorators import action
from rest_framework.fields import ReadOnlyField, SerializerMethodField
from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.response import Response
@@ -21,6 +23,7 @@ from structlog.testing import capture_logs

from authentik.admin.api.metrics import CoordinateSerializer
from authentik.api.decorators import permission_required
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.models import Application, User
@@ -35,7 +38,6 @@ from authentik.lib.utils.file import (
from authentik.policies.api.exec import PolicyTestResultSerializer
from authentik.policies.engine import PolicyEngine
from authentik.policies.types import PolicyResult
from authentik.stages.user_login.stage import USER_LOGIN_AUTHENTICATED

LOGGER = get_logger()

@@ -50,6 +52,9 @@ class ApplicationSerializer(ModelSerializer):

    launch_url = SerializerMethodField()
    provider_obj = ProviderSerializer(source="get_provider", required=False, read_only=True)
    backchannel_providers_obj = ProviderSerializer(
        source="backchannel_providers", required=False, read_only=True, many=True
    )

    meta_icon = ReadOnlyField(source="get_meta_icon")

@@ -60,8 +65,12 @@ class ApplicationSerializer(ModelSerializer):
        user = self.context["request"].user
        return app.get_launch_url(user)

    class Meta:
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
            self.fields["icon"] = CharField(source="meta_icon", required=False)

    class Meta:
        model = Application
        fields = [
            "pk",
@@ -69,6 +78,8 @@ class ApplicationSerializer(ModelSerializer):
            "slug",
            "provider",
            "provider_obj",
            "backchannel_providers",
            "backchannel_providers_obj",
            "launch_url",
            "open_in_new_tab",
            "meta_launch_url",
@@ -80,6 +91,7 @@ class ApplicationSerializer(ModelSerializer):
        ]
        extra_kwargs = {
            "meta_icon": {"read_only": True},
            "backchannel_providers": {"required": False},
        }


@@ -185,10 +197,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
        if superuser_full_list and request.user.is_superuser:
            return super().list(request)

        # To prevent the user from having to double login when prompt is set to login
        # and the user has just signed it. This session variable is set in the UserLoginStage
        # and is (quite hackily) removed from the session in applications's API's List method
        self.request.session.pop(USER_LOGIN_AUTHENTICATED, None)
        queryset = self._filter_queryset_for_list(self.get_queryset())
        self.paginate_queryset(queryset)

@@ -196,9 +204,9 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
        if not should_cache:
            allowed_applications = self._get_allowed_applications(queryset)
        if should_cache:
            LOGGER.debug("Caching allowed application list")
            allowed_applications = cache.get(user_app_cache_key(self.request.user.pk))
            if not allowed_applications:
                LOGGER.debug("Caching allowed application list")
                allowed_applications = self._get_allowed_applications(queryset)
                cache.set(
                    user_app_cache_key(self.request.user.pk),
@@ -225,7 +233,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
        methods=["POST"],
        parser_classes=(MultiPartParser,),
    )
    # pylint: disable=unused-argument
    def set_icon(self, request: Request, slug: str):
        """Set application icon"""
        app: Application = self.get_object()
@@ -245,7 +252,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
        filter_backends=[],
        methods=["POST"],
    )
    # pylint: disable=unused-argument
    def set_icon_url(self, request: Request, slug: str):
        """Set application icon (as URL)"""
        app: Application = self.get_object()
@@ -254,15 +260,14 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
    @permission_required("authentik_core.view_application", ["authentik_events.view_event"])
    @extend_schema(responses={200: CoordinateSerializer(many=True)})
    @action(detail=True, pagination_class=None, filter_backends=[])
    # pylint: disable=unused-argument
    def metrics(self, request: Request, slug: str):
        """Metrics for application logins"""
        app = self.get_object()
        return Response(
            get_objects_for_user(request.user, "authentik_events.view_event")
            .filter(
            get_objects_for_user(request.user, "authentik_events.view_event").filter(
                action=EventAction.AUTHORIZE_APPLICATION,
                context__authorized_application__pk=app.pk.hex,
            )
            .get_events_per_hour()
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )
@@ -74,7 +74,6 @@ class AuthenticatedSessionSerializer(ModelSerializer):
        return GEOIP_READER.city_dict(instance.last_ip)

    class Meta:

        model = AuthenticatedSession
        fields = [
            "uuid",
@@ -2,27 +2,32 @@
from json import loads

from django.db.models.query import QuerySet
from django.http import Http404
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
from django_filters.filterset import FilterSet
from drf_spectacular.utils import OpenApiResponse, extend_schema
from guardian.shortcuts import get_objects_for_user
from rest_framework.decorators import action
from rest_framework.fields import CharField, IntegerField, JSONField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter

from authentik.api.decorators import permission_required
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import is_dict
from authentik.core.api.utils import PassiveSerializer, is_dict
from authentik.core.models import Group, User


class GroupMemberSerializer(ModelSerializer):
    """Stripped down user serializer to show relevant users for groups"""

    avatar = CharField(read_only=True)
    attributes = JSONField(validators=[is_dict], required=False)
    uid = CharField(read_only=True)

    class Meta:

        model = User
        fields = [
            "pk",
@@ -31,7 +36,6 @@ class GroupMemberSerializer(ModelSerializer):
            "is_active",
            "last_login",
            "email",
            "avatar",
            "attributes",
            "uid",
        ]
@@ -49,7 +53,6 @@ class GroupSerializer(ModelSerializer):
    num_pk = IntegerField(read_only=True)

    class Meta:

        model = Group
        fields = [
            "pk",
@@ -89,7 +92,6 @@ class GroupFilter(FilterSet):
        queryset=User.objects.all(),
    )

    # pylint: disable=unused-argument
    def filter_attributes(self, queryset, name, value):
        """Filter attributes by query args"""
        try:
@@ -108,11 +110,16 @@ class GroupFilter(FilterSet):
        return queryset

    class Meta:

        model = Group
        fields = ["name", "is_superuser", "members_by_pk", "attributes", "members_by_username"]


class UserAccountSerializer(PassiveSerializer):
    """Account adding/removing operations"""

    pk = IntegerField(required=True)


class GroupViewSet(UsedByMixin, ModelViewSet):
    """Group Viewset"""

@@ -134,3 +141,51 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
        if self.request.user.has_perm("authentik_core.view_group"):
            return self._filter_queryset_for_list(queryset)
        return super().filter_queryset(queryset)

    @permission_required(None, ["authentik_core.add_user"])
    @extend_schema(
        request=UserAccountSerializer,
        responses={
            204: OpenApiResponse(description="User added"),
            404: OpenApiResponse(description="User not found"),
        },
    )
    @action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
    def add_user(self, request: Request, pk: str) -> Response:
        """Add user to group"""
        group: Group = self.get_object()
        user: User = (
            get_objects_for_user(request.user, "authentik_core.view_user")
            .filter(
                pk=request.data.get("pk"),
            )
            .first()
        )
        if not user:
            raise Http404
        group.users.add(user)
        return Response(status=204)

    @permission_required(None, ["authentik_core.add_user"])
    @extend_schema(
        request=UserAccountSerializer,
        responses={
            204: OpenApiResponse(description="User added"),
            404: OpenApiResponse(description="User not found"),
        },
    )
    @action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
    def remove_user(self, request: Request, pk: str) -> Response:
        """Add user to group"""
        group: Group = self.get_object()
        user: User = (
            get_objects_for_user(request.user, "authentik_core.view_user")
            .filter(
                pk=request.data.get("pk"),
            )
            .first()
        )
        if not user:
            raise Http404
        group.users.remove(user)
        return Response(status=204)
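As a rough client-side sketch (not part of the diff), the new group actions could be exercised like this; the base URL, token, and exact route are assumptions, not taken from the diff:

# Hedged sketch: calling the new add_user action over HTTP.
# Base URL, API token, route layout and IDs are placeholders.
import requests

API = "https://authentik.example.com/api/v3"            # placeholder
HEADERS = {"Authorization": "Bearer <api-token>"}        # placeholder token
group_pk = "00000000-0000-0000-0000-000000000000"        # placeholder group UUID

# Body matches UserAccountSerializer: a single required integer "pk"
resp = requests.post(f"{API}/core/groups/{group_pk}/add_user/", json={"pk": 42}, headers=HEADERS)
print(resp.status_code)  # 204 on success, 404 if the user is not visible to the caller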
@@ -49,7 +49,6 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri
        return expression

    class Meta:

        model = PropertyMapping
        fields = [
            "pk",
@@ -94,7 +93,6 @@ class PropertyMappingViewSet(
                {
                    "name": subclass._meta.verbose_name,
                    "description": subclass.__doc__,
                    # pyright: reportGeneralTypeIssues=false
                    "component": subclass().component,
                    "model_name": subclass._meta.model_name,
                }
@@ -117,7 +115,6 @@ class PropertyMappingViewSet(
        ],
    )
    @action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
    # pylint: disable=unused-argument, invalid-name
    def test(self, request: Request, pk: str) -> Response:
        """Test Property Mapping"""
        mapping: PropertyMapping = self.get_object()
@@ -1,5 +1,9 @@
"""Provider API Views"""
from django.db.models import QuerySet
from django.db.models.query import Q
from django.utils.translation import gettext_lazy as _
from django_filters.filters import BooleanFilter
from django_filters.filterset import FilterSet
from drf_spectacular.utils import extend_schema
from rest_framework import mixins
from rest_framework.decorators import action
@@ -20,31 +24,58 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):

    assigned_application_slug = ReadOnlyField(source="application.slug")
    assigned_application_name = ReadOnlyField(source="application.name")
    assigned_backchannel_application_slug = ReadOnlyField(source="backchannel_application.slug")
    assigned_backchannel_application_name = ReadOnlyField(source="backchannel_application.name")

    component = SerializerMethodField()

    def get_component(self, obj: Provider) -> str:  # pragma: no cover
        """Get object component so that we know how to edit the object"""
        # pyright: reportGeneralTypeIssues=false
        if obj.__class__ == Provider:
            return ""
        return obj.component

    class Meta:

        model = Provider
        fields = [
            "pk",
            "name",
            "authentication_flow",
            "authorization_flow",
            "property_mappings",
            "component",
            "assigned_application_slug",
            "assigned_application_name",
            "assigned_backchannel_application_slug",
            "assigned_backchannel_application_name",
            "verbose_name",
            "verbose_name_plural",
            "meta_model_name",
        ]
        extra_kwargs = {
            "authorization_flow": {"required": True, "allow_null": False},
        }


class ProviderFilter(FilterSet):
    """Filter for providers"""

    application__isnull = BooleanFilter(method="filter_application__isnull")
    backchannel_only = BooleanFilter(
        method="filter_backchannel_only",
    )

    def filter_application__isnull(self, queryset: QuerySet, name, value):
        """Only return providers that are neither assigned to application,
        both as provider or application provider"""
        return queryset.filter(
            Q(backchannel_application__isnull=value, is_backchannel=True)
            | Q(application__isnull=value)
        )

    def filter_backchannel_only(self, queryset: QuerySet, name, value):
        """Only return backchannel providers"""
        return queryset.filter(is_backchannel=value)


class ProviderViewSet(
@@ -58,9 +89,7 @@ class ProviderViewSet(

    queryset = Provider.objects.none()
    serializer_class = ProviderSerializer
    filterset_fields = {
        "application": ["isnull"],
    }
    filterset_class = ProviderFilter
    search_fields = [
        "name",
        "application__name",
@@ -76,6 +105,8 @@ class ProviderViewSet(
        data = []
        for subclass in all_subclasses(self.queryset.model):
            subclass: Provider
            if subclass._meta.abstract:
                continue
            data.append(
                {
                    "name": subclass._meta.verbose_name,
@@ -5,16 +5,18 @@ from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework import mixins
from rest_framework.decorators import action
from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer, ReadOnlyField, SerializerMethodField
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import GenericViewSet
from structlog.stdlib import get_logger

from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
from authentik.api.decorators import permission_required
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
from authentik.core.models import Source, UserSourceConnection
@@ -40,13 +42,16 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):

    def get_component(self, obj: Source) -> str:
        """Get object component so that we know how to edit the object"""
        # pyright: reportGeneralTypeIssues=false
        if obj.__class__ == Source:
            return ""
        return obj.component

    class Meta:
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
            self.fields["icon"] = CharField(required=False)

    class Meta:
        model = Source
        fields = [
            "pk",
@@ -102,7 +107,6 @@ class SourceViewSet(
        methods=["POST"],
        parser_classes=(MultiPartParser,),
    )
    # pylint: disable=unused-argument
    def set_icon(self, request: Request, slug: str):
        """Set source icon"""
        source: Source = self.get_object()
@@ -122,7 +126,6 @@ class SourceViewSet(
        filter_backends=[],
        methods=["POST"],
    )
    # pylint: disable=unused-argument
    def set_icon_url(self, request: Request, slug: str):
        """Set source icon (as URL)"""
        source: Source = self.get_object()
@@ -142,7 +145,6 @@ class SourceViewSet(
                component = subclass.__bases__[0]().component
            else:
                component = subclass().component
            # pyright: reportGeneralTypeIssues=false
            data.append(
                {
                    "name": subclass._meta.verbose_name,
@@ -209,5 +211,6 @@ class UserSourceConnectionViewSet(
    queryset = UserSourceConnection.objects.all()
    serializer_class = UserSourceConnectionSerializer
    permission_classes = [OwnerSuperuserPermissions]
    filterset_fields = ["user"]
    filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
    ordering = ["pk"]
@@ -16,6 +16,7 @@ from rest_framework.viewsets import ModelViewSet
from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.api.decorators import permission_required
from authentik.blueprints.api import ManagedSerializer
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.users import UserSerializer
from authentik.core.api.utils import PassiveSerializer
@@ -29,17 +30,27 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):

    user_obj = UserSerializer(required=False, source="user", read_only=True)

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
            self.fields["key"] = CharField()

    def validate(self, attrs: dict[Any, str]) -> dict[Any, str]:
        """Ensure only API or App password tokens are created."""
        request: Request = self.context["request"]
        attrs.setdefault("user", request.user)
        request: Request = self.context.get("request")
        if not request:
            if "user" not in attrs:
                raise ValidationError("Missing user")
            if "intent" not in attrs:
                raise ValidationError("Missing intent")
        else:
            attrs.setdefault("user", request.user)
        attrs.setdefault("intent", TokenIntents.INTENT_API)
        if attrs.get("intent") not in [TokenIntents.INTENT_API, TokenIntents.INTENT_APP_PASSWORD]:
            raise ValidationError(f"Invalid intent {attrs.get('intent')}")
        return attrs

    class Meta:

        model = Token
        fields = [
            "pk",
@@ -112,7 +123,6 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
        }
    )
    @action(detail=True, pagination_class=None, filter_backends=[], methods=["GET"])
    # pylint: disable=unused-argument
    def view_key(self, request: Request, identifier: str) -> Response:
        """Return token key and log access"""
        token: Token = self.get_object()
@@ -134,11 +144,11 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
        },
    )
    @action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
    # pylint: disable=unused-argument
    def set_key(self, request: Request, identifier: str) -> Response:
        """Return token key and log access"""
        """Set token key. Action is logged as event. `authentik_core.set_token_key` permission
        is required."""
        token: Token = self.get_object()
        key = request.POST.get("key")
        key = request.data.get("key")
        if not key:
            return Response(status=400)
        token.key = key
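Since set_key now reads the key from request.data instead of request.POST, a JSON body works as well as form data. A hedged client sketch (route, identifier, and credentials are placeholders, not from the diff):

# Hedged sketch: setting a token's key with a JSON body.
import requests

resp = requests.post(
    "https://authentik.example.com/api/v3/core/tokens/my-token/set_key/",  # assumed route
    json={"key": "new-key-value"},
    headers={"Authorization": "Bearer <api-token>"},  # placeholder token
)
# The view returns HTTP 400 when no "key" is supplied; the success status is not shown in the diff.
print(resp.status_code)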
@@ -53,10 +53,9 @@ class UsedByMixin:
        responses={200: UsedBySerializer(many=True)},
    )
    @action(detail=True, pagination_class=None, filter_backends=[])
    # pylint: disable=invalid-name, unused-argument, too-many-locals
    # pylint: disable=too-many-locals
    def used_by(self, request: Request, *args, **kwargs) -> Response:
        """Get a list of all objects that use this object"""
        # pyright: reportGeneralTypeIssues=false
        model: Model = self.get_object()
        used_by = []
        shadows = []
@@ -4,6 +4,9 @@ from json import loads
from typing import Any, Optional

from django.contrib.auth import update_session_auth_hash
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache
from django.db.models.functions import ExtractHour
from django.db.models.query import QuerySet
from django.db.transaction import atomic
from django.db.utils import IntegrityError
@@ -35,11 +38,13 @@ from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import (
    BooleanField,
    DateTimeField,
    ListSerializer,
    ModelSerializer,
    PrimaryKeyRelatedField,
    ValidationError,
)
from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter
from structlog.stdlib import get_logger
@@ -56,15 +61,18 @@ from authentik.core.models import (
    USER_ATTRIBUTE_SA,
    USER_ATTRIBUTE_TOKEN_EXPIRING,
    USER_PATH_SERVICE_ACCOUNT,
    AuthenticatedSession,
    Group,
    Token,
    TokenIntents,
    User,
)
from authentik.events.models import EventAction
from authentik.events.models import Event, EventAction
from authentik.flows.exceptions import FlowNonApplicableException
from authentik.flows.models import FlowToken
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner
from authentik.flows.views.executor import QS_KEY_TOKEN
from authentik.lib.config import CONFIG
from authentik.stages.email.models import EmailStage
from authentik.stages.email.tasks import send_mails
from authentik.stages.email.utils import TemplateEmailMessage
@@ -80,7 +88,6 @@ class UserGroupSerializer(ModelSerializer):
    parent_name = CharField(source="parent.name", read_only=True)

    class Meta:

        model = Group
        fields = [
            "pk",
@@ -100,11 +107,11 @@ class UserSerializer(ModelSerializer):
    avatar = CharField(read_only=True)
    attributes = JSONField(validators=[is_dict], required=False)
    groups = PrimaryKeyRelatedField(
        allow_empty=True, many=True, source="ak_groups", queryset=Group.objects.all()
        allow_empty=True, many=True, source="ak_groups", queryset=Group.objects.all(), default=list
    )
    groups_obj = ListSerializer(child=UserGroupSerializer(), read_only=True, source="ak_groups")
    uid = CharField(read_only=True)
    username = CharField(max_length=150)
    username = CharField(max_length=150, validators=[UniqueValidator(queryset=User.objects.all())])

    def validate_path(self, path: str) -> str:
        """Validate path"""
@@ -116,7 +123,6 @@ class UserSerializer(ModelSerializer):
        return path

    class Meta:

        model = User
        fields = [
            "pk",
@@ -168,7 +174,6 @@ class UserSelfSerializer(ModelSerializer):
        return user.group_attributes(self._context["request"]).get("settings", {})

    class Meta:

        model = User
        fields = [
            "pk",
@@ -199,38 +204,47 @@ class SessionUserSerializer(PassiveSerializer):
class UserMetricsSerializer(PassiveSerializer):
    """User Metrics"""

    logins_per_1h = SerializerMethodField()
    logins_failed_per_1h = SerializerMethodField()
    authorizations_per_1h = SerializerMethodField()
    logins = SerializerMethodField()
    logins_failed = SerializerMethodField()
    authorizations = SerializerMethodField()

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_logins_per_1h(self, _):
        """Get successful logins per hour for the last 24 hours"""
    def get_logins(self, _):
        """Get successful logins per 8 hours for the last 7 days"""
        user = self.context["user"]
        request = self.context["request"]
        return (
            get_objects_for_user(user, "authentik_events.view_event")
            .filter(action=EventAction.LOGIN, user__pk=user.pk)
            .get_events_per_hour()
            get_objects_for_user(request.user, "authentik_events.view_event").filter(
                action=EventAction.LOGIN, user__pk=user.pk
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_logins_failed_per_1h(self, _):
        """Get failed logins per hour for the last 24 hours"""
    def get_logins_failed(self, _):
        """Get failed logins per 8 hours for the last 7 days"""
        user = self.context["user"]
        request = self.context["request"]
        return (
            get_objects_for_user(user, "authentik_events.view_event")
            .filter(action=EventAction.LOGIN_FAILED, context__username=user.username)
            .get_events_per_hour()
            get_objects_for_user(request.user, "authentik_events.view_event").filter(
                action=EventAction.LOGIN_FAILED, context__username=user.username
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_authorizations_per_1h(self, _):
        """Get failed logins per hour for the last 24 hours"""
    def get_authorizations(self, _):
        """Get failed logins per 8 hours for the last 7 days"""
        user = self.context["user"]
        request = self.context["request"]
        return (
            get_objects_for_user(user, "authentik_events.view_event")
            .filter(action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk)
            .get_events_per_hour()
            get_objects_for_user(request.user, "authentik_events.view_event").filter(
                action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )


@@ -262,7 +276,6 @@ class UsersFilter(FilterSet):
        queryset=Group.objects.all(),
    )

    # pylint: disable=unused-argument
    def filter_attributes(self, queryset, name, value):
        """Filter attributes by query args"""
        try:
@@ -318,12 +331,16 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        user: User = self.get_object()
        planner = FlowPlanner(flow)
        planner.allow_empty_flows = True
        plan = planner.plan(
            self.request._request,
            {
                PLAN_CONTEXT_PENDING_USER: user,
            },
        )
        try:
            plan = planner.plan(
                self.request._request,
                {
                    PLAN_CONTEXT_PENDING_USER: user,
                },
            )
        except FlowNonApplicableException:
            LOGGER.warning("Recovery flow not applicable to user")
            return None, None
        token, __ = FlowToken.objects.update_or_create(
            identifier=f"{user.uid}-password-reset",
            defaults={
@@ -346,6 +363,11 @@ class UserViewSet(UsedByMixin, ModelViewSet):
            {
                "name": CharField(required=True),
                "create_group": BooleanField(default=False),
                "expiring": BooleanField(default=True),
                "expires": DateTimeField(
                    required=False,
                    help_text="If not provided, valid for 360 days",
                ),
            },
        ),
        responses={
@@ -366,14 +388,20 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        """Create a new user account that is marked as a service account"""
        username = request.data.get("name")
        create_group = request.data.get("create_group", False)
        expiring = request.data.get("expiring", True)
        expires = request.data.get("expires", now() + timedelta(days=360))

        with atomic():
            try:
                user = User.objects.create(
                user: User = User.objects.create(
                    username=username,
                    name=username,
                    attributes={USER_ATTRIBUTE_SA: True, USER_ATTRIBUTE_TOKEN_EXPIRING: False},
                    attributes={USER_ATTRIBUTE_SA: True, USER_ATTRIBUTE_TOKEN_EXPIRING: expiring},
                    path=USER_PATH_SERVICE_ACCOUNT,
                )
                user.set_unusable_password()
                user.save()

                response = {
                    "username": user.username,
                    "user_uid": user.uid,
@@ -389,17 +417,17 @@ class UserViewSet(UsedByMixin, ModelViewSet):
                    identifier=slugify(f"service-account-{username}-password"),
                    intent=TokenIntents.INTENT_APP_PASSWORD,
                    user=user,
                    expires=now() + timedelta(days=360),
                    expires=expires,
                    expiring=expiring,
                )
                response["token"] = token.key
                return Response(response)
            except (IntegrityError) as exc:
            except IntegrityError as exc:
                return Response(data={"non_field_errors": [str(exc)]}, status=400)

    @extend_schema(responses={200: SessionUserSerializer(many=False)})
    @action(detail=False, pagination_class=None, filter_backends=[])
    # pylint: disable=invalid-name
    def me(self, request: Request) -> Response:
    @action(url_path="me", url_name="me", detail=False, pagination_class=None, filter_backends=[])
    def user_me(self, request: Request) -> Response:
        """Get information about current user"""
        context = {"request": request}
        serializer = SessionUserSerializer(
@@ -427,7 +455,6 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        },
    )
    @action(detail=True, methods=["POST"])
    # pylint: disable=invalid-name, unused-argument
    def set_password(self, request: Request, pk: int) -> Response:
        """Set password for user"""
        user: User = self.get_object()
@@ -445,12 +472,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):
    @permission_required("authentik_core.view_user", ["authentik_events.view_event"])
    @extend_schema(responses={200: UserMetricsSerializer(many=False)})
    @action(detail=True, pagination_class=None, filter_backends=[])
    # pylint: disable=invalid-name, unused-argument
    def metrics(self, request: Request, pk: int) -> Response:
        """User metrics per 1h"""
        user: User = self.get_object()
        serializer = UserMetricsSerializer(True)
        serializer = UserMetricsSerializer(instance={})
        serializer.context["user"] = user
        serializer.context["request"] = request
        return Response(serializer.data)

    @permission_required("authentik_core.reset_user_password")
@@ -461,7 +488,6 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        },
    )
    @action(detail=True, pagination_class=None, filter_backends=[])
    # pylint: disable=invalid-name, unused-argument
    def recovery(self, request: Request, pk: int) -> Response:
        """Create a temporary link that a user can use to recover their accounts"""
        link, _ = self._create_recovery_link()
@@ -486,7 +512,6 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        },
    )
    @action(detail=True, pagination_class=None, filter_backends=[])
    # pylint: disable=invalid-name, unused-argument
    def recovery_email(self, request: Request, pk: int) -> Response:
        """Create a temporary link that a user can use to recover their accounts"""
        for_user: User = self.get_object()
@@ -519,6 +544,58 @@ class UserViewSet(UsedByMixin, ModelViewSet):
            send_mails(email_stage, message)
        return Response(status=204)

    @permission_required("authentik_core.impersonate")
    @extend_schema(
        request=OpenApiTypes.NONE,
        responses={
            "204": OpenApiResponse(description="Successfully started impersonation"),
            "401": OpenApiResponse(description="Access denied"),
        },
    )
    @action(detail=True, methods=["POST"])
    def impersonate(self, request: Request, pk: int) -> Response:
        """Impersonate a user"""
        if not CONFIG.y_bool("impersonation"):
            LOGGER.debug("User attempted to impersonate", user=request.user)
            return Response(status=401)
        if not request.user.has_perm("impersonate"):
            LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
            return Response(status=401)

        user_to_be = self.get_object()

        request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER] = request.user
        request.session[SESSION_KEY_IMPERSONATE_USER] = user_to_be

        Event.new(EventAction.IMPERSONATION_STARTED).from_http(request, user_to_be)

        return Response(status=201)

    @extend_schema(
        request=OpenApiTypes.NONE,
        responses={
            "204": OpenApiResponse(description="Successfully started impersonation"),
        },
    )
    @action(detail=False, methods=["GET"])
    def impersonate_end(self, request: Request) -> Response:
        """End Impersonation a user"""
        if (
            SESSION_KEY_IMPERSONATE_USER not in request.session
            or SESSION_KEY_IMPERSONATE_ORIGINAL_USER not in request.session
        ):
            LOGGER.debug("Can't end impersonation", user=request.user)
            return Response(status=204)

        original_user = request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER]

        del request.session[SESSION_KEY_IMPERSONATE_USER]
        del request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER]

        Event.new(EventAction.IMPERSONATION_ENDED).from_http(request, original_user)

        return Response(status=204)

    def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
        """Custom filter_queryset method which ignores guardian, but still supports sorting"""
        for backend in list(self.filter_backends):
@@ -560,3 +637,14 @@ class UserViewSet(UsedByMixin, ModelViewSet):
                )
            }
        )

    def partial_update(self, request: Request, *args, **kwargs) -> Response:
        response = super().partial_update(request, *args, **kwargs)
        instance: User = self.get_object()
        if not instance.is_active:
            sessions = AuthenticatedSession.objects.filter(user=instance)
            session_ids = sessions.values_list("session_key", flat=True)
            cache.delete_many(f"{KEY_PREFIX}{session}" for session in session_ids)
            sessions.delete()
            LOGGER.debug("Deleted user's sessions", user=instance.username)
        return response
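A hedged client-side sketch of the extended service-account endpoint, which now accepts the optional expiring and expires fields shown above; the base URL, route, and credentials are assumptions, not from the diff:

# Hedged sketch: creating a service account with the new "expiring"/"expires" options.
from datetime import datetime, timedelta, timezone
import requests

payload = {
    "name": "deploy-bot",  # placeholder account name
    "create_group": False,
    "expiring": True,
    "expires": (datetime.now(timezone.utc) + timedelta(days=30)).isoformat(),
}
resp = requests.post(
    "https://authentik.example.com/api/v3/core/users/service_account/",  # assumed route
    json=payload,
    headers={"Authorization": "Bearer <api-token>"},  # placeholder token
)
print(resp.json())  # contains username, user_uid and the generated app-password token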
@@ -2,7 +2,7 @@
from typing import Any

from django.db.models import Model
from rest_framework.fields import CharField, IntegerField
from rest_framework.fields import CharField, IntegerField, JSONField
from rest_framework.serializers import Serializer, SerializerMethodField, ValidationError


@@ -23,6 +23,12 @@ class PassiveSerializer(Serializer):
        return Model()


class PropertyMappingPreviewSerializer(PassiveSerializer):
    """Preview how the current user is mapped via the property mappings selected in a provider"""

    preview = JSONField(read_only=True)


class MetaNameSerializer(PassiveSerializer):
    """Add verbose names to response"""
Some files were not shown because too many files have changed in this diff.