Compare commits
723 Commits
version/20 ... version-20
| SHA1 | Author | Date |
|---|---|---|
| c15e4b24a1 | |||
| b6f518ffe6 | |||
| 4e476fd4e9 | |||
| 03503363e5 | |||
| 22d6621b02 | |||
| 0023df64c8 | |||
| 59a259e43a | |||
| c6f39f5eb4 | |||
| e3c0aad48a | |||
| 91dd33cee6 | |||
| 5a2c367e89 | |||
| 3b05c9cb1a | |||
| 6e53f1689d | |||
| e3be0f2550 | |||
| 294f2243c1 | |||
| 7b1373e8d6 | |||
| e70b486f20 | |||
| b90174f153 | |||
| 7d7acd8494 | |||
| 4d9d7c5efb | |||
| d614b3608d | |||
| beb2715fa7 | |||
| 5769ff45b5 | |||
| 9d6f79558f | |||
| 41d5bff9d3 | |||
| ec84ba9b6d | |||
| 042a62f99e | |||
| 907f02cfee | |||
| 53fe412bf9 | |||
| ef9e177fe9 | |||
| 28e675596b | |||
| 9b7f57cc75 | |||
| 935a8f4d58 | |||
| 01fcbb325b | |||
| 7d3d17acb9 | |||
| e434321f7c | |||
| ebd476be14 | |||
| 31ba543c62 | |||
| a101d48b5a | |||
| 4c166dcf52 | |||
| 47b1f025e1 | |||
| 8f44c792ac | |||
| e57b6f2347 | |||
| 275d0dfd03 | |||
| f18cbace7a | |||
| 212220554f | |||
| a596392bc3 | |||
| 3e22740eac | |||
| d18a691f63 | |||
| 3cd5e68bc1 | |||
| c741c13132 | |||
| 924f6f104a | |||
| 454594025b | |||
| e72097292c | |||
| ab17a12184 | |||
| 776f3f69a5 | |||
| 8560c7150a | |||
| 301386fb4a | |||
| 68e8b6990b | |||
| 4f800c4758 | |||
| 90c31c2214 | |||
| 50e3d317b2 | |||
| 3eed7bb010 | |||
| 0ef8edc9f1 | |||
| a6373ebb33 | |||
| bf8ce55eea | |||
| 61b4fcb5f3 | |||
| 81275e3bd1 | |||
| 7988bf7748 | |||
| 00d8eec360 | |||
| 82150c8e84 | |||
| 1dbd749a74 | |||
| a96479f16c | |||
| 5d5fb1f37e | |||
| b6f4d6a5eb | |||
| 8ab5c04c2c | |||
| 386944117e | |||
| 9154b9b85d | |||
| fc19372709 | |||
| e5d9c6537c | |||
| bf5cbac314 | |||
| 5cca637a3d | |||
| 5bfb8b454b | |||
| 4d96437972 | |||
| d03b0b8152 | |||
| c249b55ff5 | |||
| 1e1876b34c | |||
| a27493ad1b | |||
| 95b1ab820e | |||
| 5cf9f0002b | |||
| fc7a452b0c | |||
| 25ee0e4b45 | |||
| 46f12e62e8 | |||
| 4245dea25a | |||
| 908db3df81 | |||
| ef4f9aa437 | |||
| 902dd83c67 | |||
| 1c4b78b5f4 | |||
| d854d819d1 | |||
| f246da6b73 | |||
| 4a56b5e827 | |||
| 53b10e64f8 | |||
| 27e4c7027c | |||
| 410d1b97cd | |||
| f93f7e635b | |||
| 74eba04735 | |||
| 01bdaffe36 | |||
| f6b556713a | |||
| abe38bb16a | |||
| f2b8d45999 | |||
| 3f61dff1cb | |||
| b19da6d774 | |||
| 7c55616e29 | |||
| 952a7f07c1 | |||
| 6510b97c1e | |||
| 19b707a0fb | |||
| 320a600349 | |||
| 10110deae5 | |||
| 884c546f32 | |||
| abec906677 | |||
| 22d1dd801c | |||
| 03891cbe09 | |||
| 3c5157dfd4 | |||
| d241e8d51d | |||
| 7ba15884ed | |||
| 47356915b1 | |||
| 2520c92b78 | |||
| e7e0e6d213 | |||
| ca0250e19f | |||
| cf4c7c1bcb | |||
| 670af8789a | |||
| 5c5634830f | |||
| b6b0edb7ad | |||
| 45440abc80 | |||
| 9c42b75567 | |||
| e9a477c1eb | |||
| fa60655a5d | |||
| 5d729b4878 | |||
| 8692f7233f | |||
| 457e17fec3 | |||
| 87e99625e6 | |||
| 6f32eeea43 | |||
| dfcf8b2d40 | |||
| 846006f2e3 | |||
| f557b2129f | |||
| 6dc2003e34 | |||
| 0149c89003 | |||
| f458cae954 | |||
| f01d117ce6 | |||
| 2bde43e5dc | |||
| 84cc0b5490 | |||
| 2f3026084e | |||
| 89696edbee | |||
| c1f0833c09 | |||
| c77f804b77 | |||
| 8e83209631 | |||
| 2e48e0cc2f | |||
| e72f0ab160 | |||
| a3c681cc44 | |||
| 5b3a9e29fb | |||
| 15803dc67d | |||
| ff37e064c9 | |||
| ef8e922e2a | |||
| 34b11524f1 | |||
| 9e2492be5c | |||
| b3ba083ff0 | |||
| 22a8603892 | |||
| d83d058a4b | |||
| ec3fd4a3ab | |||
| 0764668b14 | |||
| 16b6c17305 | |||
| e60509697a | |||
| 85364af9e9 | |||
| cf4b4030aa | |||
| 74dc025869 | |||
| cabdc53553 | |||
| 29e9f399bd | |||
| dad43017a0 | |||
| 7fb939f97b | |||
| 88859b1c26 | |||
| c78236a2a2 | |||
| ba55538a34 | |||
| f742c73e24 | |||
| ca314c262c | |||
| b932b6c963 | |||
| 3c048a1921 | |||
| 8a60a7e26f | |||
| f10b57ba0b | |||
| e53114a645 | |||
| 2e50532518 | |||
| 1936ddfecb | |||
| 4afef46cb8 | |||
| 92b4244e81 | |||
| dfbf7027bc | |||
| eca2ef20d0 | |||
| cac5c7b3ea | |||
| 37ee555c8e | |||
| f910da0f8a | |||
| fc9d270992 | |||
| dcbc3d788a | |||
| 4658018a90 | |||
| 577b7ee515 | |||
| 621773c1ea | |||
| 3da526f20e | |||
| 052e465041 | |||
| c843f18743 | |||
| 80d0b14bb8 | |||
| 68637cf7cf | |||
| 82acba26af | |||
| ff8a812823 | |||
| 7f5fed2aea | |||
| a5c30fd9c7 | |||
| ef23a0da52 | |||
| ba527e7141 | |||
| 8edc254ab5 | |||
| 42627d21b0 | |||
| 2479b157d0 | |||
| 602573f83f | |||
| 20c33fa011 | |||
| 8599d9efe0 | |||
| 8e6fcfe350 | |||
| 558aa45201 | |||
| e9910732bc | |||
| 246dd4b062 | |||
| 4425f8d183 | |||
| c410bb8c36 | |||
| 44f62a4773 | |||
| b6ff04694f | |||
| d4ce0e8e41 | |||
| 362d72da8c | |||
| 88d0f8d8a8 | |||
| 61097b9400 | |||
| 7a73ddfb60 | |||
| d66f13c249 | |||
| 8cc3cb6a42 | |||
| 4c5537ddfe | |||
| a95779157d | |||
| 70256727fd | |||
| ac6afb2b82 | |||
| 2ea7bd86e8 | |||
| 95bce9c9e7 | |||
| 71a22c2a34 | |||
| f3eb85877d | |||
| 273f5211a0 | |||
| db06428ab9 | |||
| 109d8e48d4 | |||
| 2ca115285c | |||
| f5459645a5 | |||
| 14c159500d | |||
| 03da87991f | |||
| e38ee9c580 | |||
| 3bf53b2db1 | |||
| f33190caa5 | |||
| 741822424a | |||
| 0ca6fbb224 | |||
| f72b652b24 | |||
| 0a2c1eb419 | |||
| eb9593a847 | |||
| 7c71c52791 | |||
| 59493c02c4 | |||
| 83089b47d3 | |||
| 103e723d8c | |||
| 7d6e88061f | |||
| f8aab40e3e | |||
| 5123bc1316 | |||
| 30e8408e85 | |||
| bb34474101 | |||
| a105760123 | |||
| f410a77010 | |||
| 6ff8fdcc49 | |||
| 50ca3dc772 | |||
| 2a09fc0ae2 | |||
| fbb6756488 | |||
| f45fb2eac0 | |||
| 7b8cde17e6 | |||
| 186634fc67 | |||
| c84b1b7997 | |||
| 6e83467481 | |||
| 72db17f23b | |||
| ee4e176039 | |||
| e18e681c2b | |||
| 10fe67e08d | |||
| fc1db83be7 | |||
| 3740e65906 | |||
| 30386cd899 | |||
| 64a10e9a46 | |||
| 77d6242cce | |||
| 9a86dcaec3 | |||
| 0b00768b84 | |||
| d162c79373 | |||
| 05db352a0f | |||
| 5bf3d7fe02 | |||
| 1ae1cbebf4 | |||
| 8c16dfc478 | |||
| c6a3286e4c | |||
| 44cfd7e5b0 | |||
| 210d4c5058 | |||
| 6b39d616b1 | |||
| 32ace1bece | |||
| 54f893b84f | |||
| b5685ec072 | |||
| 5854833240 | |||
| 4b2437a6f1 | |||
| 2981ac7b10 | |||
| 59a51c859a | |||
| 47bab6c182 | |||
| 4e6714fffe | |||
| aa6b595545 | |||
| 0131b1f6cc | |||
| 9f53c359dd | |||
| 28e4dba3e8 | |||
| 2afd46e1df | |||
| f5991b19be | |||
| 5cc75cb25c | |||
| 68c1df2d39 | |||
| c83724f45c | |||
| 5f91c150df | |||
| 0bfe999442 | |||
| 58440b16c4 | |||
| 57757a2ff5 | |||
| 2993f506a7 | |||
| e4841d54a1 | |||
| 4f05dcec89 | |||
| ede6bcd31e | |||
| 728c8e994d | |||
| 5290b64415 | |||
| fec6de1ba2 | |||
| 69678dcfa6 | |||
| 4911a243ff | |||
| 70316b37da | |||
| 307cb94e3b | |||
| ace53a8fa5 | |||
| 0544dc3f83 | |||
| 708ff300a3 | |||
| 4e63f0f215 | |||
| 141481df3a | |||
| 29241cc287 | |||
| e81e97d404 | |||
| a5182e5c24 | |||
| cf5ff6e160 | |||
| f2b3a2ec91 | |||
| 69780c67a9 | |||
| ac9cf590bc | |||
| cb6edcb198 | |||
| 8eecc28c3c | |||
| 10b16bc36a | |||
| 2fe88cfea9 | |||
| caab396b56 | |||
| 5f0f4284a2 | |||
| c11be2284d | |||
| aa321196d7 | |||
| ff03db61a8 | |||
| f3b3ce6572 | |||
| 09b02e1aec | |||
| 451a9aaf01 | |||
| eaee7cb562 | |||
| a010c91a52 | |||
| 709194330f | |||
| 5914bbf173 | |||
| 5e9166f859 | |||
| 35b8ef6592 | |||
| 772a939f17 | |||
| 24971801cf | |||
| 43aebe8cb2 | |||
| 19cfc87c84 | |||
| f920f183c8 | |||
| 97f979c81e | |||
| e61411d396 | |||
| c4f985f542 | |||
| 302dee7ab2 | |||
| 83c12ad483 | |||
| 4224fd5c6f | |||
| 597ce1eb42 | |||
| 5ef385f0bb | |||
| cda4be3d47 | |||
| 8cdf22fc94 | |||
| 6efc7578ef | |||
| 4e2457560d | |||
| 2ddf122d27 | |||
| a24651437a | |||
| 30bb7acb17 | |||
| 7859145138 | |||
| 8a8aafec81 | |||
| deebdf2bcc | |||
| 4982c4abcb | |||
| 1486f90077 | |||
| f4988bc45e | |||
| 8abc9cc031 | |||
| 534689895c | |||
| 8a0dd6be24 | |||
| 65d2eed82d | |||
| e450e7b107 | |||
| 552ddda909 | |||
| bafeff7306 | |||
| 6791436302 | |||
| 7eda794070 | |||
| e3129c1067 | |||
| ff481ba6e7 | |||
| a106bad2db | |||
| 3a1c311d02 | |||
| 6465333f4f | |||
| b761659227 | |||
| 9321c355f8 | |||
| 86c8e79ea1 | |||
| 8916b1f8ab | |||
| 41fcf2aba6 | |||
| 87e72b08a9 | |||
| b2fcd42e3c | |||
| fc1b47a80f | |||
| af14e3502e | |||
| a2faa5ceb5 | |||
| 63a19a1381 | |||
| b472dcb7e7 | |||
| 6303909031 | |||
| 4bdc06865b | |||
| 2ee48cd039 | |||
| 893d5f452b | |||
| 340a9bc8ee | |||
| cb3d9f83f1 | |||
| 4ba55aa8e9 | |||
| bab6f501ec | |||
| 7327939684 | |||
| ffb0135f06 | |||
| ee0ddc3d17 | |||
| 5dd979d66c | |||
| a9bd34f3c5 | |||
| db316b59c5 | |||
| 6209714f87 | |||
| 1ed2bddba7 | |||
| 26b35c9b7b | |||
| 86a9271f75 | |||
| 402ed9bd20 | |||
| 68a0684569 | |||
| bd2e453218 | |||
| 1f31c63e57 | |||
| 480410efa2 | |||
| e9bfee52ed | |||
| 326b574d54 | |||
| 0a7abcf2ad | |||
| 9e5019881e | |||
| 8071750681 | |||
| f2f0931904 | |||
| a91204e5b9 | |||
| b14c22cbff | |||
| b3e40c6aed | |||
| 873aa4bb22 | |||
| c1ea78c422 | |||
| 3c8bbc2621 | |||
| 42a9979d91 | |||
| b7f94df4d9 | |||
| 4143d3fe28 | |||
| f95c06b76f | |||
| e3e9178ccc | |||
| b694816e7b | |||
| e046000f36 | |||
| edb5caae9b | |||
| 02d27651f3 | |||
| 44cd4d847d | |||
| 472256794d | |||
| cbb6887983 | |||
| 317e9ec605 | |||
| ada2a16412 | |||
| 61f6b0f122 | |||
| 6a3f7e45cf | |||
| 2b78c4ba86 | |||
| 680ef641fb | |||
| 2b5504ff63 | |||
| f8a6aa3250 | |||
| 6c23fc4b2b | |||
| 639c2f5c2e | |||
| e44632f9a0 | |||
| 3f2ce34468 | |||
| 426cef998f | |||
| 8ddb62ed0f | |||
| 572f6d4ea0 | |||
| 8db68410c6 | |||
| caa3c3de32 | |||
| 23b5ca761a | |||
| f1b9021e3e | |||
| 99c62af89e | |||
| 8ae50814fe | |||
| 2e2b491ec7 | |||
| ac432e78e2 | |||
| 83ac42ac43 | |||
| 4bd1cd127b | |||
| 2eb5a5cc76 | |||
| 75051687e6 | |||
| 7e316b5fc2 | |||
| 5594ad0b36 | |||
| ea097afeae | |||
| b77b4b5c80 | |||
| f8dc7f48f2 | |||
| 692e75b057 | |||
| 02771683a6 | |||
| 40404ff41d | |||
| fdd5211253 | |||
| 85a417d22e | |||
| 66c530ea06 | |||
| 347c3793fc | |||
| cf78c89830 | |||
| 20c738c384 | |||
| 4f54ce6afb | |||
| f0d7edb963 | |||
| e42ad8db93 | |||
| e917e756cc | |||
| b4963bec76 | |||
| 0d23796989 | |||
| d0ceafe79e | |||
| f2023a7af2 | |||
| 31d597005f | |||
| 62dc86be7b | |||
| 7aa8e35f87 | |||
| 60b95271eb | |||
| 382b0e8941 | |||
| 3b068610b9 | |||
| 9a8f62f42e | |||
| 632e3cf7dc | |||
| e7144649d5 | |||
| dd8909c9b2 | |||
| e6818c1f6a | |||
| 10c4e3c717 | |||
| b8425867c8 | |||
| a05da8cdbf | |||
| c3aeefa653 | |||
| 62c840df21 | |||
| 45d1db8880 | |||
| b34f30f1dd | |||
| 7a54e84eb4 | |||
| 917eef96fb | |||
| 9a393848b2 | |||
| a6abeb50c6 | |||
| 39acb044fb | |||
| 7d2f622f4b | |||
| a2b38caf64 | |||
| 1193b9fd22 | |||
| e3a5ef1907 | |||
| e597bb4542 | |||
| c31df2b3f9 | |||
| 3f2637cffa | |||
| 3b6d9bec0a | |||
| b184210610 | |||
| d2010808ee | |||
| f5b185dd06 | |||
| ae161c1ba9 | |||
| 109283b189 | |||
| 235d283def | |||
| 96a86b3298 | |||
| db9ea8603c | |||
| 8b7f698c7b | |||
| 813c13ce45 | |||
| 629a0e1a4d | |||
| d1e2c018a3 | |||
| 1e86844823 | |||
| b58875d4c7 | |||
| 03e0eecb1d | |||
| 7aa61d86e4 | |||
| 0e6a799e6d | |||
| bc6afdf94f | |||
| 80364b04a9 | |||
| 0948e0ee1c | |||
| 5c54de66fc | |||
| 937edc73bc | |||
| 2c0d8d8943 | |||
| 059ccdd592 | |||
| 0ec0d3f1aa | |||
| 0a0eee138a | |||
| 3ed4c38101 | |||
| de8cf65503 | |||
| 121b36f35f | |||
| 363aed2a47 | |||
| ef994e0084 | |||
| e1ef196283 | |||
| f81ffd54f3 | |||
| f9bfae9190 | |||
| 0d686465a4 | |||
| e13b4a561f | |||
| f6417f95e5 | |||
| 9c6bf5f4ae | |||
| d2d7acb50e | |||
| c7681dde32 | |||
| 8cf9661e08 | |||
| 2dbd76cf90 | |||
| 28d39f4d80 | |||
| 760428aa18 | |||
| 49bbac7441 | |||
| 0b8cfd437b | |||
| b69aaf9417 | |||
| 758d1bdfd4 | |||
| ab501ca971 | |||
| 9657741a3d | |||
| 29b7368f42 | |||
| 75724b6f8d | |||
| 7c9f821bfd | |||
| 5b9e6bed6c | |||
| 6113d7d768 | |||
| 0e3602d7eb | |||
| 2b94e9a687 | |||
| 6ed7d842e4 | |||
| 8794c840cf | |||
| 9c9c00755a | |||
| 6703c0a5d1 | |||
| 060f19ce06 | |||
| b2d2e7cbc8 | |||
| 91fd792f88 | |||
| 2d9cd28221 | |||
| aa64cf898f | |||
| 27d109c1fe | |||
| 1b4a14f3ee | |||
| 9835785864 | |||
| d785998c5a | |||
| 8ba9553220 | |||
| 6eb132c48b | |||
| b523cd064b | |||
| 355b832cc3 | |||
| 8f5af464a2 | |||
| fb70769358 | |||
| ad06778c34 | |||
| bcb4451fb7 | |||
| 110d558572 | |||
| e32d4f0095 | |||
| 0e413acd61 | |||
| d3397c349f | |||
| fb18a10e61 | |||
| 9bb0d04aeb | |||
| 666cf77b04 | |||
| 90ca1b8e5a | |||
| f1e95b8816 | |||
| dad8547212 | |||
| a957e1fc45 | |||
| 39e3f02503 | |||
| 2b999e922c | |||
| 4224134a19 | |||
| eda260dddd | |||
| 8a1dd521e1 | |||
| 1c5e91de1d | |||
| 4b1744fad0 | |||
| f17b83010d | |||
| 12ddf9e73c | |||
| 0b3b300333 | |||
| 23f1a19765 | |||
| b27e998615 | |||
| 2b928146a8 | |||
| a94b0504b7 | |||
| 4fcbfa7709 | |||
| 986e01db20 | |||
| 9092d1189b | |||
| 605ed94ba2 | |||
| 4cbeeb9a0c | |||
| 993dee6aad | |||
| c663deb659 | |||
| 61621e7d60 | |||
| 0ee9b07172 | |||
| 431ba6b4ef | |||
| 146818793e | |||
| 0ce663bce4 | |||
| 923ba4fb42 | |||
| bb6eed0db1 | |||
| d1bd8f333b | |||
| 2ac9f5426d | |||
| 8d1fd48003 | |||
| 241cb01ec6 | |||
| 65b4139997 | |||
| 1431be8c44 | |||
| 049fceeeee | |||
| e6638afa3c | |||
| 465898c7d0 | |||
| c363b1cfde | |||
| b30ffd1318 | |||
| fe0d3a64c8 | |||
| ae9f1c1063 | |||
| ea63d384fd | |||
| c28d75754d | |||
| 518b691e00 | |||
| cd845be45d | |||
| a813d8e05e | |||
| 75f850f4d2 | |||
| c84265c6f0 | |||
| a477ea29cd | |||
| f6aa85e340 | |||
| 0aeedb3ad8 | |||
| 4b29f238b5 | |||
| 34157db06a | |||
| 84b9e66a97 | |||
| e831e4fb94 | |||
| 956922820b | |||
| b0fac9c9f1 | |||
| f4db09cd59 | |||
| 047030f901 | |||
| 638e8d741f | |||
| 425b87a6d0 | |||
| e7dc763612 | |||
| a80cc94da9 | |||
| 547dd3cb7a | |||
| 95739a934c | |||
| d12e24017e | |||
| e4a0345231 | |||
| 078633c2af | |||
| 4b8b800648 | |||
| 6f9ed001a1 | |||
| e4095dfffe | |||
| d5341c2284 | |||
| 357bd65028 | |||
| 867fb0dac0 | |||
| 2666aa2c73 | |||
| f0e9bafa35 | |||
| 0d739f5c1a | |||
| e08077c73a | |||
| 7cf8a31057 | |||
| c43049a981 | |||
| 1a9ace6f9d | |||
| b8d86bc482 | |||
| f7044e41c6 | |||
| fa59fec17a | |||
| e29afa289e | |||
| 4d4193a586 | |||
| 59343ff441 | |||
| cab564152d | |||
| 97b814ab33 | |||
| 88516ba2ca | |||
| f069cfb643 | |||
| 4ce3c2341c | |||
| 77e42d60cb | |||
| cacb919c6f |
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 2021.10.4
current_version = 2021.12.5
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)
@@ -17,7 +17,7 @@ values =
beta
stable

[bumpversion:file:website/docs/installation/docker-compose.md]
[bumpversion:file:pyproject.toml]

[bumpversion:file:docker-compose.yml]

@@ -30,7 +30,3 @@ values =
[bumpversion:file:internal/constants/constants.go]

[bumpversion:file:web/src/constants.ts]

[bumpversion:file:website/docs/outposts/manual-deploy-docker-compose.md]

[bumpversion:file:website/docs/outposts/manual-deploy-kubernetes.md]

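For context, the `parse` expression in the bumpversion configuration above splits a version string into major/minor/patch plus an optional release suffix. A minimal sketch of how it behaves against the new version number; the regex and the version string come from the diff, the surrounding script is illustrative only:

```python
# Illustrative only: applies the `parse` pattern from the bumpversion config
# above to the new version string from this release.
import re

BUMPVERSION_PARSE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)"
)

match = BUMPVERSION_PARSE.match("2021.12.5")
assert match is not None
# Prints: {'major': '2021', 'minor': '12', 'patch': '5', 'release': ''}
print(match.groupdict())
```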
.github/stale.yml (vendored, 1 change)
@@ -7,6 +7,7 @@ exemptLabels:
- pinned
- security
- pr_wanted
- enhancement/confirmed
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had

.github/workflows/ci-main.yml (vendored, 299 changes)
@@ -18,112 +18,51 @@ env:
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"

jobs:
lint-pylint:
lint:
strategy:
fail-fast: false
matrix:
job:
- pylint
- black
- isort
- bandit
- pyright
- pending-migrations
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: '3.9'
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- name: run pylint
run: pipenv run pylint authentik tests lifecycle
lint-black:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: '3.9'
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- name: run black
run: pipenv run black --check authentik tests lifecycle
lint-isort:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: '3.9'
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- name: run isort
run: pipenv run isort --check authentik tests lifecycle
lint-bandit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: '3.9'
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- name: run bandit
run: pipenv run bandit -r authentik tests lifecycle
lint-pyright:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: '3.9'
- uses: actions/setup-node@v2
with:
node-version: '16'
- id: cache-poetry
uses: actions/cache@v2.1.7
with:
path: ~/.cache/pypoetry/virtualenvs
key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
- name: prepare
run: |
scripts/ci_prepare.sh
npm install -g pyright@1.1.136
- name: run bandit
run: pipenv run pyright e2e lifecycle
env:
INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- name: run job
run: poetry run make ci-${{ matrix.job }}
test-migrations:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- id: cache-poetry
uses: actions/cache@v2.1.7
with:
python-version: '3.9'
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
path: ~/.cache/pypoetry/virtualenvs
key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- name: run migrations
run: pipenv run python -m lifecycle.migrate
run: poetry run python -m lifecycle.migrate
test-migrations-from-stable:
runs-on: ubuntu-latest
steps:
@@ -131,68 +70,79 @@ jobs:
with:
fetch-depth: 0
- uses: actions/setup-python@v2
with:
python-version: '3.9'
- name: prepare variables
id: ev
run: |
python ./scripts/gh_env.py
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
sudo pip install -U pipenv
- id: cache-poetry
uses: actions/cache@v2.1.7
with:
path: ~/.cache/pypoetry/virtualenvs
key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
- name: checkout stable
run: |
# Copy current, latest config to local
cp authentik/lib/default.yml local.env.yml
cp -R .github ..
cp -R scripts ..
cp -R poetry.lock pyproject.toml ..
git checkout $(git describe --abbrev=0 --match 'version/*')
git checkout ${{ steps.ev.outputs.branchName }} -- .github
git checkout ${{ steps.ev.outputs.branchName }} -- scripts
rm -rf .github/ scripts/
mv ../.github ../scripts ../poetry.lock ../pyproject.toml .
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
run: |
scripts/ci_prepare.sh
# Sync anyways since stable will have different dependencies
pipenv sync --dev
# TODO: Remove after next stable release
if [[ -f "Pipfile.lock" ]]; then
pipenv install --dev
fi
poetry install
- name: run migrations to stable
run: pipenv run python -m lifecycle.migrate
run: poetry run python -m lifecycle.migrate
- name: checkout current code
run: |
set -x
git fetch
git checkout ${{ steps.ev.outputs.branchName }}
pipenv sync --dev
git reset --hard HEAD
# TODO: Remove after next stable release
rm -f poetry.lock
git checkout $GITHUB_SHA
# TODO: Remove after next stable release
if [[ -f "Pipfile.lock" ]]; then
pipenv install --dev
fi
poetry install
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- name: migrate to latest
run: pipenv run python -m lifecycle.migrate
run: poetry run python -m lifecycle.migrate
test-unittest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- id: cache-poetry
uses: actions/cache@v2.1.7
with:
python-version: '3.9'
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
path: ~/.cache/pypoetry/virtualenvs
key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- uses: testspace-com/setup-testspace@v1
with:
domain: ${{github.repository_owner}}
- name: run unittest
run: |
pipenv run make test
pipenv run coverage xml
poetry run make test
poetry run coverage xml
- name: run testspace
if: ${{ always() }}
run: |
@@ -204,16 +154,14 @@ jobs:
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- id: cache-poetry
uses: actions/cache@v2.1.7
with:
python-version: '3.9'
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
path: ~/.cache/pypoetry/virtualenvs
key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- uses: testspace-com/setup-testspace@v1
with:
@@ -222,21 +170,19 @@ jobs:
uses: helm/kind-action@v1.2.0
- name: run integration
run: |
pipenv run make test-integration
pipenv run coverage xml
poetry run make test-integration
poetry run coverage xml
- name: run testspace
if: ${{ always() }}
run: |
testspace [integration]unittest.xml --link=codecov
- if: ${{ always() }}
uses: codecov/codecov-action@v2
test-e2e:
test-e2e-provider:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: '3.9'
- uses: actions/setup-node@v2
with:
node-version: '16'
@@ -245,19 +191,19 @@ jobs:
- uses: testspace-com/setup-testspace@v1
with:
domain: ${{github.repository_owner}}
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- id: cache-poetry
uses: actions/cache@v2.1.7
with:
path: ~/.cache/pypoetry/virtualenvs
key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
run: |
scripts/ci_prepare.sh
docker-compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v2.1.6
uses: actions/cache@v2.1.7
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/**') }}
@@ -269,36 +215,90 @@ jobs:
npm run build
- name: run e2e
run: |
pipenv run make test-e2e
pipenv run coverage xml
poetry run make test-e2e-provider
poetry run coverage xml
- name: run testspace
if: ${{ always() }}
run: |
testspace [e2e]unittest.xml --link=codecov
testspace [e2e-provider]unittest.xml --link=codecov
- if: ${{ always() }}
uses: codecov/codecov-action@v2
build:
test-e2e-rest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: actions/setup-node@v2
with:
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- uses: testspace-com/setup-testspace@v1
with:
domain: ${{github.repository_owner}}
- id: cache-poetry
uses: actions/cache@v2.1.7
with:
path: ~/.cache/pypoetry/virtualenvs
key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
- name: prepare
env:
INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
run: |
scripts/ci_prepare.sh
docker-compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v2.1.7
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/**') }}
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
run: |
cd web
npm i
npm run build
- name: run e2e
run: |
poetry run make test-e2e-rest
poetry run coverage xml
- name: run testspace
if: ${{ always() }}
run: |
testspace [e2e-rest]unittest.xml --link=codecov
- if: ${{ always() }}
uses: codecov/codecov-action@v2
ci-core-mark:
needs:
- lint-pylint
- lint-black
- lint-isort
- lint-bandit
- lint-pyright
- lint
- test-migrations
- test-migrations-from-stable
- test-unittest
- test-integration
- test-e2e
- test-e2e-rest
- test-e2e-provider
runs-on: ubuntu-latest
steps:
- run: echo mark
build:
needs: ci-core-mark
runs-on: ubuntu-latest
timeout-minutes: 120
strategy:
fail-fast: false
matrix:
arch:
- 'linux/amd64'
steps:
- uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: prepare variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.HARBOR_USERNAME }}
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
run: |
python ./scripts/gh_env.py
- name: Login to Container Registry
@@ -317,3 +317,4 @@ jobs:
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
platforms: ${{ matrix.arch }}

.github/workflows/ci-outpost.yml (vendored, 56 changes)
@@ -17,7 +17,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2
with:
go-version: '^1.16.3'
go-version: "^1.17"
- name: Run linter
run: |
# Create folder structure for go embeds
@@ -30,24 +30,35 @@ jobs:
-w /app \
golangci/golangci-lint:v1.39.0 \
golangci-lint run -v --timeout 200s
ci-outpost-mark:
needs:
- lint-golint
runs-on: ubuntu-latest
steps:
- run: echo mark
build:
timeout-minutes: 120
needs:
- lint-golint
- ci-outpost-mark
strategy:
fail-fast: false
matrix:
type:
- proxy
- ldap
arch:
- 'linux/amd64'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: prepare variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.HARBOR_USERNAME }}
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
run: |
python ./scripts/gh_env.py
- name: Login to Container Registry
@@ -68,3 +79,42 @@ jobs:
file: ${{ matrix.type }}.Dockerfile
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
platforms: ${{ matrix.arch }}
build-outpost-binary:
timeout-minutes: 120
needs:
- ci-outpost-mark
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
type:
- proxy
- ldap
goos: [linux]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2
with:
go-version: "^1.17"
- uses: actions/setup-node@v2
with:
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Build web
run: |
cd web
npm install
npm run build-proxy
- name: Build outpost
run: |
set -x
export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }}
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- uses: actions/upload-artifact@v2
with:
name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
path: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}

.github/workflows/ci-web.yml (vendored, 8 changes)
@@ -65,12 +65,18 @@ jobs:
run: |
cd web
npm run lit-analyse
build:
ci-web-mark:
needs:
- lint-eslint
- lint-prettier
- lint-lit-analyse
runs-on: ubuntu-latest
steps:
- run: echo mark
build:
needs:
- ci-web-mark
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2

.github/workflows/release-publish.yml (vendored, 130 changes)
@@ -30,14 +30,14 @@ jobs:
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik:2021.10.4,
beryju/authentik:2021.12.5,
beryju/authentik:latest,
ghcr.io/goauthentik/server:2021.10.4,
ghcr.io/goauthentik/server:2021.12.5,
ghcr.io/goauthentik/server:latest
platforms: linux/amd64,linux/arm64
context: .
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.10.4', 'rc') }}
if: ${{ github.event_name == 'release' && !contains('2021.12.5', 'rc') }}
run: |
docker pull beryju/authentik:latest
docker tag beryju/authentik:latest beryju/authentik:stable
@@ -45,13 +45,19 @@ jobs:
docker pull ghcr.io/goauthentik/server:latest
docker tag ghcr.io/goauthentik/server:latest ghcr.io/goauthentik/server:stable
docker push ghcr.io/goauthentik/server:stable
build-proxy:
build-outpost:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
type:
- proxy
- ldap
steps:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2
with:
go-version: "^1.15"
go-version: "^1.17"
- name: Set up QEMU
uses: docker/setup-qemu-action@v1.2.0
- name: Set up Docker Buildx
@@ -72,68 +78,65 @@ jobs:
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik-proxy:2021.10.4,
beryju/authentik-proxy:latest,
ghcr.io/goauthentik/proxy:2021.10.4,
ghcr.io/goauthentik/proxy:latest
file: proxy.Dockerfile
beryju/authentik-${{ matrix.type }}:2021.12.5,
beryju/authentik-${{ matrix.type }}:latest,
ghcr.io/goauthentik/${{ matrix.type }}:2021.12.5,
ghcr.io/goauthentik/${{ matrix.type }}:latest
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.10.4', 'rc') }}
if: ${{ github.event_name == 'release' && !contains('2021.12.5', 'rc') }}
run: |
docker pull beryju/authentik-proxy:latest
docker tag beryju/authentik-proxy:latest beryju/authentik-proxy:stable
docker push beryju/authentik-proxy:stable
docker pull ghcr.io/goauthentik/proxy:latest
docker tag ghcr.io/goauthentik/proxy:latest ghcr.io/goauthentik/proxy:stable
docker push ghcr.io/goauthentik/proxy:stable
build-ldap:
docker pull beryju/authentik-${{ matrix.type }}:latest
docker tag beryju/authentik-${{ matrix.type }}:latest beryju/authentik-${{ matrix.type }}:stable
docker push beryju/authentik-${{ matrix.type }}:stable
docker pull ghcr.io/goauthentik/${{ matrix.type }}:latest
docker tag ghcr.io/goauthentik/${{ matrix.type }}:latest ghcr.io/goauthentik/${{ matrix.type }}:stable
docker push ghcr.io/goauthentik/${{ matrix.type }}:stable
build-outpost-binary:
timeout-minutes: 120
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
type:
- proxy
- ldap
goos: [linux, darwin]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2
with:
go-version: "^1.15"
- name: Set up QEMU
uses: docker/setup-qemu-action@v1.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Docker Login Registry
uses: docker/login-action@v1
go-version: "^1.17"
- uses: actions/setup-node@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Building Docker Image
uses: docker/build-push-action@v2
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik-ldap:2021.10.4,
beryju/authentik-ldap:latest,
ghcr.io/goauthentik/ldap:2021.10.4,
ghcr.io/goauthentik/ldap:latest
file: ldap.Dockerfile
platforms: linux/amd64,linux/arm64
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.10.4', 'rc') }}
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Build web
run: |
docker pull beryju/authentik-ldap:latest
docker tag beryju/authentik-ldap:latest beryju/authentik-ldap:stable
docker push beryju/authentik-ldap:stable
docker pull ghcr.io/goauthentik/ldap:latest
docker tag ghcr.io/goauthentik/ldap:latest ghcr.io/goauthentik/ldap:stable
docker push ghcr.io/goauthentik/ldap:stable
cd web
npm install
npm run build-proxy
- name: Build outpost
run: |
set -x
export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }}
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- name: Upload binaries to release
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
tag: ${{ github.ref }}
test-release:
needs:
- build-server
- build-proxy
- build-ldap
- build-outpost
- build-outpost-binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
@@ -147,20 +150,17 @@ jobs:
docker-compose run -u root server test
sentry-release:
needs:
- test-release
- build-server
- build-outpost
- build-outpost-binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: '16'
- name: Build web api client and web ui
- name: Get static files from docker image
run: |
export NODE_ENV=production
cd web
npm i
npm run build
docker pull ghcr.io/goauthentik/server:latest
container=$(docker container create ghcr.io/goauthentik/server:latest)
docker cp ${container}:web/ .
- name: Create a Sentry.io release
uses: getsentry/action-release@v1
if: ${{ github.event_name == 'release' }}
@@ -170,7 +170,7 @@ jobs:
SENTRY_PROJECT: authentik
SENTRY_URL: https://sentry.beryju.org
with:
version: authentik@2021.10.4
version: authentik@2021.12.5
environment: beryjuorg-prod
sourcemaps: './web/dist'
url_prefix: '~/static/dist'

.github/workflows/release-tag.yml (vendored, 1 change)
@@ -15,6 +15,7 @@ jobs:
run: |
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker buildx install
docker build \
--no-cache \
-t testing:latest \

.github/workflows/translation-compile.yml (vendored, 21 changes)
@@ -4,6 +4,9 @@ on:
branches: [ master ]
paths:
- '/locale/'
pull_request:
paths:
- '/locale/'
schedule:
- cron: "0 */2 * * *"
workflow_dispatch:
@@ -19,21 +22,35 @@ jobs:
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- id: cache-poetry
uses: actions/cache@v2.1.7
with:
python-version: '3.9'
path: ~/.cache/pypoetry/virtualenvs
key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
- name: prepare
env:
INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
run: |
sudo apt-get update
sudo apt-get install -y gettext
scripts/ci_prepare.sh
- name: run compile
run: pipenv run ./manage.py compilemessages
run: poetry run ./manage.py compilemessages
- name: Create Pull Request
uses: peter-evans/create-pull-request@v3
id: cpr
with:
token: ${{ secrets.GITHUB_TOKEN }}
branch: compile-backend-translation
commit-message: "core: compile backend translations"
title: "core: compile backend translations"
body: "core: compile backend translations"
delete-branch: true
signoff: true
- name: Enable Pull Request Automerge
if: steps.cpr.outputs.pull-request-operation == 'created'
uses: peter-evans/enable-pull-request-automerge@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash

.github/workflows/web-api-publish.yml (vendored, 9 changes)
@@ -30,10 +30,19 @@ jobs:
npm i @goauthentik/api@$VERSION
- name: Create Pull Request
uses: peter-evans/create-pull-request@v3
id: cpr
with:
token: ${{ secrets.GITHUB_TOKEN }}
branch: update-web-api-client
commit-message: "web: Update Web API Client version"
title: "web: Update Web API Client version"
body: "web: Update Web API Client version"
delete-branch: true
signoff: true
- name: Enable Pull Request Automerge
if: steps.cpr.outputs.pull-request-operation == 'created'
uses: peter-evans/enable-pull-request-automerge@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash

.gitignore (vendored, 4 changes)
@@ -66,7 +66,9 @@ coverage.xml
unittest.xml

# Translations
*.mo
# Have to include binary mo files as they are annoying to compile at build time
# since a full postgres and redis instance are required
# *.mo

# Django stuff:

.python-version (Normal file, 1 change)
@@ -0,0 +1 @@
3.9.7
.vscode/settings.json (vendored, 4 changes)
@@ -10,7 +10,9 @@
"plex",
"saml",
"totp",
"webauthn"
"webauthn",
"traefik",
"passwordless"
],
"python.linting.pylintEnabled": true,
"todo-tree.tree.showCountsInTree": true,

Dockerfile (76 changes)
@@ -1,75 +1,68 @@
# Stage 1: Lock python dependencies
FROM docker.io/python:3.9-slim-bullseye as locker
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/node:16 as website-builder

COPY ./Pipfile /app/
COPY ./Pipfile.lock /app/

WORKDIR /app/

RUN pip install pipenv && \
pipenv lock -r > requirements.txt && \
pipenv lock -r --dev-only > requirements-dev.txt

# Stage 2: Build website
FROM docker.io/node:16 as website-builder

COPY ./website /static/
COPY ./website /work/website/

ENV NODE_ENV=production
RUN cd /static && npm i && npm run build-docs-only
RUN cd /work/website && npm i && npm run build-docs-only

# Stage 3: Build webui
FROM docker.io/node:16 as web-builder
# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/node:16 as web-builder

COPY ./web /static/
COPY ./web /work/web/
COPY ./website /work/website/

ENV NODE_ENV=production
RUN cd /static && npm i && npm run build
RUN cd /work/web && npm i && npm run build

# Stage 4: Build go proxy
FROM docker.io/golang:1.17.3-bullseye AS builder
# Stage 3: Build go proxy
FROM docker.io/golang:1.17.5-bullseye AS builder

WORKDIR /work

COPY --from=web-builder /static/robots.txt /work/web/robots.txt
COPY --from=web-builder /static/security.txt /work/web/security.txt
COPY --from=web-builder /static/dist/ /work/web/dist/
COPY --from=web-builder /static/authentik/ /work/web/authentik/
COPY --from=website-builder /static/help/ /work/website/help/
COPY --from=web-builder /work/web/robots.txt /work/web/robots.txt
COPY --from=web-builder /work/web/security.txt /work/web/security.txt

COPY ./cmd /work/cmd
COPY ./web/static.go /work/web/static.go
COPY ./website/static.go /work/website/static.go
COPY ./internal /work/internal
COPY ./go.mod /work/go.mod
COPY ./go.sum /work/go.sum

RUN go build -o /work/authentik ./cmd/server/main.go

# Stage 5: Run
FROM docker.io/python:3.9-slim-bullseye
# Stage 4: Run
FROM docker.io/python:3.10.1-slim-bullseye

LABEL org.opencontainers.image.url https://goauthentik.io
LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik

WORKDIR /
COPY --from=locker /app/requirements.txt /
COPY --from=locker /app/requirements-dev.txt /

ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH

COPY ./pyproject.toml /
COPY ./poetry.lock /

RUN apt-get update && \
apt-get install -y --no-install-recommends curl ca-certificates gnupg git runit && \
curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
echo "deb http://apt.postgresql.org/pub/repos/apt bullseye-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
apt-get update && \
apt-get install -y --no-install-recommends libpq-dev postgresql-client build-essential libxmlsec1-dev pkg-config libmaxminddb0 && \
pip install -r /requirements.txt --no-cache-dir && \
apt-get install -y --no-install-recommends \
curl ca-certificates gnupg git runit libpq-dev \
postgresql-client build-essential libxmlsec1-dev \
pkg-config libmaxminddb0 && \
pip install poetry && \
poetry config virtualenvs.create false && \
poetry install --no-dev && \
rm -rf ~/.cache/pypoetry && \
apt-get remove --purge -y build-essential git && \
apt-get autoremove --purge -y && \
apt-get clean && \
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
mkdir /backups && \
chown authentik:authentik /backups
mkdir -p /backups /certs /media && \
mkdir -p /authentik/.ssh && \
chown authentik:authentik /backups /certs /media /authentik/.ssh

COPY ./authentik/ /authentik
COPY ./pyproject.toml /
@@ -78,6 +71,9 @@ COPY ./tests /tests
COPY ./manage.py /
COPY ./lifecycle/ /lifecycle
COPY --from=builder /work/authentik /authentik-proxy
COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=website-builder /work/website/help/ /website/help/

USER authentik

Makefile (64 changes)
@@ -4,16 +4,19 @@ UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.npm_version)

all: lint-fix lint test gen
all: lint-fix lint test gen web

test-integration:
coverage run manage.py test -v 3 tests/integration
coverage run manage.py test tests/integration

test-e2e:
coverage run manage.py test --failfast -v 3 tests/e2e
test-e2e-provider:
coverage run manage.py test tests/e2e/test_provider*

test-e2e-rest:
coverage run manage.py test tests/e2e/test_flows* tests/e2e/test_source*

test:
coverage run manage.py test -v 3 authentik
coverage run manage.py test authentik
coverage html
coverage report

@@ -32,10 +35,12 @@ lint-fix:
lint:
bandit -r authentik tests lifecycle -x node_modules
pylint authentik tests lifecycle
golangci-lint run -v

i18n-extract:
i18n-extract: i18n-extract-core web-extract

i18n-extract-core:
./manage.py makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en
cd web && npm run extract

gen-build:
./manage.py spectacular --file schema.yml
@@ -48,7 +53,7 @@ gen-web:
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
ghcr.io/beryju/openapi-generator generate \
openapitools/openapi-generator-cli generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/web-api \
@@ -67,12 +72,13 @@ gen-outpost:
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
openapitools/openapi-generator-cli generate \
openapitools/openapi-generator-cli:v5.2.1 generate \
-i /local/schema.yml \
-g go \
-o /local/api \
-c /local/config.yaml
go mod edit -replace goauthentik.io/api=./api
rm -rf config.yaml ./templates/

gen: gen-build gen-clean gen-web

@@ -81,3 +87,43 @@ migrate:

run:
go run -v cmd/server/main.go

web-watch:
cd web && npm run watch

web: web-lint-fix web-lint web-extract

web-lint-fix:
cd web && npm run prettier

web-lint:
cd web && npm run lint
cd web && npm run lit-analyse

web-extract:
cd web && npm run extract

# These targets are use by GitHub actions to allow usage of matrix
# which makes the YAML File a lot smaller

ci--meta-debug:
python -V
node --version

ci-pylint: ci--meta-debug
pylint authentik tests lifecycle

ci-black: ci--meta-debug
black --check authentik tests lifecycle

ci-isort: ci--meta-debug
isort --check authentik tests lifecycle

ci-bandit: ci--meta-debug
bandit -r authentik tests lifecycle

ci-pyright: ci--meta-debug
pyright e2e lifecycle

ci-pending-migrations: ci--meta-debug
./manage.py makemigrations --check

Pipfile (64 changes)
@@ -1,64 +0,0 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true

[packages]
boto3 = "*"
celery = "*"
channels = "*"
channels-redis = "*"
dacite = "*"
defusedxml = "*"
django = "*"
django-dbbackup = { git = 'https://github.com/django-dbbackup/django-dbbackup.git', ref = '9d1909c30a3271c8c9c8450add30d6e0b996e145' }
django-filter = "*"
django-guardian = "*"
django-model-utils = "*"
django-otp = "*"
django-prometheus = "*"
django-redis = "*"
django-storages = "*"
djangorestframework = "*"
djangorestframework-guardian = "*"
docker = "*"
drf-spectacular = "*"
facebook-sdk = "*"
geoip2 = "*"
gunicorn = "*"
kubernetes = "==v19.15.0"
ldap3 = "*"
lxml = "*"
packaging = "*"
psycopg2-binary = "*"
pycryptodome = "*"
pyjwt = "*"
pyyaml = "*"
requests-oauthlib = "*"
sentry-sdk = "*"
service_identity = "*"
structlog = "*"
swagger-spec-validator = "*"
twisted = "==21.7.0"
urllib3 = {extras = ["secure"],version = "*"}
uvicorn = {extras = ["standard"],version = "*"}
webauthn = "*"
xmlsec = "*"
duo-client = "*"
ua-parser = "*"
deepmerge = "*"
colorama = "*"
codespell = "*"

[dev-packages]
bandit = "*"
black = "==21.9b0"
bump2version = "*"
colorama = "*"
coverage = {extras = ["toml"],version = "*"}
pylint = "*"
pylint-django = "*"
pytest = "*"
pytest-django = "*"
selenium = "*"
requests-mock = "*"
Pipfile.lock (generated, 2339 changes)
File diff suppressed because it is too large.
README.md (20 changes)
@@ -38,3 +38,23 @@ See [Development Documentation](https://goauthentik.io/developer-docs/?utm_sourc
## Security

See [SECURITY.md](SECURITY.md)

## Sponsors

This project is proudly sponsored by:

<p>
<a href="https://www.digitalocean.com/?utm_medium=opensource&utm_source=goauthentik.io">
<img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" width="201px">
</a>
</p>

DigitalOcean provides development and testing resources for authentik.

<p>
<a href="https://www.netlify.com">
<img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" />
</a>
</p>

Netlify hosts the [goauthentik.io](goauthentik.io) site.

@@ -6,8 +6,8 @@

| Version | Supported |
| ---------- | ------------------ |
| 2021.9.x | :white_check_mark: |
| 2021.10.x | :white_check_mark: |
| 2021.12.x | :white_check_mark: |

## Reporting a Vulnerability

@@ -1,3 +1,3 @@
"""authentik"""
__version__ = "2021.10.4"
__version__ = "2021.12.5"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

@ -1,13 +1,6 @@
|
||||
"""authentik administration metrics"""
|
||||
import time
|
||||
from collections import Counter
|
||||
from datetime import timedelta
|
||||
|
||||
from django.db.models import Count, ExpressionWrapper, F
|
||||
from django.db.models.fields import DurationField
|
||||
from django.db.models.functions import ExtractHour
|
||||
from django.utils.timezone import now
|
||||
from drf_spectacular.utils import extend_schema, extend_schema_field
|
||||
from guardian.shortcuts import get_objects_for_user
|
||||
from rest_framework.fields import IntegerField, SerializerMethodField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
@ -15,31 +8,7 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.events.models import Event, EventAction
|
||||
|
||||
|
||||
def get_events_per_1h(**filter_kwargs) -> list[dict[str, int]]:
|
||||
"""Get event count by hour in the last day, fill with zeros"""
|
||||
date_from = now() - timedelta(days=1)
|
||||
result = (
|
||||
Event.objects.filter(created__gte=date_from, **filter_kwargs)
|
||||
.annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField()))
|
||||
.annotate(age_hours=ExtractHour("age"))
|
||||
.values("age_hours")
|
||||
.annotate(count=Count("pk"))
|
||||
.order_by("age_hours")
|
||||
)
|
||||
data = Counter({int(d["age_hours"]): d["count"] for d in result})
|
||||
results = []
|
||||
_now = now()
|
||||
for hour in range(0, -24, -1):
|
||||
results.append(
|
||||
{
|
||||
"x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000,
|
||||
"y_cord": data[hour * -1],
|
||||
}
|
||||
)
|
||||
return results
|
||||
from authentik.events.models import EventAction
|
||||
|
||||
|
||||
class CoordinateSerializer(PassiveSerializer):
|
||||
@ -58,12 +27,22 @@ class LoginMetricsSerializer(PassiveSerializer):
|
||||
@extend_schema_field(CoordinateSerializer(many=True))
|
||||
def get_logins_per_1h(self, _):
|
||||
"""Get successful logins per hour for the last 24 hours"""
|
||||
return get_events_per_1h(action=EventAction.LOGIN)
|
||||
user = self.context["user"]
|
||||
return (
|
||||
get_objects_for_user(user, "authentik_events.view_event")
|
||||
.filter(action=EventAction.LOGIN)
|
||||
.get_events_per_hour()
|
||||
)
|
||||
|
||||
@extend_schema_field(CoordinateSerializer(many=True))
|
||||
def get_logins_failed_per_1h(self, _):
|
||||
"""Get failed logins per hour for the last 24 hours"""
|
||||
return get_events_per_1h(action=EventAction.LOGIN_FAILED)
|
||||
user = self.context["user"]
|
||||
return (
|
||||
get_objects_for_user(user, "authentik_events.view_event")
|
||||
.filter(action=EventAction.LOGIN_FAILED)
|
||||
.get_events_per_hour()
|
||||
)
|
||||
|
||||
|
||||
class AdministrationMetricsViewSet(APIView):
|
||||
@ -75,4 +54,5 @@ class AdministrationMetricsViewSet(APIView):
|
||||
def get(self, request: Request) -> Response:
|
||||
"""Login Metrics per 1h"""
|
||||
serializer = LoginMetricsSerializer(True)
|
||||
serializer.context["user"] = request.user
|
||||
return Response(serializer.data)
|
||||
|
||||
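
The serializer methods above now delegate hourly aggregation to a `get_events_per_hour()` queryset method instead of the module-level `get_events_per_1h()` helper removed at the top of this hunk. That method lives in `authentik.events.models`, which is not part of this excerpt; the sketch below is an assumption that simply transplants the removed helper onto a custom queryset.

```python
# Assumed sketch only: mirrors the removed get_events_per_1h() helper as a
# queryset method; authentik's real EventQuerySet may differ.
import time
from collections import Counter
from datetime import timedelta

from django.db.models import Count, ExpressionWrapper, F, QuerySet
from django.db.models.fields import DurationField
from django.db.models.functions import ExtractHour
from django.utils.timezone import now


class EventQuerySet(QuerySet):
    """QuerySet with the per-hour aggregation used by the metrics API"""

    def get_events_per_hour(self) -> list[dict[str, int]]:
        """Get event count by hour in the last day, filled with zeros"""
        date_from = now() - timedelta(days=1)
        result = (
            self.filter(created__gte=date_from)
            .annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField()))
            .annotate(age_hours=ExtractHour("age"))
            .values("age_hours")
            .annotate(count=Count("pk"))
            .order_by("age_hours")
        )
        data = Counter({int(d["age_hours"]): d["count"] for d in result})
        _now = now()
        return [
            {
                "x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000,
                "y_cord": data[hour * -1],
            }
            for hour in range(0, -24, -1)
        ]
```

If `Event.objects` is built from such a queryset (for example via `EventQuerySet.as_manager()`), the chained `get_objects_for_user(...).filter(...).get_events_per_hour()` calls above resolve to this aggregation while also respecting per-user object permissions.
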
@ -86,7 +86,7 @@ class SystemSerializer(PassiveSerializer):
|
||||
def get_embedded_outpost_host(self, request: Request) -> str:
|
||||
"""Get the FQDN configured on the embedded outpost"""
|
||||
outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
|
||||
if not outposts.exists():
|
||||
if not outposts.exists(): # pragma: no cover
|
||||
return ""
|
||||
return outposts.first().config.authentik_host
|
||||
|
||||
|
||||
@ -36,7 +36,7 @@ class TaskSerializer(PassiveSerializer):
|
||||
are pickled in cache. In that case, just delete the info"""
|
||||
try:
|
||||
return super().to_representation(instance)
|
||||
except AttributeError:
|
||||
except AttributeError: # pragma: no cover
|
||||
if isinstance(self.instance, list):
|
||||
for inst in self.instance:
|
||||
inst.delete()
|
||||
@ -95,7 +95,7 @@ class TaskViewSet(ViewSet):
|
||||
_("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}),
|
||||
)
|
||||
return Response(status=204)
|
||||
except ImportError: # pragma: no cover
|
||||
except (ImportError, AttributeError): # pragma: no cover
|
||||
# if we get an import error, the module path has probably changed
|
||||
task.delete()
|
||||
return Response(status=500)
|
||||
|
||||
@ -23,6 +23,6 @@ class WorkerView(APIView):
|
||||
"""Get currently connected worker count."""
|
||||
count = len(CELERY_APP.control.ping(timeout=0.5))
|
||||
# In debug we run with `CELERY_TASK_ALWAYS_EAGER`, so tasks are run on the main process
|
||||
if settings.DEBUG:
|
||||
if settings.DEBUG: # pragma: no cover
|
||||
count += 1
|
||||
return Response({"count": count})
|
||||
|
||||
@ -54,7 +54,7 @@ def clear_update_notifications():
|
||||
|
||||
|
||||
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||
@prefill_task()
|
||||
@prefill_task
|
||||
def update_latest_version(self: MonitoredTask):
|
||||
"""Update latest version info"""
|
||||
if CONFIG.y_bool("disable_update_check"):
|
||||
|
||||
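
The change from `@prefill_task()` to `@prefill_task` (here and again in `authentik/core/tasks.py` further down) implies `prefill_task` is now applied as a plain decorator rather than a decorator factory. A minimal, generic illustration of the difference, not authentik's actual implementation:

```python
def as_factory():
    """Old style: used as @as_factory() - the call returns the real decorator"""

    def decorator(func):
        return func

    return decorator


def as_decorator(func):
    """New style: used as @as_decorator - the name itself is the decorator"""
    return func


@as_factory()
def task_old():
    ...


@as_decorator
def task_new():
    ...
```
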
@ -8,6 +8,7 @@ from authentik import __version__
|
||||
from authentik.core.models import Group, User
|
||||
from authentik.core.tasks import clean_expired_models
|
||||
from authentik.events.monitored_tasks import TaskResultStatus
|
||||
from authentik.managed.tasks import managed_reconcile
|
||||
|
||||
|
||||
class TestAdminAPI(TestCase):
|
||||
@ -94,5 +95,7 @@ class TestAdminAPI(TestCase):
|
||||
|
||||
def test_system(self):
|
||||
"""Test system API"""
|
||||
# pyright: reportGeneralTypeIssues=false
|
||||
managed_reconcile() # pylint: disable=no-value-for-parameter
|
||||
response = self.client.get(reverse("authentik_api:admin_system"))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
@ -3,8 +3,13 @@ from django.core.cache import cache
|
||||
from django.test import TestCase
|
||||
from requests_mock import Mocker
|
||||
|
||||
from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version
|
||||
from authentik.admin.tasks import (
|
||||
VERSION_CACHE_KEY,
|
||||
clear_update_notifications,
|
||||
update_latest_version,
|
||||
)
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.lib.config import CONFIG
|
||||
|
||||
RESPONSE_VALID = {
|
||||
"$schema": "https://version.goauthentik.io/schema.json",
|
||||
@ -56,3 +61,23 @@ class TestAdminTasks(TestCase):
|
||||
action=EventAction.UPDATE_AVAILABLE, context__new_version="0.0.0"
|
||||
).exists()
|
||||
)
|
||||
|
||||
def test_version_disabled(self):
|
||||
"""Test Update checker while its disabled"""
|
||||
with CONFIG.patch("disable_update_check", True):
|
||||
update_latest_version.delay().get()
|
||||
self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
|
||||
|
||||
def test_clear_update_notifications(self):
|
||||
"""Test clear of previous notification"""
|
||||
Event.objects.create(
|
||||
action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"}
|
||||
)
|
||||
Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"})
|
||||
Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={})
|
||||
clear_update_notifications()
|
||||
self.assertFalse(
|
||||
Event.objects.filter(
|
||||
action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1"
|
||||
).exists()
|
||||
)
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
"""API Authentication"""
|
||||
from base64 import b64decode
|
||||
from binascii import Error
|
||||
from typing import Any, Optional, Union
|
||||
from typing import Any, Optional
|
||||
|
||||
from django.conf import settings
|
||||
from rest_framework.authentication import BaseAuthentication, get_authorization_header
|
||||
@ -69,7 +69,7 @@ def token_secret_key(value: str) -> Optional[User]:
|
||||
class TokenAuthentication(BaseAuthentication):
|
||||
"""Token-based authentication using HTTP Bearer authentication"""
|
||||
|
||||
def authenticate(self, request: Request) -> Union[tuple[User, Any], None]:
|
||||
def authenticate(self, request: Request) -> tuple[User, Any] | None:
|
||||
"""Token-based authentication using HTTP Bearer authentication"""
|
||||
auth = get_authorization_header(request)
|
||||
|
||||
|
||||
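
The annotation rewrite above replaces `Union[tuple[User, Any], None]` with PEP 604 syntax. Evaluated at import time, `X | Y` in annotations needs Python 3.10 (or postponed annotations via `from __future__ import annotations`), which suggests the supported Python baseline moved accordingly. Equivalent spellings, using `str` as a stand-in type so the snippet stays self-contained:

```python
from typing import Any, Optional, Union

# All three annotations describe the same type:
OldUnion = Union[tuple[str, Any], None]
OldOptional = Optional[tuple[str, Any]]
NewPep604 = tuple[str, Any] | None  # requires Python 3.10 when evaluated at runtime
```
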
@ -1,18 +0,0 @@
|
||||
"""Throttling classes"""
|
||||
from typing import Type
|
||||
|
||||
from django.views import View
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.throttling import ScopedRateThrottle
|
||||
|
||||
|
||||
class SessionThrottle(ScopedRateThrottle):
|
||||
"""Throttle based on session key"""
|
||||
|
||||
def allow_request(self, request: Request, view):
|
||||
if request._request.user.is_superuser:
|
||||
return True
|
||||
return super().allow_request(request, view)
|
||||
|
||||
def get_cache_key(self, request: Request, view: Type[View]) -> str:
|
||||
return f"authentik-throttle-session-{request._request.session.session_key}"
|
||||
@ -5,7 +5,14 @@ from django.conf import settings
|
||||
from django.db import models
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
|
||||
from rest_framework.fields import BooleanField, CharField, ChoiceField, IntegerField, ListField
|
||||
from rest_framework.fields import (
|
||||
BooleanField,
|
||||
CharField,
|
||||
ChoiceField,
|
||||
FloatField,
|
||||
IntegerField,
|
||||
ListField,
|
||||
)
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
@ -24,13 +31,19 @@ class Capabilities(models.TextChoices):
|
||||
CAN_BACKUP = "can_backup"
|
||||
|
||||
|
||||
class ErrorReportingConfigSerializer(PassiveSerializer):
|
||||
"""Config for error reporting"""
|
||||
|
||||
enabled = BooleanField(read_only=True)
|
||||
environment = CharField(read_only=True)
|
||||
send_pii = BooleanField(read_only=True)
|
||||
traces_sample_rate = FloatField(read_only=True)
|
||||
|
||||
|
||||
class ConfigSerializer(PassiveSerializer):
|
||||
"""Serialize authentik Config into DRF Object"""
|
||||
|
||||
error_reporting_enabled = BooleanField(read_only=True)
|
||||
error_reporting_environment = CharField(read_only=True)
|
||||
error_reporting_send_pii = BooleanField(read_only=True)
|
||||
|
||||
error_reporting = ErrorReportingConfigSerializer(required=True)
|
||||
capabilities = ListField(child=ChoiceField(choices=Capabilities.choices))
|
||||
|
||||
cache_timeout = IntegerField(required=True)
|
||||
@ -66,9 +79,12 @@ class ConfigView(APIView):
|
||||
"""Retrieve public configuration options"""
|
||||
config = ConfigSerializer(
|
||||
{
|
||||
"error_reporting_enabled": CONFIG.y("error_reporting.enabled"),
|
||||
"error_reporting_environment": CONFIG.y("error_reporting.environment"),
|
||||
"error_reporting_send_pii": CONFIG.y("error_reporting.send_pii"),
|
||||
"error_reporting": {
|
||||
"enabled": CONFIG.y("error_reporting.enabled"),
|
||||
"environment": CONFIG.y("error_reporting.environment"),
|
||||
"send_pii": CONFIG.y("error_reporting.send_pii"),
|
||||
"traces_sample_rate": float(CONFIG.y("error_reporting.sample_rate", 0.4)),
|
||||
},
|
||||
"capabilities": self.get_capabilities(),
|
||||
"cache_timeout": int(CONFIG.y("redis.cache_timeout")),
|
||||
"cache_timeout_flows": int(CONFIG.y("redis.cache_timeout_flows")),
|
||||
|
||||
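
After this change the three flat `error_reporting_*` keys are grouped into one nested object that also exposes the Sentry traces sample rate. Roughly the shape the endpoint now returns (all values, and the `can_backup` capability, are illustrative):

```python
config = {
    "error_reporting": {
        "enabled": False,
        "environment": "customer",
        "send_pii": False,
        "traces_sample_rate": 0.4,
    },
    "capabilities": ["can_backup"],
    "cache_timeout": 300,
    "cache_timeout_flows": 300,
}
```
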
@ -46,11 +46,7 @@ from authentik.policies.expiry.api import PasswordExpiryPolicyViewSet
|
||||
from authentik.policies.expression.api import ExpressionPolicyViewSet
|
||||
from authentik.policies.hibp.api import HaveIBeenPwendPolicyViewSet
|
||||
from authentik.policies.password.api import PasswordPolicyViewSet
|
||||
from authentik.policies.reputation.api import (
|
||||
IPReputationViewSet,
|
||||
ReputationPolicyViewSet,
|
||||
UserReputationViewSet,
|
||||
)
|
||||
from authentik.policies.reputation.api import ReputationPolicyViewSet, ReputationViewSet
|
||||
from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet
|
||||
from authentik.providers.oauth2.api.provider import OAuth2ProviderViewSet
|
||||
from authentik.providers.oauth2.api.scope import ScopeMappingViewSet
|
||||
@ -151,8 +147,7 @@ router.register("policies/event_matcher", EventMatcherPolicyViewSet)
|
||||
router.register("policies/haveibeenpwned", HaveIBeenPwendPolicyViewSet)
|
||||
router.register("policies/password_expiry", PasswordExpiryPolicyViewSet)
|
||||
router.register("policies/password", PasswordPolicyViewSet)
|
||||
router.register("policies/reputation/users", UserReputationViewSet)
|
||||
router.register("policies/reputation/ips", IPReputationViewSet)
|
||||
router.register("policies/reputation/scores", ReputationViewSet)
|
||||
router.register("policies/reputation", ReputationPolicyViewSet)
|
||||
|
||||
router.register("providers/all", ProviderViewSet)
|
||||
|
||||
@ -5,6 +5,7 @@ from django.http.response import HttpResponseBadRequest
|
||||
from django.shortcuts import get_object_or_404
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
|
||||
from guardian.shortcuts import get_objects_for_user
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import ReadOnlyField
|
||||
from rest_framework.parsers import MultiPartParser
|
||||
@ -15,7 +16,7 @@ from rest_framework.viewsets import ModelViewSet
|
||||
from rest_framework_guardian.filters import ObjectPermissionsFilter
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.admin.api.metrics import CoordinateSerializer, get_events_per_1h
|
||||
from authentik.admin.api.metrics import CoordinateSerializer
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.providers import ProviderSerializer
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
@ -239,8 +240,10 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
|
||||
"""Metrics for application logins"""
|
||||
app = self.get_object()
|
||||
return Response(
|
||||
get_events_per_1h(
|
||||
get_objects_for_user(request.user, "authentik_events.view_event")
|
||||
.filter(
|
||||
action=EventAction.AUTHORIZE_APPLICATION,
|
||||
context__authorized_application__pk=app.pk.hex,
|
||||
)
|
||||
.get_events_per_hour()
|
||||
)
|
||||
|
||||
@ -1,9 +1,11 @@
|
||||
"""Groups API Viewset"""
|
||||
from json import loads
|
||||
|
||||
from django.db.models.query import QuerySet
|
||||
from django_filters.filters import ModelMultipleChoiceFilter
|
||||
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
|
||||
from django_filters.filterset import FilterSet
|
||||
from rest_framework.fields import CharField, JSONField
|
||||
from rest_framework.serializers import ListSerializer, ModelSerializer
|
||||
from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
from rest_framework_guardian.filters import ObjectPermissionsFilter
|
||||
|
||||
@ -62,6 +64,13 @@ class GroupSerializer(ModelSerializer):
|
||||
class GroupFilter(FilterSet):
|
||||
"""Filter for groups"""
|
||||
|
||||
attributes = CharFilter(
|
||||
field_name="attributes",
|
||||
lookup_expr="",
|
||||
label="Attributes",
|
||||
method="filter_attributes",
|
||||
)
|
||||
|
||||
members_by_username = ModelMultipleChoiceFilter(
|
||||
field_name="users__username",
|
||||
to_field_name="username",
|
||||
@ -72,10 +81,28 @@ class GroupFilter(FilterSet):
|
||||
queryset=User.objects.all(),
|
||||
)
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def filter_attributes(self, queryset, name, value):
|
||||
"""Filter attributes by query args"""
|
||||
try:
|
||||
value = loads(value)
|
||||
except ValueError:
|
||||
raise ValidationError(detail="filter: failed to parse JSON")
|
||||
if not isinstance(value, dict):
|
||||
raise ValidationError(detail="filter: value must be key:value mapping")
|
||||
qs = {}
|
||||
for key, _value in value.items():
|
||||
qs[f"attributes__{key}"] = _value
|
||||
try:
|
||||
_ = len(queryset.filter(**qs))
|
||||
return queryset.filter(**qs)
|
||||
except ValueError:
|
||||
return queryset
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Group
|
||||
fields = ["name", "is_superuser", "members_by_pk", "members_by_username"]
|
||||
fields = ["name", "is_superuser", "members_by_pk", "attributes", "members_by_username"]
|
||||
|
||||
|
||||
class GroupViewSet(UsedByMixin, ModelViewSet):
|
||||
|
||||
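
The new `attributes` filter accepts a JSON object as a query parameter and flattens it into `JSONField` lookups before filtering, falling back to the unfiltered queryset when a value cannot be coerced. A small sketch of that flattening, with example keys that are purely illustrative:

```python
from json import loads

raw = '{"environment": "prod", "team": "infra"}'  # e.g. ?attributes={"environment":"prod",...}
value = loads(raw)
lookups = {f"attributes__{key}": val for key, val in value.items()}
# lookups == {"attributes__environment": "prod", "attributes__team": "infra"}
# groups = Group.objects.filter(**lookups)  # as done in filter_attributes() above
```
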
@ -56,6 +56,7 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri
|
||||
"component",
|
||||
"verbose_name",
|
||||
"verbose_name_plural",
|
||||
"meta_model_name",
|
||||
]
|
||||
|
||||
|
||||
|
||||
@ -43,6 +43,7 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
|
||||
"assigned_application_name",
|
||||
"verbose_name",
|
||||
"verbose_name_plural",
|
||||
"meta_model_name",
|
||||
]
|
||||
|
||||
|
||||
|
||||
@ -48,6 +48,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
|
||||
"component",
|
||||
"verbose_name",
|
||||
"verbose_name_plural",
|
||||
"meta_model_name",
|
||||
"policy_engine_mode",
|
||||
"user_matching_mode",
|
||||
]
|
||||
@ -103,14 +104,14 @@ class SourceViewSet(
|
||||
)
|
||||
matching_sources: list[UserSettingSerializer] = []
|
||||
for source in _all_sources:
|
||||
user_settings = source.ui_user_settings
|
||||
user_settings = source.ui_user_settings()
|
||||
if not user_settings:
|
||||
continue
|
||||
policy_engine = PolicyEngine(source, request.user, request)
|
||||
policy_engine.build()
|
||||
if not policy_engine.passing:
|
||||
continue
|
||||
source_settings = source.ui_user_settings
|
||||
source_settings = source.ui_user_settings()
|
||||
source_settings.initial_data["object_uid"] = source.slug
|
||||
if not source_settings.is_valid():
|
||||
LOGGER.warning(source_settings.errors)
|
||||
|
||||
@ -3,6 +3,7 @@ from datetime import timedelta
|
||||
from json import loads
|
||||
from typing import Optional
|
||||
|
||||
from django.contrib.auth import update_session_auth_hash
|
||||
from django.db.models.query import QuerySet
|
||||
from django.db.transaction import atomic
|
||||
from django.db.utils import IntegrityError
|
||||
@ -38,7 +39,7 @@ from rest_framework.viewsets import ModelViewSet
|
||||
from rest_framework_guardian.filters import ObjectPermissionsFilter
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.admin.api.metrics import CoordinateSerializer, get_events_per_1h
|
||||
from authentik.admin.api.metrics import CoordinateSerializer
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.groups import GroupSerializer
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
@ -46,6 +47,7 @@ from authentik.core.api.utils import LinkSerializer, PassiveSerializer, is_dict
|
||||
from authentik.core.middleware import SESSION_IMPERSONATE_ORIGINAL_USER, SESSION_IMPERSONATE_USER
|
||||
from authentik.core.models import (
|
||||
USER_ATTRIBUTE_CHANGE_EMAIL,
|
||||
USER_ATTRIBUTE_CHANGE_NAME,
|
||||
USER_ATTRIBUTE_CHANGE_USERNAME,
|
||||
USER_ATTRIBUTE_SA,
|
||||
USER_ATTRIBUTE_TOKEN_EXPIRING,
|
||||
@ -55,6 +57,7 @@ from authentik.core.models import (
|
||||
User,
|
||||
)
|
||||
from authentik.events.models import EventAction
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.stages.email.models import EmailStage
|
||||
from authentik.stages.email.tasks import send_mails
|
||||
from authentik.stages.email.utils import TemplateEmailMessage
|
||||
@ -125,20 +128,41 @@ class UserSelfSerializer(ModelSerializer):
|
||||
|
||||
def validate_email(self, email: str):
|
||||
"""Check if the user is allowed to change their email"""
|
||||
if self.instance.group_attributes().get(USER_ATTRIBUTE_CHANGE_EMAIL, True):
|
||||
if self.instance.group_attributes().get(
|
||||
USER_ATTRIBUTE_CHANGE_EMAIL, CONFIG.y_bool("default_user_change_email", True)
|
||||
):
|
||||
return email
|
||||
if email != self.instance.email:
|
||||
raise ValidationError("Not allowed to change email.")
|
||||
return email
|
||||
|
||||
def validate_name(self, name: str):
|
||||
"""Check if the user is allowed to change their name"""
|
||||
if self.instance.group_attributes().get(
|
||||
USER_ATTRIBUTE_CHANGE_NAME, CONFIG.y_bool("default_user_change_name", True)
|
||||
):
|
||||
return name
|
||||
if name != self.instance.name:
|
||||
raise ValidationError("Not allowed to change name.")
|
||||
return name
|
||||
|
||||
def validate_username(self, username: str):
|
||||
"""Check if the user is allowed to change their username"""
|
||||
if self.instance.group_attributes().get(USER_ATTRIBUTE_CHANGE_USERNAME, True):
|
||||
if self.instance.group_attributes().get(
|
||||
USER_ATTRIBUTE_CHANGE_USERNAME, CONFIG.y_bool("default_user_change_username", True)
|
||||
):
|
||||
return username
|
||||
if username != self.instance.username:
|
||||
raise ValidationError("Not allowed to change username.")
|
||||
return username
|
||||
|
||||
def save(self, **kwargs):
|
||||
if self.instance:
|
||||
attributes: dict = self.instance.attributes
|
||||
attributes.update(self.validated_data.get("attributes", {}))
|
||||
self.validated_data["attributes"] = attributes
|
||||
return super().save(**kwargs)
|
||||
|
||||
class Meta:
|
||||
|
||||
model = User
|
||||
@ -179,19 +203,31 @@ class UserMetricsSerializer(PassiveSerializer):
|
||||
def get_logins_per_1h(self, _):
|
||||
"""Get successful logins per hour for the last 24 hours"""
|
||||
user = self.context["user"]
|
||||
return get_events_per_1h(action=EventAction.LOGIN, user__pk=user.pk)
|
||||
return (
|
||||
get_objects_for_user(user, "authentik_events.view_event")
|
||||
.filter(action=EventAction.LOGIN, user__pk=user.pk)
|
||||
.get_events_per_hour()
|
||||
)
|
||||
|
||||
@extend_schema_field(CoordinateSerializer(many=True))
|
||||
def get_logins_failed_per_1h(self, _):
|
||||
"""Get failed logins per hour for the last 24 hours"""
|
||||
user = self.context["user"]
|
||||
return get_events_per_1h(action=EventAction.LOGIN_FAILED, context__username=user.username)
|
||||
return (
|
||||
get_objects_for_user(user, "authentik_events.view_event")
|
||||
.filter(action=EventAction.LOGIN_FAILED, context__username=user.username)
|
||||
.get_events_per_hour()
|
||||
)
|
||||
|
||||
@extend_schema_field(CoordinateSerializer(many=True))
|
||||
def get_authorizations_per_1h(self, _):
|
||||
"""Get failed logins per hour for the last 24 hours"""
|
||||
user = self.context["user"]
|
||||
return get_events_per_1h(action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk)
|
||||
return (
|
||||
get_objects_for_user(user, "authentik_events.view_event")
|
||||
.filter(action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk)
|
||||
.get_events_per_hour()
|
||||
)
|
||||
|
||||
|
||||
class UsersFilter(FilterSet):
|
||||
@ -228,7 +264,11 @@ class UsersFilter(FilterSet):
|
||||
qs = {}
|
||||
for key, _value in value.items():
|
||||
qs[f"attributes__{key}"] = _value
|
||||
return queryset.filter(**qs)
|
||||
try:
|
||||
_ = len(queryset.filter(**qs))
|
||||
return queryset.filter(**qs)
|
||||
except ValueError:
|
||||
return queryset
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
@ -309,7 +349,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
||||
name=username,
|
||||
attributes={USER_ATTRIBUTE_SA: True, USER_ATTRIBUTE_TOKEN_EXPIRING: False},
|
||||
)
|
||||
if create_group:
|
||||
if create_group and self.request.user.has_perm("authentik_core.add_group"):
|
||||
group = Group.objects.create(
|
||||
name=username,
|
||||
)
|
||||
@ -338,6 +378,35 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
||||
).data
|
||||
return Response(serializer.initial_data)
|
||||
|
||||
@permission_required("authentik_core.reset_user_password")
|
||||
@extend_schema(
|
||||
request=inline_serializer(
|
||||
"UserPasswordSetSerializer",
|
||||
{
|
||||
"password": CharField(required=True),
|
||||
},
|
||||
),
|
||||
responses={
|
||||
204: "",
|
||||
400: "",
|
||||
},
|
||||
)
|
||||
@action(detail=True, methods=["POST"])
|
||||
# pylint: disable=invalid-name, unused-argument
|
||||
def set_password(self, request: Request, pk: int) -> Response:
|
||||
"""Set password for user"""
|
||||
user: User = self.get_object()
|
||||
try:
|
||||
user.set_password(request.data.get("password"))
|
||||
user.save()
|
||||
except (ValidationError, IntegrityError) as exc:
|
||||
LOGGER.debug("Failed to set password", exc=exc)
|
||||
return Response(status=400)
|
||||
if user.pk == request.user.pk and SESSION_IMPERSONATE_USER not in self.request.session:
|
||||
LOGGER.debug("Updating session hash after password change")
|
||||
update_session_auth_hash(self.request, user)
|
||||
return Response(status=204)
|
||||
|
||||
@extend_schema(request=UserSelfSerializer, responses={200: SessionUserSerializer(many=False)})
|
||||
@action(
|
||||
methods=["PUT"],
|
||||
|
||||
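
A possible way to exercise the new `set_password` action from a test; the URL name below is assumed from the ViewSet action name and the `user` basename, and is not confirmed by this diff:

```python
from django.urls import reverse
from rest_framework.test import APITestCase

from authentik.core.tests.utils import create_test_admin_user


class TestSetPassword(APITestCase):
    """Sketch: exercise the new set_password action"""

    def test_set_password(self):
        user = create_test_admin_user()
        self.client.force_login(user)
        response = self.client.post(
            # URL name assumed; not shown in this diff
            reverse("authentik_api:user-set-password", kwargs={"pk": user.pk}),
            data={"password": "a-sufficiently-long-password"},
        )
        self.assertEqual(response.status_code, 204)
```
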
@ -41,6 +41,7 @@ class MetaNameSerializer(PassiveSerializer):
|
||||
|
||||
verbose_name = SerializerMethodField()
|
||||
verbose_name_plural = SerializerMethodField()
|
||||
meta_model_name = SerializerMethodField()
|
||||
|
||||
def get_verbose_name(self, obj: Model) -> str:
|
||||
"""Return object's verbose_name"""
|
||||
@ -50,6 +51,10 @@ class MetaNameSerializer(PassiveSerializer):
|
||||
"""Return object's plural verbose_name"""
|
||||
return obj._meta.verbose_name_plural
|
||||
|
||||
def get_meta_model_name(self, obj: Model) -> str:
|
||||
"""Return internal model name"""
|
||||
return f"{obj._meta.app_label}.{obj._meta.model_name}"
|
||||
|
||||
|
||||
class TypeCreateSerializer(PassiveSerializer):
|
||||
"""Types of an object that can be created"""
|
||||
|
||||
@ -1,15 +0,0 @@
|
||||
"""Output full config"""
|
||||
from json import dumps
|
||||
|
||||
from django.core.management.base import BaseCommand, no_translations
|
||||
|
||||
from authentik.lib.config import CONFIG
|
||||
|
||||
|
||||
class Command(BaseCommand): # pragma: no cover
|
||||
"""Output full config"""
|
||||
|
||||
@no_translations
|
||||
def handle(self, *args, **options):
|
||||
"""Check permissions for all apps"""
|
||||
print(dumps(CONFIG.raw, indent=4))
|
||||
@ -5,6 +5,7 @@ from typing import Callable
|
||||
from uuid import uuid4
|
||||
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from sentry_sdk.api import set_tag
|
||||
|
||||
SESSION_IMPERSONATE_USER = "authentik_impersonate_user"
|
||||
SESSION_IMPERSONATE_ORIGINAL_USER = "authentik_impersonate_original_user"
|
||||
@ -12,7 +13,6 @@ LOCAL = local()
|
||||
RESPONSE_HEADER_ID = "X-authentik-id"
|
||||
KEY_AUTH_VIA = "auth_via"
|
||||
KEY_USER = "user"
|
||||
INTERNAL_HEADER_PREFIX = "X-authentik-internal-"
|
||||
|
||||
|
||||
class ImpersonateMiddleware:
|
||||
@ -51,11 +51,12 @@ class RequestIDMiddleware:
|
||||
"request_id": request_id,
|
||||
"host": request.get_host(),
|
||||
}
|
||||
set_tag("authentik.request_id", request_id)
|
||||
response = self.get_response(request)
|
||||
response[RESPONSE_HEADER_ID] = request.request_id
|
||||
if auth_via := LOCAL.authentik.get(KEY_AUTH_VIA, None):
|
||||
response[INTERNAL_HEADER_PREFIX + KEY_AUTH_VIA] = auth_via
|
||||
response[INTERNAL_HEADER_PREFIX + KEY_USER] = request.user.username
|
||||
setattr(response, "ak_context", {})
|
||||
response.ak_context.update(LOCAL.authentik)
|
||||
response.ak_context[KEY_USER] = request.user.username
|
||||
for key in list(LOCAL.authentik.keys()):
|
||||
del LOCAL.authentik[key]
|
||||
return response
|
||||
@ -66,4 +67,6 @@ def structlog_add_request_id(logger: Logger, method_name: str, event_dict: dict)
|
||||
"""If threadlocal has authentik defined, add request_id to log"""
|
||||
if hasattr(LOCAL, "authentik"):
|
||||
event_dict.update(LOCAL.authentik)
|
||||
if hasattr(LOCAL, "authentik_task"):
|
||||
event_dict.update(LOCAL.authentik_task)
|
||||
return event_dict
|
||||
|
||||
@ -3,7 +3,6 @@
|
||||
import uuid
|
||||
from os import environ
|
||||
|
||||
import django.core.validators
|
||||
import django.db.models.deletion
|
||||
from django.apps.registry import Apps
|
||||
from django.conf import settings
|
||||
@ -12,10 +11,10 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.models import Count
|
||||
|
||||
import authentik.core.models
|
||||
import authentik.lib.models
|
||||
|
||||
|
||||
def migrate_sessions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
db_alias = schema_editor.connection.alias
|
||||
from django.contrib.sessions.backends.cache import KEY_PREFIX
|
||||
from django.core.cache import cache
|
||||
|
||||
@ -161,7 +160,7 @@ class Migration(migrations.Migration):
|
||||
model_name="application",
|
||||
name="meta_launch_url",
|
||||
field=models.TextField(
|
||||
blank=True, default="", validators=[django.core.validators.URLValidator()]
|
||||
blank=True, default="", validators=[authentik.lib.models.DomainlessURLValidator()]
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
|
||||
@ -12,7 +12,6 @@ import authentik.core.models
|
||||
|
||||
|
||||
def migrate_sessions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
db_alias = schema_editor.connection.alias
|
||||
from django.contrib.sessions.backends.cache import KEY_PREFIX
|
||||
from django.core.cache import cache
|
||||
|
||||
|
||||
@ -1,8 +1,9 @@
|
||||
# Generated by Django 3.2.3 on 2021-06-02 21:51
|
||||
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
|
||||
import authentik.lib.models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@ -17,7 +18,7 @@ class Migration(migrations.Migration):
|
||||
field=models.TextField(
|
||||
blank=True,
|
||||
default="",
|
||||
validators=[django.core.validators.URLValidator()],
|
||||
validators=[authentik.lib.models.DomainlessURLValidator()],
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
@ -1,15 +1,15 @@
|
||||
"""authentik core models"""
|
||||
from datetime import timedelta
|
||||
from hashlib import md5, sha256
|
||||
from typing import Any, Optional, Type
|
||||
from typing import Any, Optional
|
||||
from urllib.parse import urlencode
|
||||
from uuid import uuid4
|
||||
|
||||
from deepmerge import always_merger
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.hashers import check_password
|
||||
from django.contrib.auth.models import AbstractUser
|
||||
from django.contrib.auth.models import UserManager as DjangoUserManager
|
||||
from django.core import validators
|
||||
from django.db import models
|
||||
from django.db.models import Q, QuerySet, options
|
||||
from django.http import HttpRequest
|
||||
@ -26,10 +26,9 @@ from structlog.stdlib import get_logger
|
||||
from authentik.core.exceptions import PropertyMappingExpressionException
|
||||
from authentik.core.signals import password_changed
|
||||
from authentik.core.types import UILoginButton, UserSettingSerializer
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.models import CreatedUpdatedModel, SerializerModel
|
||||
from authentik.lib.models import CreatedUpdatedModel, DomainlessURLValidator, SerializerModel
|
||||
from authentik.lib.utils.http import get_client_ip
|
||||
from authentik.managed.models import ManagedModel
|
||||
from authentik.policies.models import PolicyBindingModel
|
||||
@ -40,6 +39,7 @@ USER_ATTRIBUTE_SA = "goauthentik.io/user/service-account"
|
||||
USER_ATTRIBUTE_SOURCES = "goauthentik.io/user/sources"
|
||||
USER_ATTRIBUTE_TOKEN_EXPIRING = "goauthentik.io/user/token-expires" # nosec
|
||||
USER_ATTRIBUTE_CHANGE_USERNAME = "goauthentik.io/user/can-change-username"
|
||||
USER_ATTRIBUTE_CHANGE_NAME = "goauthentik.io/user/can-change-name"
|
||||
USER_ATTRIBUTE_CHANGE_EMAIL = "goauthentik.io/user/can-change-email"
|
||||
USER_ATTRIBUTE_CAN_OVERRIDE_IP = "goauthentik.io/user/override-ips"
|
||||
|
||||
@ -162,6 +162,22 @@ class User(GuardianUserMixin, AbstractUser):
|
||||
self.password_change_date = now()
|
||||
return super().set_password(password)
|
||||
|
||||
def check_password(self, raw_password: str) -> bool:
|
||||
"""
|
||||
Return a boolean of whether the raw_password was correct. Handles
|
||||
hashing formats behind the scenes.
|
||||
|
||||
Slightly changed version which doesn't send a signal for such internal hash upgrades
|
||||
"""
|
||||
|
||||
def setter(raw_password):
|
||||
self.set_password(raw_password, signal=False)
|
||||
# Password hash upgrades shouldn't be considered password changes.
|
||||
self._password = None
|
||||
self.save(update_fields=["password"])
|
||||
|
||||
return check_password(raw_password, self.password, setter)
|
||||
|
||||
@property
|
||||
def uid(self) -> str:
|
||||
"""Generate a globall unique UID, based on the user ID and the hashed secret key"""
|
||||
@ -204,7 +220,7 @@ class Provider(SerializerModel):
|
||||
name = models.TextField()
|
||||
|
||||
authorization_flow = models.ForeignKey(
|
||||
Flow,
|
||||
"authentik_flows.Flow",
|
||||
on_delete=models.CASCADE,
|
||||
help_text=_("Flow used when authorizing this provider."),
|
||||
related_name="provider_authorization",
|
||||
@ -226,7 +242,7 @@ class Provider(SerializerModel):
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def serializer(self) -> Type[Serializer]:
|
||||
def serializer(self) -> type[Serializer]:
|
||||
"""Get serializer for this model"""
|
||||
raise NotImplementedError
|
||||
|
||||
@ -246,7 +262,7 @@ class Application(PolicyBindingModel):
|
||||
)
|
||||
|
||||
meta_launch_url = models.TextField(
|
||||
default="", blank=True, validators=[validators.URLValidator()]
|
||||
default="", blank=True, validators=[DomainlessURLValidator()]
|
||||
)
|
||||
# For template applications, this can be set to /static/authentik/applications/*
|
||||
meta_icon = models.FileField(
|
||||
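
Swapping Django's stock `URLValidator` for `DomainlessURLValidator` (defined in `authentik/lib/models.py`, not shown here) relaxes launch-URL validation: the stock validator rejects hosts without a public-style domain suffix, which breaks purely internal launch URLs. An illustration of the stock behaviour the change works around:

```python
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator

validator = URLValidator()
validator("https://app.example.com/launch")  # passes: host has a domain suffix
try:
    # single-label internal hostname: rejected by the stock validator
    validator("https://intranet-app/launch")
except ValidationError:
    print("stock URLValidator rejects domainless hosts")
```
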
@ -264,7 +280,7 @@ class Application(PolicyBindingModel):
|
||||
it is returned as-is"""
|
||||
if not self.meta_icon:
|
||||
return None
|
||||
if self.meta_icon.name.startswith("http") or self.meta_icon.name.startswith("/static"):
|
||||
if "://" in self.meta_icon.name or self.meta_icon.name.startswith("/static"):
|
||||
return self.meta_icon.name
|
||||
return self.meta_icon.url
|
||||
|
||||
@ -272,15 +288,21 @@ class Application(PolicyBindingModel):
|
||||
"""Get launch URL if set, otherwise attempt to get launch URL based on provider."""
|
||||
if self.meta_launch_url:
|
||||
return self.meta_launch_url
|
||||
if self.provider:
|
||||
return self.get_provider().launch_url
|
||||
if provider := self.get_provider():
|
||||
return provider.launch_url
|
||||
return None
|
||||
|
||||
def get_provider(self) -> Optional[Provider]:
|
||||
"""Get casted provider instance"""
|
||||
if not self.provider:
|
||||
return None
|
||||
return Provider.objects.get_subclass(pk=self.provider.pk)
|
||||
# if the Application class has been cached, self.provider is set
|
||||
# but doing a direct query lookup will fail.
|
||||
# In that case, just return None
|
||||
try:
|
||||
return Provider.objects.get_subclass(pk=self.provider.pk)
|
||||
except Provider.DoesNotExist:
|
||||
return None
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
@ -325,7 +347,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
|
||||
property_mappings = models.ManyToManyField("PropertyMapping", default=None, blank=True)
|
||||
|
||||
authentication_flow = models.ForeignKey(
|
||||
Flow,
|
||||
"authentik_flows.Flow",
|
||||
blank=True,
|
||||
null=True,
|
||||
default=None,
|
||||
@ -334,7 +356,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
|
||||
related_name="source_authentication",
|
||||
)
|
||||
enrollment_flow = models.ForeignKey(
|
||||
Flow,
|
||||
"authentik_flows.Flow",
|
||||
blank=True,
|
||||
null=True,
|
||||
default=None,
|
||||
@ -361,13 +383,11 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
|
||||
"""Return component used to edit this object"""
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def ui_login_button(self) -> Optional[UILoginButton]:
|
||||
def ui_login_button(self, request: HttpRequest) -> Optional[UILoginButton]:
|
||||
"""If source uses a http-based flow, return UI Information about the login
|
||||
button. If source doesn't use http-based flow, return None."""
|
||||
return None
|
||||
|
||||
@property
|
||||
def ui_user_settings(self) -> Optional[UserSettingSerializer]:
|
||||
"""Entrypoint to integrate with User settings. Can either return None if no
|
||||
user settings are available, or UserSettingSerializer."""
|
||||
@ -454,6 +474,14 @@ class Token(ManagedModel, ExpiringModel):
|
||||
"""Handler which is called when this object is expired."""
|
||||
from authentik.events.models import Event, EventAction
|
||||
|
||||
if self.intent in [
|
||||
TokenIntents.INTENT_RECOVERY,
|
||||
TokenIntents.INTENT_VERIFICATION,
|
||||
TokenIntents.INTENT_APP_PASSWORD,
|
||||
]:
|
||||
super().expire_action(*args, **kwargs)
|
||||
return
|
||||
|
||||
self.key = default_token_key()
|
||||
self.expires = default_token_duration()
|
||||
self.save(*args, **kwargs)
|
||||
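
Per the hunk above, expiry now only removes recovery/verification/app-password tokens; API tokens get a fresh key and a new expiry instead. A sketch of the resulting behaviour (it mirrors the `test_token_expire` change further down):

```python
from django.utils.timezone import now
from guardian.utils import get_anonymous_user

from authentik.core.models import Token, TokenIntents

token = Token.objects.create(
    intent=TokenIntents.INTENT_API, user=get_anonymous_user(), expires=now()
)
old_key = token.key
token.expire_action()          # rotates key + expiry instead of deleting
token.refresh_from_db()
assert token.key != old_key    # API tokens survive expiry with a new key
```
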
@ -495,7 +523,7 @@ class PropertyMapping(SerializerModel, ManagedModel):
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def serializer(self) -> Type[Serializer]:
|
||||
def serializer(self) -> type[Serializer]:
|
||||
"""Get serializer for this model"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
"""authentik core signals"""
|
||||
from typing import TYPE_CHECKING, Type
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from django.contrib.auth.signals import user_logged_in, user_logged_out
|
||||
from django.contrib.sessions.backends.cache import KEY_PREFIX
|
||||
@ -62,7 +62,7 @@ def user_logged_out_session(sender, request: HttpRequest, user: "User", **_):
|
||||
|
||||
|
||||
@receiver(pre_delete)
|
||||
def authenticated_session_delete(sender: Type[Model], instance: "AuthenticatedSession", **_):
|
||||
def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_):
|
||||
"""Delete session when authenticated session is deleted"""
|
||||
from authentik.core.models import AuthenticatedSession
|
||||
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
"""Source decision helper"""
|
||||
from enum import Enum
|
||||
from typing import Any, Optional, Type
|
||||
from typing import Any, Optional
|
||||
|
||||
from django.contrib import messages
|
||||
from django.db import IntegrityError
|
||||
@ -14,6 +14,7 @@ from structlog.stdlib import get_logger
|
||||
from authentik.core.models import Source, SourceUserMatchingModes, User, UserSourceConnection
|
||||
from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION, PostUserEnrollmentStage
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.flows.exceptions import FlowNonApplicableException
|
||||
from authentik.flows.models import Flow, Stage, in_memory_stage
|
||||
from authentik.flows.planner import (
|
||||
PLAN_CONTEXT_PENDING_USER,
|
||||
@ -24,6 +25,8 @@ from authentik.flows.planner import (
|
||||
)
|
||||
from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET, SESSION_KEY_PLAN
|
||||
from authentik.lib.utils.urls import redirect_with_qs
|
||||
from authentik.policies.denied import AccessDeniedResponse
|
||||
from authentik.policies.types import PolicyResult
|
||||
from authentik.policies.utils import delete_none_keys
|
||||
from authentik.stages.password import BACKEND_INBUILT
|
||||
from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND
|
||||
@ -50,7 +53,10 @@ class SourceFlowManager:
|
||||
|
||||
identifier: str
|
||||
|
||||
connection_type: Type[UserSourceConnection] = UserSourceConnection
|
||||
connection_type: type[UserSourceConnection] = UserSourceConnection
|
||||
|
||||
enroll_info: dict[str, Any]
|
||||
policy_context: dict[str, Any]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@ -64,6 +70,7 @@ class SourceFlowManager:
|
||||
self.identifier = identifier
|
||||
self.enroll_info = enroll_info
|
||||
self._logger = get_logger().bind(source=source, identifier=identifier)
|
||||
self.policy_context = {}
|
||||
|
||||
# pylint: disable=too-many-return-statements
|
||||
def get_action(self, **kwargs) -> tuple[Action, Optional[UserSourceConnection]]:
|
||||
@ -144,20 +151,23 @@ class SourceFlowManager:
|
||||
except IntegrityError as exc:
|
||||
self._logger.warning("failed to get action", exc=exc)
|
||||
return redirect("/")
|
||||
self._logger.debug("get_action() says", action=action, connection=connection)
|
||||
if connection:
|
||||
if action == Action.LINK:
|
||||
self._logger.debug("Linking existing user")
|
||||
return self.handle_existing_user_link(connection)
|
||||
if action == Action.AUTH:
|
||||
self._logger.debug("Handling auth user")
|
||||
return self.handle_auth_user(connection)
|
||||
if action == Action.ENROLL:
|
||||
self._logger.debug("Handling enrollment of new user")
|
||||
return self.handle_enroll(connection)
|
||||
self._logger.debug("get_action", action=action, connection=connection)
|
||||
try:
|
||||
if connection:
|
||||
if action == Action.LINK:
|
||||
self._logger.debug("Linking existing user")
|
||||
return self.handle_existing_user_link(connection)
|
||||
if action == Action.AUTH:
|
||||
self._logger.debug("Handling auth user")
|
||||
return self.handle_auth_user(connection)
|
||||
if action == Action.ENROLL:
|
||||
self._logger.debug("Handling enrollment of new user")
|
||||
return self.handle_enroll(connection)
|
||||
except FlowNonApplicableException as exc:
|
||||
self._logger.warning("Flow non applicable", exc=exc)
|
||||
return self.error_handler(exc, exc.policy_result)
|
||||
# Default case, assume deny
|
||||
messages.error(
|
||||
self.request,
|
||||
error = (
|
||||
_(
|
||||
(
|
||||
"Request to authenticate with %(source)s has been denied. Please authenticate "
|
||||
@ -166,7 +176,17 @@ class SourceFlowManager:
|
||||
% {"source": self.source.name}
|
||||
),
|
||||
)
|
||||
return redirect(reverse("authentik_core:root-redirect"))
|
||||
return self.error_handler(error)
|
||||
|
||||
def error_handler(
|
||||
self, error: Exception, policy_result: Optional[PolicyResult] = None
|
||||
) -> HttpResponse:
|
||||
"""Handle any errors by returning an access denied stage"""
|
||||
response = AccessDeniedResponse(self.request)
|
||||
response.error_message = str(error)
|
||||
if policy_result:
|
||||
response.policy_result = policy_result
|
||||
return response
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def get_stages_to_append(self, flow: Flow) -> list[Stage]:
|
||||
@ -179,7 +199,9 @@ class SourceFlowManager:
|
||||
]
|
||||
return []
|
||||
|
||||
def _handle_login_flow(self, flow: Flow, **kwargs) -> HttpResponse:
|
||||
def _handle_login_flow(
|
||||
self, flow: Flow, connection: UserSourceConnection, **kwargs
|
||||
) -> HttpResponse:
|
||||
"""Prepare Authentication Plan, redirect user FlowExecutor"""
|
||||
# Ensure redirect is carried through when user was trying to
|
||||
# authorize application
|
||||
@ -193,8 +215,10 @@ class SourceFlowManager:
|
||||
PLAN_CONTEXT_SSO: True,
|
||||
PLAN_CONTEXT_SOURCE: self.source,
|
||||
PLAN_CONTEXT_REDIRECT: final_redirect,
|
||||
PLAN_CONTEXT_SOURCES_CONNECTION: connection,
|
||||
}
|
||||
)
|
||||
kwargs.update(self.policy_context)
|
||||
if not flow:
|
||||
return HttpResponseBadRequest()
|
||||
# We run the Flow planner here so we can pass the Pending user in the context
|
||||
@ -220,7 +244,7 @@ class SourceFlowManager:
|
||||
_("Successfully authenticated with %(source)s!" % {"source": self.source.name}),
|
||||
)
|
||||
flow_kwargs = {PLAN_CONTEXT_PENDING_USER: connection.user}
|
||||
return self._handle_login_flow(self.source.authentication_flow, **flow_kwargs)
|
||||
return self._handle_login_flow(self.source.authentication_flow, connection, **flow_kwargs)
|
||||
|
||||
def handle_existing_user_link(
|
||||
self,
|
||||
@ -264,8 +288,8 @@ class SourceFlowManager:
|
||||
return HttpResponseBadRequest()
|
||||
return self._handle_login_flow(
|
||||
self.source.enrollment_flow,
|
||||
connection,
|
||||
**{
|
||||
PLAN_CONTEXT_PROMPT: delete_none_keys(self.enroll_info),
|
||||
PLAN_CONTEXT_SOURCES_CONNECTION: connection,
|
||||
},
|
||||
)
|
||||
|
||||
@ -6,7 +6,6 @@ from os import environ
|
||||
from boto3.exceptions import Boto3Error
|
||||
from botocore.exceptions import BotoCoreError, ClientError
|
||||
from dbbackup.db.exceptions import CommandConnectorError
|
||||
from django.conf import settings
|
||||
from django.contrib.humanize.templatetags.humanize import naturaltime
|
||||
from django.contrib.sessions.backends.cache import KEY_PREFIX
|
||||
from django.core import management
|
||||
@ -29,7 +28,7 @@ LOGGER = get_logger()
|
||||
|
||||
|
||||
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||
@prefill_task()
|
||||
@prefill_task
|
||||
def clean_expired_models(self: MonitoredTask):
|
||||
"""Remove expired objects"""
|
||||
messages = []
|
||||
@ -63,13 +62,11 @@ def should_backup() -> bool:
|
||||
return False
|
||||
if not CONFIG.y_bool("postgresql.backup.enabled"):
|
||||
return False
|
||||
if settings.DEBUG:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||
@prefill_task()
|
||||
@prefill_task
|
||||
def backup_database(self: MonitoredTask): # pragma: no cover
|
||||
"""Database backup"""
|
||||
self.result_timeout_hours = 25
|
||||
|
||||
@ -19,6 +19,7 @@
|
||||
<script src="{% static 'dist/poly.js' %}" type="module"></script>
|
||||
{% block head %}
|
||||
{% endblock %}
|
||||
<meta name="sentry-trace" content="{{ sentry_trace }}" />
|
||||
</head>
|
||||
<body>
|
||||
{% block body %}
|
||||
|
||||
@ -5,6 +5,8 @@
|
||||
|
||||
{% block head %}
|
||||
<script src="{% static 'dist/admin/AdminInterface.js' %}" type="module"></script>
|
||||
<meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)">
|
||||
<meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)">
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
|
||||
@ -5,6 +5,8 @@
|
||||
|
||||
{% block head %}
|
||||
<script src="{% static 'dist/user/UserInterface.js' %}" type="module"></script>
|
||||
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
|
||||
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
|
||||
@ -1,9 +1,9 @@
|
||||
"""Test Applications API"""
|
||||
from django.urls import reverse
|
||||
from django.utils.encoding import force_str
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import Application, User
|
||||
from authentik.core.models import Application
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
from authentik.policies.dummy.models import DummyPolicy
|
||||
from authentik.policies.models import PolicyBinding
|
||||
|
||||
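
Several tests below drop the hard-coded `akadmin` user in favour of `create_test_admin_user()` from `authentik.core.tests.utils`. That helper is not included in this excerpt; a plausible minimal version consistent with how the tests use it (random name, superuser via group membership) could look like this:

```python
from authentik.core.models import Group, User
from authentik.lib.generators import generate_id


def create_test_admin_user(name: str = "") -> User:
    """Create a random admin user for tests (sketch only; the real helper lives in
    authentik/core/tests/utils.py and may differ)"""
    uid = name or generate_id()
    group = Group.objects.create(name=uid, is_superuser=True)
    user = User.objects.create(
        username=uid,
        name=uid,
        email=f"{uid}@goauthentik.io",
    )
    group.users.add(user)
    return user
```
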
@ -12,7 +12,7 @@ class TestApplicationsAPI(APITestCase):
|
||||
"""Test applications API"""
|
||||
|
||||
def setUp(self) -> None:
|
||||
self.user = User.objects.get(username="akadmin")
|
||||
self.user = create_test_admin_user()
|
||||
self.allowed = Application.objects.create(name="allowed", slug="allowed")
|
||||
self.denied = Application.objects.create(name="denied", slug="denied")
|
||||
PolicyBinding.objects.create(
|
||||
@ -31,7 +31,7 @@ class TestApplicationsAPI(APITestCase):
|
||||
)
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertJSONEqual(force_str(response.content), {"messages": [], "passing": True})
|
||||
self.assertJSONEqual(response.content.decode(), {"messages": [], "passing": True})
|
||||
response = self.client.get(
|
||||
reverse(
|
||||
"authentik_api:application-check-access",
|
||||
@ -39,14 +39,14 @@ class TestApplicationsAPI(APITestCase):
|
||||
)
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertJSONEqual(force_str(response.content), {"messages": ["dummy"], "passing": False})
|
||||
self.assertJSONEqual(response.content.decode(), {"messages": ["dummy"], "passing": False})
|
||||
|
||||
def test_list(self):
|
||||
"""Test list operation without superuser_full_list"""
|
||||
self.client.force_login(self.user)
|
||||
response = self.client.get(reverse("authentik_api:application-list"))
|
||||
self.assertJSONEqual(
|
||||
force_str(response.content),
|
||||
response.content.decode(),
|
||||
{
|
||||
"pagination": {
|
||||
"next": 0,
|
||||
@ -82,7 +82,7 @@ class TestApplicationsAPI(APITestCase):
|
||||
reverse("authentik_api:application-list") + "?superuser_full_list=true"
|
||||
)
|
||||
self.assertJSONEqual(
|
||||
force_str(response.content),
|
||||
response.content.decode(),
|
||||
{
|
||||
"pagination": {
|
||||
"next": 0,
|
||||
|
||||
@ -2,10 +2,10 @@
|
||||
from json import loads
|
||||
|
||||
from django.urls.base import reverse
|
||||
from django.utils.encoding import force_str
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
|
||||
|
||||
class TestAuthenticatedSessionsAPI(APITestCase):
|
||||
@ -13,7 +13,7 @@ class TestAuthenticatedSessionsAPI(APITestCase):
|
||||
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
self.user = User.objects.get(username="akadmin")
|
||||
self.user = create_test_admin_user()
|
||||
self.other_user = User.objects.create(username="normal-user")
|
||||
|
||||
def test_list(self):
|
||||
@ -27,5 +27,5 @@ class TestAuthenticatedSessionsAPI(APITestCase):
|
||||
self.client.force_login(self.other_user)
|
||||
response = self.client.get(reverse("authentik_api:authenticatedsession-list"))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
body = loads(force_str(response.content))
|
||||
body = loads(response.content.decode())
|
||||
self.assertEqual(body["pagination"]["count"], 1)
|
||||
|
||||
@ -5,6 +5,7 @@ from django.test.testcases import TestCase
|
||||
from django.urls import reverse
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
|
||||
|
||||
class TestImpersonation(TestCase):
|
||||
@ -13,14 +14,14 @@ class TestImpersonation(TestCase):
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
self.other_user = User.objects.create(username="to-impersonate")
|
||||
self.akadmin = User.objects.get(username="akadmin")
|
||||
self.user = create_test_admin_user()
|
||||
|
||||
def test_impersonate_simple(self):
|
||||
"""test simple impersonation and un-impersonation"""
|
||||
# test with an inactive user to ensure that it still works
|
||||
self.other_user.is_active = False
|
||||
self.other_user.save()
|
||||
self.client.force_login(self.akadmin)
|
||||
self.client.force_login(self.user)
|
||||
|
||||
self.client.get(
|
||||
reverse(
|
||||
@ -32,13 +33,13 @@ class TestImpersonation(TestCase):
|
||||
response = self.client.get(reverse("authentik_api:user-me"))
|
||||
response_body = loads(response.content.decode())
|
||||
self.assertEqual(response_body["user"]["username"], self.other_user.username)
|
||||
self.assertEqual(response_body["original"]["username"], self.akadmin.username)
|
||||
self.assertEqual(response_body["original"]["username"], self.user.username)
|
||||
|
||||
self.client.get(reverse("authentik_core:impersonate-end"))
|
||||
|
||||
response = self.client.get(reverse("authentik_api:user-me"))
|
||||
response_body = loads(response.content.decode())
|
||||
self.assertEqual(response_body["user"]["username"], self.akadmin.username)
|
||||
self.assertEqual(response_body["user"]["username"], self.user.username)
|
||||
self.assertNotIn("original", response_body)
|
||||
|
||||
def test_impersonate_denied(self):
|
||||
@ -46,7 +47,7 @@ class TestImpersonation(TestCase):
|
||||
self.client.force_login(self.other_user)
|
||||
|
||||
self.client.get(
|
||||
reverse("authentik_core:impersonate-init", kwargs={"user_id": self.akadmin.pk})
|
||||
reverse("authentik_core:impersonate-init", kwargs={"user_id": self.user.pk})
|
||||
)
|
||||
|
||||
response = self.client.get(reverse("authentik_api:user-me"))
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
"""authentik core models tests"""
|
||||
from time import sleep
|
||||
from typing import Callable, Type
|
||||
from typing import Callable
|
||||
|
||||
from django.test import TestCase
|
||||
from django.test import RequestFactory, TestCase
|
||||
from django.utils.timezone import now
|
||||
from guardian.shortcuts import get_anonymous_user
|
||||
|
||||
@ -27,9 +27,12 @@ class TestModels(TestCase):
|
||||
self.assertFalse(token.is_expired)
|
||||
|
||||
|
||||
def source_tester_factory(test_model: Type[Stage]) -> Callable:
|
||||
def source_tester_factory(test_model: type[Stage]) -> Callable:
|
||||
"""Test source"""
|
||||
|
||||
factory = RequestFactory()
|
||||
request = factory.get("/")
|
||||
|
||||
def tester(self: TestModels):
|
||||
model_class = None
|
||||
if test_model._meta.abstract:
|
||||
@ -38,18 +41,18 @@ def source_tester_factory(test_model: Type[Stage]) -> Callable:
|
||||
model_class = test_model()
|
||||
model_class.slug = "test"
|
||||
self.assertIsNotNone(model_class.component)
|
||||
_ = model_class.ui_login_button
|
||||
_ = model_class.ui_user_settings
|
||||
_ = model_class.ui_login_button(request)
|
||||
_ = model_class.ui_user_settings()
|
||||
|
||||
return tester
|
||||
|
||||
|
||||
def provider_tester_factory(test_model: Type[Stage]) -> Callable:
|
||||
def provider_tester_factory(test_model: type[Stage]) -> Callable:
|
||||
"""Test provider"""
|
||||
|
||||
def tester(self: TestModels):
|
||||
model_class = None
|
||||
if test_model._meta.abstract:
|
||||
if test_model._meta.abstract: # pragma: no cover
|
||||
model_class = test_model.__bases__[0]()
|
||||
else:
|
||||
model_class = test_model()
|
||||
@ -59,6 +62,6 @@ def provider_tester_factory(test_model: Type[Stage]) -> Callable:
|
||||
|
||||
|
||||
for model in all_subclasses(Source):
|
||||
setattr(TestModels, f"test_model_{model.__name__}", source_tester_factory(model))
|
||||
setattr(TestModels, f"test_source_{model.__name__}", source_tester_factory(model))
|
||||
for model in all_subclasses(Provider):
|
||||
setattr(TestModels, f"test_model_{model.__name__}", provider_tester_factory(model))
|
||||
setattr(TestModels, f"test_provider_{model.__name__}", provider_tester_factory(model))
|
||||
|
||||
@ -6,7 +6,8 @@ from rest_framework.serializers import ValidationError
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.api.propertymappings import PropertyMappingSerializer
|
||||
from authentik.core.models import PropertyMapping, User
|
||||
from authentik.core.models import PropertyMapping
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
|
||||
|
||||
class TestPropertyMappingAPI(APITestCase):
|
||||
@ -17,7 +18,7 @@ class TestPropertyMappingAPI(APITestCase):
|
||||
self.mapping = PropertyMapping.objects.create(
|
||||
name="dummy", expression="""return {'foo': 'bar'}"""
|
||||
)
|
||||
self.user = User.objects.get(username="akadmin")
|
||||
self.user = create_test_admin_user()
|
||||
self.client.force_login(self.user)
|
||||
|
||||
def test_test_call(self):
|
||||
@ -40,7 +41,7 @@ class TestPropertyMappingAPI(APITestCase):
|
||||
expr = "return True"
|
||||
self.assertEqual(PropertyMappingSerializer().validate_expression(expr), expr)
|
||||
with self.assertRaises(ValidationError):
|
||||
print(PropertyMappingSerializer().validate_expression("/"))
|
||||
PropertyMappingSerializer().validate_expression("/")
|
||||
|
||||
def test_types(self):
|
||||
"""Test PropertyMappigns's types endpoint"""
|
||||
|
||||
@ -2,7 +2,8 @@
|
||||
from django.urls import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import PropertyMapping, User
|
||||
from authentik.core.models import PropertyMapping
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
|
||||
|
||||
class TestProvidersAPI(APITestCase):
|
||||
@ -13,7 +14,7 @@ class TestProvidersAPI(APITestCase):
|
||||
self.mapping = PropertyMapping.objects.create(
|
||||
name="dummy", expression="""return {'foo': 'bar'}"""
|
||||
)
|
||||
self.user = User.objects.get(username="akadmin")
|
||||
self.user = create_test_admin_user()
|
||||
self.client.force_login(self.user)
|
||||
|
||||
def test_types(self):
|
||||
|
||||
@ -6,8 +6,12 @@ from guardian.utils import get_anonymous_user
|
||||
|
||||
from authentik.core.models import SourceUserMatchingModes, User
|
||||
from authentik.core.sources.flow_manager import Action
|
||||
from authentik.flows.models import Flow, FlowDesignation
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import get_request
|
||||
from authentik.policies.denied import AccessDeniedResponse
|
||||
from authentik.policies.expression.models import ExpressionPolicy
|
||||
from authentik.policies.models import PolicyBinding
|
||||
from authentik.sources.oauth.models import OAuthSource, UserOAuthSourceConnection
|
||||
from authentik.sources.oauth.views.callback import OAuthSourceFlowManager
|
||||
|
||||
@ -17,7 +21,7 @@ class TestSourceFlowManager(TestCase):
|
||||
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
self.source = OAuthSource.objects.create(name="test")
|
||||
self.source: OAuthSource = OAuthSource.objects.create(name="test")
|
||||
self.factory = RequestFactory()
|
||||
self.identifier = generate_id()
|
||||
|
||||
@ -143,3 +147,34 @@ class TestSourceFlowManager(TestCase):
|
||||
action, _ = flow_manager.get_action()
|
||||
self.assertEqual(action, Action.ENROLL)
|
||||
flow_manager.get_flow()
|
||||
|
||||
def test_error_non_applicable_flow(self):
    """Test error handling when a source selected flow is non-applicable due to a policy"""
    self.source.user_matching_mode = SourceUserMatchingModes.USERNAME_LINK

    flow = Flow.objects.create(
        name="test", slug="test", title="test", designation=FlowDesignation.ENROLLMENT
    )
    policy = ExpressionPolicy.objects.create(
        name="false", expression="""ak_message("foo");return False"""
    )
    PolicyBinding.objects.create(
        policy=policy,
        target=flow,
        order=0,
    )
    self.source.enrollment_flow = flow
    self.source.save()

    flow_manager = OAuthSourceFlowManager(
        self.source,
        get_request("/", user=AnonymousUser()),
        self.identifier,
        {"username": "foo"},
    )
    action, _ = flow_manager.get_action()
    self.assertEqual(action, Action.ENROLL)
    response = flow_manager.get_flow()
    self.assertIsInstance(response, AccessDeniedResponse)
    # pylint: disable=no-member
    self.assertEqual(response.error_message, "foo")
|
||||
|
||||
@ -8,6 +8,7 @@ from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import USER_ATTRIBUTE_TOKEN_EXPIRING, Token, TokenIntents, User
|
||||
from authentik.core.tasks import clean_expired_models
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
|
||||
|
||||
class TestTokenAPI(APITestCase):
|
||||
@ -16,7 +17,7 @@ class TestTokenAPI(APITestCase):
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
self.user = User.objects.create(username="testuser")
|
||||
self.admin = User.objects.get(username="akadmin")
|
||||
self.admin = create_test_admin_user()
|
||||
self.client.force_login(self.user)
|
||||
|
||||
def test_token_create(self):
|
||||
@ -53,7 +54,9 @@ class TestTokenAPI(APITestCase):
|
||||
|
||||
def test_token_expire(self):
|
||||
"""Test Token expire task"""
|
||||
token: Token = Token.objects.create(expires=now(), user=get_anonymous_user())
|
||||
token: Token = Token.objects.create(
|
||||
expires=now(), user=get_anonymous_user(), intent=TokenIntents.INTENT_API
|
||||
)
|
||||
key = token.key
|
||||
clean_expired_models.delay().get()
|
||||
token.refresh_from_db()
|
||||
|
||||
@ -2,8 +2,15 @@
|
||||
from django.urls.base import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import USER_ATTRIBUTE_CHANGE_EMAIL, USER_ATTRIBUTE_CHANGE_USERNAME, User
|
||||
from authentik.flows.models import Flow, FlowDesignation
|
||||
from authentik.core.models import (
|
||||
USER_ATTRIBUTE_CHANGE_EMAIL,
|
||||
USER_ATTRIBUTE_CHANGE_NAME,
|
||||
USER_ATTRIBUTE_CHANGE_USERNAME,
|
||||
User,
|
||||
)
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_tenant
|
||||
from authentik.flows.models import FlowDesignation
|
||||
from authentik.lib.generators import generate_key
|
||||
from authentik.stages.email.models import EmailStage
|
||||
from authentik.tenants.models import Tenant
|
||||
|
||||
@ -12,16 +19,33 @@ class TestUsersAPI(APITestCase):
|
||||
"""Test Users API"""
|
||||
|
||||
def setUp(self) -> None:
|
||||
self.admin = User.objects.get(username="akadmin")
|
||||
self.admin = create_test_admin_user()
|
||||
self.user = User.objects.create(username="test-user")
|
||||
|
||||
def test_update_self(self):
|
||||
"""Test update_self"""
|
||||
self.admin.attributes["foo"] = "bar"
|
||||
self.admin.save()
|
||||
self.admin.refresh_from_db()
|
||||
self.client.force_login(self.admin)
|
||||
response = self.client.put(
|
||||
reverse("authentik_api:user-update-self"), data={"username": "foo", "name": "foo"}
|
||||
)
|
||||
self.admin.refresh_from_db()
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertEqual(self.admin.attributes["foo"], "bar")
|
||||
self.assertEqual(self.admin.username, "foo")
|
||||
self.assertEqual(self.admin.name, "foo")
|
||||
|
||||
def test_update_self_name_denied(self):
|
||||
"""Test update_self"""
|
||||
self.admin.attributes[USER_ATTRIBUTE_CHANGE_NAME] = False
|
||||
self.admin.save()
|
||||
self.client.force_login(self.admin)
|
||||
response = self.client.put(
|
||||
reverse("authentik_api:user-update-self"), data={"username": "foo", "name": "foo"}
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_update_self_username_denied(self):
|
||||
"""Test update_self"""
|
||||
@ -67,12 +91,22 @@ class TestUsersAPI(APITestCase):
|
||||
)
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_set_password(self):
|
||||
"""Test Direct password set"""
|
||||
self.client.force_login(self.admin)
|
||||
new_pw = generate_key()
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:user-set-password", kwargs={"pk": self.admin.pk}),
|
||||
data={"password": new_pw},
|
||||
)
|
||||
self.assertEqual(response.status_code, 204)
|
||||
self.admin.refresh_from_db()
|
||||
self.assertTrue(self.admin.check_password(new_pw))
|
||||
|
||||
def test_recovery(self):
|
||||
"""Test user recovery link (no recovery flow set)"""
|
||||
flow = Flow.objects.create(
|
||||
name="test", title="test", slug="test", designation=FlowDesignation.RECOVERY
|
||||
)
|
||||
tenant: Tenant = Tenant.objects.first()
|
||||
flow = create_test_flow(FlowDesignation.RECOVERY)
|
||||
tenant: Tenant = create_test_tenant()
|
||||
tenant.flow_recovery = flow
|
||||
tenant.save()
|
||||
self.client.force_login(self.admin)
|
||||
@ -99,10 +133,8 @@ class TestUsersAPI(APITestCase):
|
||||
"""Test user recovery link (no email stage)"""
|
||||
self.user.email = "foo@bar.baz"
|
||||
self.user.save()
|
||||
flow = Flow.objects.create(
|
||||
name="test", title="test", slug="test", designation=FlowDesignation.RECOVERY
|
||||
)
|
||||
tenant: Tenant = Tenant.objects.first()
|
||||
flow = create_test_flow(designation=FlowDesignation.RECOVERY)
|
||||
tenant: Tenant = create_test_tenant()
|
||||
tenant.flow_recovery = flow
|
||||
tenant.save()
|
||||
self.client.force_login(self.admin)
|
||||
@ -115,10 +147,8 @@ class TestUsersAPI(APITestCase):
|
||||
"""Test user recovery link"""
|
||||
self.user.email = "foo@bar.baz"
|
||||
self.user.save()
|
||||
flow = Flow.objects.create(
|
||||
name="test", title="test", slug="test", designation=FlowDesignation.RECOVERY
|
||||
)
|
||||
tenant: Tenant = Tenant.objects.first()
|
||||
flow = create_test_flow(FlowDesignation.RECOVERY)
|
||||
tenant: Tenant = create_test_tenant()
|
||||
tenant.flow_recovery = flow
|
||||
tenant.save()
|
||||
|
||||
|
||||
authentik/core/tests/utils.py (new file, 57 lines added)
@@ -0,0 +1,57 @@
"""Test Utils"""
from typing import Optional

from django.utils.text import slugify

from authentik.core.models import Group, User
from authentik.crypto.builder import CertificateBuilder
from authentik.crypto.models import CertificateKeyPair
from authentik.flows.models import Flow, FlowDesignation
from authentik.lib.generators import generate_id
from authentik.tenants.models import Tenant


def create_test_flow(designation: FlowDesignation = FlowDesignation.STAGE_CONFIGURATION) -> Flow:
    """Generate a flow that can be used for testing"""
    uid = generate_id(10)
    return Flow.objects.create(
        name=uid,
        title=uid,
        slug=slugify(uid),
        designation=designation,
    )


def create_test_admin_user(name: Optional[str] = None) -> User:
    """Generate a test-admin user"""
    uid = generate_id(20) if not name else name
    group = Group.objects.create(name=uid, is_superuser=True)
    user: User = User.objects.create(
        username=uid,
        name=uid,
        email=f"{uid}@goauthentik.io",
    )
    user.set_password(uid)
    user.save()
    group.users.add(user)
    return user


def create_test_tenant() -> Tenant:
    """Generate a test tenant, removing all other tenants to make sure this one
    matches."""
    uid = generate_id(20)
    Tenant.objects.all().delete()
    return Tenant.objects.create(domain=uid, default=True)


def create_test_cert() -> CertificateKeyPair:
    """Generate a certificate for testing"""
    CertificateKeyPair.objects.filter(name="goauthentik.io").delete()
    builder = CertificateBuilder()
    builder.common_name = "goauthentik.io"
    builder.build(
        subject_alt_names=["goauthentik.io"],
        validity_days=360,
    )
    return builder.save()
|
||||
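For orientation, a minimal sketch of how these helpers are meant to be combined in a test case; the test class itself is illustrative and not part of the change:

from rest_framework.test import APITestCase

from authentik.core.tests.utils import (
    create_test_admin_user,
    create_test_cert,
    create_test_flow,
    create_test_tenant,
)
from authentik.flows.models import FlowDesignation


class ExampleTest(APITestCase):
    """Illustrative test: each helper creates a uniquely named object, so tests no
    longer depend on the bootstrapped `akadmin` user, default flows or default tenant."""

    def setUp(self) -> None:
        self.admin = create_test_admin_user()  # superuser with a random username/password
        self.flow = create_test_flow(FlowDesignation.RECOVERY)  # uniquely slugged flow
        self.tenant = create_test_tenant()  # removes other tenants, creates a default one
        self.cert = create_test_cert()  # self-signed keypair for goauthentik.io
        self.client.force_login(self.admin)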
@ -29,3 +29,4 @@ class UserSettingSerializer(PassiveSerializer):
|
||||
component = CharField()
|
||||
title = CharField()
|
||||
configure_url = CharField(required=False)
|
||||
icon_url = CharField()
|
||||
|
||||
@ -1,4 +1,6 @@
|
||||
"""Crypto API Views"""
|
||||
from typing import Optional
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives.serialization import load_pem_private_key
|
||||
from cryptography.x509 import load_pem_x509_certificate
|
||||
@ -20,6 +22,7 @@ from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.crypto.builder import CertificateBuilder
|
||||
from authentik.crypto.managed import MANAGED_KEY
|
||||
from authentik.crypto.models import CertificateKeyPair
|
||||
from authentik.events.models import Event, EventAction
|
||||
|
||||
@ -30,6 +33,7 @@ class CertificateKeyPairSerializer(ModelSerializer):
|
||||
cert_expiry = DateTimeField(source="certificate.not_valid_after", read_only=True)
|
||||
cert_subject = SerializerMethodField()
|
||||
private_key_available = SerializerMethodField()
|
||||
private_key_type = SerializerMethodField()
|
||||
|
||||
certificate_download_url = SerializerMethodField()
|
||||
private_key_download_url = SerializerMethodField()
|
||||
@ -42,6 +46,13 @@ class CertificateKeyPairSerializer(ModelSerializer):
|
||||
"""Show if this keypair has a private key configured or not"""
|
||||
return instance.key_data != "" and instance.key_data is not None
|
||||
|
||||
def get_private_key_type(self, instance: CertificateKeyPair) -> Optional[str]:
|
||||
"""Get the private key's type, if set"""
|
||||
key = instance.private_key
|
||||
if key:
|
||||
return key.__class__.__name__.replace("_", "").lower().replace("privatekey", "")
|
||||
return None
|
||||
|
||||
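For reference, `private_key_type` is derived by stripping underscores and the `privatekey` suffix from the key's class name; illustrative results (exact class names depend on the installed cryptography version):

# _RSAPrivateKey            -> "rsa"
# _EllipticCurvePrivateKey  -> "ellipticcurve"
# Ed25519PrivateKey         -> "ed25519"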
def get_certificate_download_url(self, instance: CertificateKeyPair) -> str:
|
||||
"""Get URL to download certificate"""
|
||||
return (
|
||||
@ -71,7 +82,7 @@ class CertificateKeyPairSerializer(ModelSerializer):
|
||||
return value
|
||||
|
||||
def validate_key_data(self, value: str) -> str:
|
||||
"""Verify that input is a valid PEM RSA Key"""
|
||||
"""Verify that input is a valid PEM Key"""
|
||||
# Since this field is optional, data can be empty.
|
||||
if value != "":
|
||||
try:
|
||||
@ -97,6 +108,7 @@ class CertificateKeyPairSerializer(ModelSerializer):
|
||||
"cert_expiry",
|
||||
"cert_subject",
|
||||
"private_key_available",
|
||||
"private_key_type",
|
||||
"certificate_download_url",
|
||||
"private_key_download_url",
|
||||
"managed",
|
||||
@ -141,9 +153,11 @@ class CertificateKeyPairFilter(FilterSet):
|
||||
class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
|
||||
"""CertificateKeyPair Viewset"""
|
||||
|
||||
queryset = CertificateKeyPair.objects.exclude(managed__isnull=False)
|
||||
queryset = CertificateKeyPair.objects.exclude(managed=MANAGED_KEY)
|
||||
serializer_class = CertificateKeyPairSerializer
|
||||
filterset_class = CertificateKeyPairFilter
|
||||
ordering = ["name"]
|
||||
search_fields = ["name"]
|
||||
|
||||
@permission_required(None, ["authentik_crypto.add_certificatekeypair"])
|
||||
@extend_schema(
|
||||
@ -189,7 +203,7 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
|
||||
secret=certificate,
|
||||
type="certificate",
|
||||
).from_http(request)
|
||||
if "download" in request._request.GET:
|
||||
if "download" in request.query_params:
|
||||
# Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
|
||||
response = HttpResponse(
|
||||
certificate.certificate_data, content_type="application/x-pem-file"
|
||||
@ -220,7 +234,7 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
|
||||
secret=certificate,
|
||||
type="private_key",
|
||||
).from_http(request)
|
||||
if "download" in request._request.GET:
|
||||
if "download" in request.query_params:
|
||||
# Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
|
||||
response = HttpResponse(certificate.key_data, content_type="application/x-pem-file")
|
||||
response[
|
||||
|
||||
@ -13,3 +13,4 @@ class AuthentikCryptoConfig(AppConfig):
|
||||
|
||||
def ready(self):
|
||||
import_module("authentik.crypto.managed")
|
||||
import_module("authentik.crypto.tasks")
|
||||
|
||||
@ -44,7 +44,7 @@ class CertificateBuilder:
|
||||
"""Build self-signed certificate"""
|
||||
one_day = datetime.timedelta(1, 0, 0)
|
||||
self.__private_key = rsa.generate_private_key(
|
||||
public_exponent=65537, key_size=2048, backend=default_backend()
|
||||
public_exponent=65537, key_size=4096, backend=default_backend()
|
||||
)
|
||||
self.__public_key = self.__private_key.public_key()
|
||||
alt_names: list[x509.GeneralName] = [x509.DNSName(x) for x in subject_alt_names or []]
|
||||
|
||||
@ -6,15 +6,23 @@ from uuid import uuid4
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.asymmetric.ec import (
|
||||
EllipticCurvePrivateKey,
|
||||
EllipticCurvePublicKey,
|
||||
)
|
||||
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey, Ed25519PublicKey
|
||||
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey
|
||||
from cryptography.hazmat.primitives.serialization import load_pem_private_key
|
||||
from cryptography.x509 import Certificate, load_pem_x509_certificate
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.lib.models import CreatedUpdatedModel
|
||||
from authentik.managed.models import ManagedModel
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class CertificateKeyPair(ManagedModel, CreatedUpdatedModel):
|
||||
"""CertificateKeyPair that can be used for signing or encrypting if `key_data`
|
||||
@ -33,8 +41,8 @@ class CertificateKeyPair(ManagedModel, CreatedUpdatedModel):
|
||||
)
|
||||
|
||||
_cert: Optional[Certificate] = None
|
||||
_private_key: Optional[RSAPrivateKey] = None
|
||||
_public_key: Optional[RSAPublicKey] = None
|
||||
_private_key: Optional[RSAPrivateKey | EllipticCurvePrivateKey | Ed25519PrivateKey] = None
|
||||
_public_key: Optional[RSAPublicKey | EllipticCurvePublicKey | Ed25519PublicKey] = None
|
||||
|
||||
@property
|
||||
def certificate(self) -> Certificate:
|
||||
@ -46,23 +54,26 @@ class CertificateKeyPair(ManagedModel, CreatedUpdatedModel):
|
||||
return self._cert
|
||||
|
||||
@property
|
||||
def public_key(self) -> Optional[RSAPublicKey]:
|
||||
def public_key(self) -> Optional[RSAPublicKey | EllipticCurvePublicKey | Ed25519PublicKey]:
|
||||
"""Get public key of the private key"""
|
||||
if not self._public_key:
|
||||
self._public_key = self.private_key.public_key()
|
||||
return self._public_key
|
||||
|
||||
@property
|
||||
def private_key(self) -> Optional[RSAPrivateKey]:
|
||||
def private_key(
|
||||
self,
|
||||
) -> Optional[RSAPrivateKey | EllipticCurvePrivateKey | Ed25519PrivateKey]:
|
||||
"""Get python cryptography PrivateKey instance"""
|
||||
if not self._private_key and self._private_key != "":
|
||||
if not self._private_key and self.key_data != "":
|
||||
try:
|
||||
self._private_key = load_pem_private_key(
|
||||
str.encode("\n".join([x.strip() for x in self.key_data.split("\n")])),
|
||||
password=None,
|
||||
backend=default_backend(),
|
||||
)
|
||||
except ValueError:
|
||||
except ValueError as exc:
|
||||
LOGGER.warning(exc)
|
||||
return None
|
||||
return self._private_key
|
||||
|
||||
|
||||
authentik/crypto/settings.py (new file, 10 lines added)
@@ -0,0 +1,10 @@
"""Crypto task Settings"""
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "crypto_certificate_discovery": {
        "task": "authentik.crypto.tasks.certificate_discovery",
        "schedule": crontab(minute="*/5"),
        "options": {"queue": "authentik_scheduled"},
    },
}
|
||||
authentik/crypto/tasks.py (new file, 95 lines added)
@@ -0,0 +1,95 @@
"""Crypto tasks"""
from glob import glob
from pathlib import Path

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_pem_private_key
from cryptography.x509.base import load_pem_x509_certificate
from django.utils.translation import gettext_lazy as _
from structlog.stdlib import get_logger

from authentik.crypto.models import CertificateKeyPair
from authentik.events.monitored_tasks import (
    MonitoredTask,
    TaskResult,
    TaskResultStatus,
    prefill_task,
)
from authentik.lib.config import CONFIG
from authentik.root.celery import CELERY_APP

LOGGER = get_logger()

MANAGED_DISCOVERED = "goauthentik.io/crypto/discovered/%s"


def ensure_private_key_valid(body: str):
    """Attempt loading of a PEM Private key without password"""
    load_pem_private_key(
        str.encode("\n".join([x.strip() for x in body.split("\n")])),
        password=None,
        backend=default_backend(),
    )
    return body


def ensure_certificate_valid(body: str):
    """Attempt loading of a PEM-encoded certificate"""
    load_pem_x509_certificate(body.encode("utf-8"), default_backend())
    return body


@CELERY_APP.task(bind=True, base=MonitoredTask)
@prefill_task
def certificate_discovery(self: MonitoredTask):
    """Discover, import and update certificates from the filesystem"""
    certs = {}
    private_keys = {}
    discovered = 0
    for file in glob(CONFIG.y("cert_discovery_dir") + "/**", recursive=True):
        path = Path(file)
        if not path.exists():
            continue
        if path.is_dir():
            continue
        # For certbot setups, we want to ignore archive.
        if "archive" in file:
            continue
        # Support certbot's directory structure
        if path.name in ["fullchain.pem", "privkey.pem"]:
            cert_name = path.parent.name
        else:
            cert_name = path.name.replace(path.suffix, "")
        try:
            with open(path, "r+", encoding="utf-8") as _file:
                body = _file.read()
                if "PRIVATE KEY" in body:
                    private_keys[cert_name] = ensure_private_key_valid(body)
                else:
                    certs[cert_name] = ensure_certificate_valid(body)
        except (OSError, ValueError) as exc:
            LOGGER.warning("Failed to open file or invalid format", exc=exc, file=path)
        discovered += 1
    for name, cert_data in certs.items():
        cert = CertificateKeyPair.objects.filter(managed=MANAGED_DISCOVERED % name).first()
        if not cert:
            cert = CertificateKeyPair(
                name=name,
                managed=MANAGED_DISCOVERED % name,
            )
        dirty = False
        if cert.certificate_data != cert_data:
            cert.certificate_data = cert_data
            dirty = True
        if name in private_keys:
            if cert.key_data != private_keys[name]:
                cert.key_data = private_keys[name]
                dirty = True
        if dirty:
            cert.save()
    self.set_status(
        TaskResult(
            TaskResultStatus.SUCCESSFUL,
            messages=[_("Successfully imported %(count)d files." % {"count": discovered})],
        )
    )
|
||||
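As a rough illustration of the directory layout the discovery task handles (paths are hypothetical; the config key is the `cert_discovery_dir` used above):

# <cert_discovery_dir>/my-cert.pem                -> CertificateKeyPair "my-cert" (certificate)
# <cert_discovery_dir>/my-cert.key                -> private key attached to "my-cert"
# <cert_discovery_dir>/example.org/fullchain.pem  -> certbot layout: keypair named "example.org"
# <cert_discovery_dir>/example.org/privkey.pem
# <cert_discovery_dir>/archive/...                -> ignored
#
# The beat schedule above runs the task every 5 minutes; it can also be queued manually:
from authentik.crypto.tasks import certificate_discovery

certificate_discovery.delay()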
@ -1,25 +1,37 @@
|
||||
"""Crypto tests"""
|
||||
import datetime
|
||||
from os import makedirs
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
from django.test import TestCase
|
||||
from django.urls import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.api.used_by import DeleteAction
|
||||
from authentik.core.models import User
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
|
||||
from authentik.crypto.api import CertificateKeyPairSerializer
|
||||
from authentik.crypto.builder import CertificateBuilder
|
||||
from authentik.crypto.models import CertificateKeyPair
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.crypto.tasks import MANAGED_DISCOVERED, certificate_discovery
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.generators import generate_key
|
||||
from authentik.providers.oauth2.models import OAuth2Provider
|
||||
|
||||
|
||||
class TestCrypto(TestCase):
|
||||
class TestCrypto(APITestCase):
|
||||
"""Test Crypto validation"""
|
||||
|
||||
def test_model_private(self):
|
||||
"""Test model private key"""
|
||||
cert = CertificateKeyPair.objects.create(
|
||||
name="test",
|
||||
certificate_data="foo",
|
||||
key_data="foo",
|
||||
)
|
||||
self.assertIsNone(cert.private_key)
|
||||
|
||||
def test_serializer(self):
|
||||
"""Test API Validation"""
|
||||
keypair = CertificateKeyPair.objects.first()
|
||||
keypair = create_test_cert()
|
||||
self.assertTrue(
|
||||
CertificateKeyPairSerializer(
|
||||
data={
|
||||
@ -54,10 +66,38 @@ class TestCrypto(TestCase):
|
||||
self.assertEqual(instance.name, "test-cert")
|
||||
self.assertEqual((instance.certificate.not_valid_after - now).days, 2)
|
||||
|
||||
def test_builder_api(self):
|
||||
"""Test Builder (via API)"""
|
||||
self.client.force_login(create_test_admin_user())
|
||||
self.client.post(
|
||||
reverse("authentik_api:certificatekeypair-generate"),
|
||||
data={"common_name": "foo", "subject_alt_name": "bar,baz", "validity_days": 3},
|
||||
)
|
||||
self.assertTrue(CertificateKeyPair.objects.filter(name="foo").exists())
|
||||
|
||||
def test_builder_api_invalid(self):
|
||||
"""Test Builder (via API) (invalid)"""
|
||||
self.client.force_login(create_test_admin_user())
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:certificatekeypair-generate"),
|
||||
data={},
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_list(self):
|
||||
"""Test API List"""
|
||||
self.client.force_login(create_test_admin_user())
|
||||
response = self.client.get(
|
||||
reverse(
|
||||
"authentik_api:certificatekeypair-list",
|
||||
)
|
||||
)
|
||||
self.assertEqual(200, response.status_code)
|
||||
|
||||
def test_certificate_download(self):
|
||||
"""Test certificate export (download)"""
|
||||
self.client.force_login(User.objects.get(username="akadmin"))
|
||||
keypair = CertificateKeyPair.objects.first()
|
||||
self.client.force_login(create_test_admin_user())
|
||||
keypair = create_test_cert()
|
||||
response = self.client.get(
|
||||
reverse(
|
||||
"authentik_api:certificatekeypair-view-certificate",
|
||||
@ -77,8 +117,8 @@ class TestCrypto(TestCase):
|
||||
|
||||
def test_private_key_download(self):
|
||||
"""Test private_key export (download)"""
|
||||
self.client.force_login(User.objects.get(username="akadmin"))
|
||||
keypair = CertificateKeyPair.objects.first()
|
||||
self.client.force_login(create_test_admin_user())
|
||||
keypair = create_test_cert()
|
||||
response = self.client.get(
|
||||
reverse(
|
||||
"authentik_api:certificatekeypair-view-private-key",
|
||||
@ -98,15 +138,15 @@ class TestCrypto(TestCase):
|
||||
|
||||
def test_used_by(self):
|
||||
"""Test used_by endpoint"""
|
||||
self.client.force_login(User.objects.get(username="akadmin"))
|
||||
keypair = CertificateKeyPair.objects.first()
|
||||
self.client.force_login(create_test_admin_user())
|
||||
keypair = create_test_cert()
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name="test",
|
||||
client_id="test",
|
||||
client_secret=generate_key(),
|
||||
authorization_flow=Flow.objects.first(),
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris="http://localhost",
|
||||
rsa_key=CertificateKeyPair.objects.first(),
|
||||
signing_key=keypair,
|
||||
)
|
||||
response = self.client.get(
|
||||
reverse(
|
||||
@ -127,3 +167,36 @@ class TestCrypto(TestCase):
|
||||
}
|
||||
],
|
||||
)
|
||||
|
||||
def test_discovery(self):
    """Test certificate discovery"""
    builder = CertificateBuilder()
    builder.common_name = "test-cert"
    with self.assertRaises(ValueError):
        builder.save()
    builder.build(
        subject_alt_names=[],
        validity_days=3,
    )
    with TemporaryDirectory() as temp_dir:
        with open(f"{temp_dir}/foo.pem", "w+", encoding="utf-8") as _cert:
            _cert.write(builder.certificate)
        with open(f"{temp_dir}/foo.key", "w+", encoding="utf-8") as _key:
            _key.write(builder.private_key)
        makedirs(f"{temp_dir}/foo.bar", exist_ok=True)
        with open(f"{temp_dir}/foo.bar/fullchain.pem", "w+", encoding="utf-8") as _cert:
            _cert.write(builder.certificate)
        with open(f"{temp_dir}/foo.bar/privkey.pem", "w+", encoding="utf-8") as _key:
            _key.write(builder.private_key)
        with CONFIG.patch("cert_discovery_dir", temp_dir):
            # pyright: reportGeneralTypeIssues=false
            certificate_discovery()  # pylint: disable=no-value-for-parameter
            keypair: CertificateKeyPair = CertificateKeyPair.objects.filter(
                managed=MANAGED_DISCOVERED % "foo"
            ).first()
            self.assertIsNotNone(keypair)
            self.assertIsNotNone(keypair.certificate)
            self.assertIsNotNone(keypair.private_key)
            self.assertTrue(
                CertificateKeyPair.objects.filter(managed=MANAGED_DISCOVERED % "foo.bar").exists()
            )
|
||||
|
||||
@ -1,4 +1,6 @@
|
||||
"""Events API Views"""
|
||||
from json import loads
|
||||
|
||||
import django_filters
|
||||
from django.db.models.aggregates import Count
|
||||
from django.db.models.fields.json import KeyTextTransform
|
||||
@ -12,6 +14,7 @@ from rest_framework.response import Response
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.admin.api.metrics import CoordinateSerializer
|
||||
from authentik.core.api.utils import PassiveSerializer, TypeCreateSerializer
|
||||
from authentik.events.models import Event, EventAction
|
||||
|
||||
@ -110,13 +113,20 @@ class EventViewSet(ModelViewSet):
|
||||
@extend_schema(
|
||||
methods=["GET"],
|
||||
responses={200: EventTopPerUserSerializer(many=True)},
|
||||
filters=[],
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
"action",
|
||||
type=OpenApiTypes.STR,
|
||||
location=OpenApiParameter.QUERY,
|
||||
required=False,
|
||||
),
|
||||
OpenApiParameter(
|
||||
"top_n",
|
||||
type=OpenApiTypes.INT,
|
||||
location=OpenApiParameter.QUERY,
|
||||
required=False,
|
||||
)
|
||||
),
|
||||
],
|
||||
)
|
||||
@action(detail=False, methods=["GET"], pagination_class=None)
|
||||
@ -137,6 +147,40 @@ class EventViewSet(ModelViewSet):
|
||||
.order_by("-counted_events")[:top_n]
|
||||
)
|
||||
|
||||
@extend_schema(
|
||||
methods=["GET"],
|
||||
responses={200: CoordinateSerializer(many=True)},
|
||||
filters=[],
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
"action",
|
||||
type=OpenApiTypes.STR,
|
||||
location=OpenApiParameter.QUERY,
|
||||
required=False,
|
||||
),
|
||||
OpenApiParameter(
|
||||
"query",
|
||||
type=OpenApiTypes.STR,
|
||||
location=OpenApiParameter.QUERY,
|
||||
required=False,
|
||||
),
|
||||
],
|
||||
)
|
||||
@action(detail=False, methods=["GET"], pagination_class=None)
|
||||
def per_month(self, request: Request):
|
||||
"""Get the count of events per month"""
|
||||
filtered_action = request.query_params.get("action", EventAction.LOGIN)
|
||||
try:
|
||||
query = loads(request.query_params.get("query", "{}"))
|
||||
except ValueError:
|
||||
return Response(status=400)
|
||||
return Response(
|
||||
get_objects_for_user(request.user, "authentik_events.view_event")
|
||||
.filter(action=filtered_action)
|
||||
.filter(**query)
|
||||
.get_events_per_day()
|
||||
)
|
||||
|
||||
@extend_schema(responses={200: TypeCreateSerializer(many=True)})
|
||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
||||
def actions(self, request: Request) -> Response:
|
||||
|
||||
@ -15,12 +15,14 @@ from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.events.models import (
|
||||
Event,
|
||||
Notification,
|
||||
NotificationSeverity,
|
||||
NotificationTransport,
|
||||
NotificationTransportError,
|
||||
TransportMode,
|
||||
)
|
||||
from authentik.events.utils import get_user
|
||||
|
||||
|
||||
class NotificationTransportSerializer(ModelSerializer):
|
||||
@ -86,6 +88,12 @@ class NotificationTransportViewSet(UsedByMixin, ModelViewSet):
|
||||
severity=NotificationSeverity.NOTICE,
|
||||
body=f"Test Notification from transport {transport.name}",
|
||||
user=request.user,
|
||||
event=Event(
|
||||
action="Test",
|
||||
user=get_user(request.user),
|
||||
app=self.__class__.__module__,
|
||||
context={"foo": "bar"},
|
||||
),
|
||||
)
|
||||
try:
|
||||
response = NotificationTransportTestSerializer(
|
||||
|
||||
@ -7,6 +7,7 @@ from typing import Optional, TypedDict
|
||||
from geoip2.database import Reader
|
||||
from geoip2.errors import GeoIP2Error
|
||||
from geoip2.models import City
|
||||
from sentry_sdk.hub import Hub
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.lib.config import CONFIG
|
||||
@ -34,12 +35,11 @@ class GeoIPReader:
|
||||
|
||||
def __open(self):
|
||||
"""Get GeoIP Reader, if configured, otherwise none"""
|
||||
path = CONFIG.y("authentik.geoip")
|
||||
path = CONFIG.y("geoip")
|
||||
if path == "" or not path:
|
||||
return
|
||||
try:
|
||||
reader = Reader(path)
|
||||
self.__reader = reader
|
||||
self.__reader = Reader(path)
|
||||
self.__last_mtime = stat(path).st_mtime
|
||||
LOGGER.info("Loaded GeoIP database", last_write=self.__last_mtime)
|
||||
except OSError as exc:
|
||||
@ -62,13 +62,17 @@ class GeoIPReader:
|
||||
|
||||
def city(self, ip_address: str) -> Optional[City]:
|
||||
"""Wrapper for Reader.city"""
|
||||
if not self.enabled:
|
||||
return None
|
||||
self.__check_expired()
|
||||
try:
|
||||
return self.__reader.city(ip_address)
|
||||
except (GeoIP2Error, ValueError):
|
||||
return None
|
||||
with Hub.current.start_span(
|
||||
op="authentik.events.geo.city",
|
||||
description=ip_address,
|
||||
):
|
||||
if not self.enabled:
|
||||
return None
|
||||
self.__check_expired()
|
||||
try:
|
||||
return self.__reader.city(ip_address)
|
||||
except (GeoIP2Error, ValueError):
|
||||
return None
|
||||
|
||||
def city_dict(self, ip_address: str) -> Optional[GeoIPDict]:
|
||||
"""Wrapper for self.city that returns a dict"""
|
||||
|
||||
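A short usage sketch for context (the IP address is illustrative):

from authentik.events.geo import GEOIP_READER

# Returns None when no GeoIP database is configured (config key is now "geoip")
# or when the lookup fails; each lookup is traced as the Sentry span
# "authentik.events.geo.city".
geo = GEOIP_READER.city_dict("192.0.2.1")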
@ -4,7 +4,6 @@ import uuid
|
||||
from datetime import timedelta
|
||||
from typing import Iterable
|
||||
|
||||
import django.core.validators
|
||||
import django.db.models.deletion
|
||||
from django.apps.registry import Apps
|
||||
from django.conf import settings
|
||||
@ -12,6 +11,7 @@ from django.db import migrations, models
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
|
||||
import authentik.events.models
|
||||
import authentik.lib.models
|
||||
from authentik.events.models import EventAction, NotificationSeverity, TransportMode
|
||||
|
||||
|
||||
@ -19,7 +19,7 @@ def convert_user_to_json(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
Event = apps.get_model("authentik_events", "Event")
|
||||
|
||||
db_alias = schema_editor.connection.alias
|
||||
for event in Event.objects.all():
|
||||
for event in Event.objects.using(db_alias).all():
|
||||
event.delete()
|
||||
# Because event objects cannot be updated, we have to re-create them
|
||||
event.pk = None
|
||||
@ -314,169 +314,10 @@ class Migration(migrations.Migration):
|
||||
old_name="user_json",
|
||||
new_name="user",
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("sign_up", "Sign Up"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("invitation_created", "Invite Created"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("invitation_created", "Invite Created"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="event",
|
||||
name="date",
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("token_view", "Token View"),
|
||||
("invitation_created", "Invite Created"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("token_view", "Token View"),
|
||||
("invitation_created", "Invite Created"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("token_view", "Token View"),
|
||||
("invitation_created", "Invite Created"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("update_available", "Update Available"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("token_view", "Token View"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("configuration_error", "Configuration Error"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("update_available", "Update Available"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="NotificationTransport",
|
||||
fields=[
|
||||
@ -610,68 +451,6 @@ class Migration(migrations.Migration):
|
||||
help_text="Only send notification once, for example when sending a webhook into a chat channel.",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("token_view", "Token View"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("system_task_execution", "System Task Execution"),
|
||||
("system_task_exception", "System Task Exception"),
|
||||
("configuration_error", "Configuration Error"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("update_available", "Update Available"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("secret_view", "Secret View"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("system_task_execution", "System Task Execution"),
|
||||
("system_task_exception", "System Task Exception"),
|
||||
("configuration_error", "Configuration Error"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("update_available", "Update Available"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
code=token_view_to_secret_view,
|
||||
),
|
||||
@ -688,76 +467,11 @@ class Migration(migrations.Migration):
|
||||
migrations.RunPython(
|
||||
code=update_expires,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("secret_view", "Secret View"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("system_task_execution", "System Task Execution"),
|
||||
("system_task_exception", "System Task Exception"),
|
||||
("configuration_error", "Configuration Error"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("email_sent", "Email Sent"),
|
||||
("update_available", "Update Available"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="event",
|
||||
name="tenant",
|
||||
field=models.JSONField(blank=True, default=authentik.events.models.default_tenant),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("secret_view", "Secret View"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("system_task_execution", "System Task Execution"),
|
||||
("system_task_exception", "System Task Exception"),
|
||||
("system_exception", "System Exception"),
|
||||
("configuration_error", "Configuration Error"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("email_sent", "Email Sent"),
|
||||
("update_available", "Update Available"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
@ -776,6 +490,7 @@ class Migration(migrations.Migration):
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("flow_execution", "Flow Execution"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
@ -826,6 +541,8 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="notificationtransport",
|
||||
name="webhook_url",
|
||||
field=models.TextField(blank=True, validators=[django.core.validators.URLValidator()]),
|
||||
field=models.TextField(
|
||||
blank=True, validators=[authentik.lib.models.DomainlessURLValidator()]
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
@ -10,7 +10,7 @@ def convert_user_to_json(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
Event = apps.get_model("authentik_events", "Event")
|
||||
|
||||
db_alias = schema_editor.connection.alias
|
||||
for event in Event.objects.all():
|
||||
for event in Event.objects.using(db_alias).all():
|
||||
event.delete()
|
||||
# Because event objects cannot be updated, we have to re-create them
|
||||
event.pk = None
|
||||
|
||||
@ -1,8 +1,9 @@
|
||||
# Generated by Django 3.2.7 on 2021-10-04 15:31
|
||||
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
|
||||
import authentik.lib.models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@ -14,6 +15,8 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="notificationtransport",
|
||||
name="webhook_url",
|
||||
field=models.TextField(blank=True, validators=[django.core.validators.URLValidator()]),
|
||||
field=models.TextField(
|
||||
blank=True, validators=[authentik.lib.models.DomainlessURLValidator()]
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
@ -1,13 +1,20 @@
|
||||
"""authentik events models"""
|
||||
import time
|
||||
from collections import Counter
|
||||
from datetime import timedelta
|
||||
from inspect import getmodule, stack
|
||||
from inspect import currentframe
|
||||
from smtplib import SMTPException
|
||||
from typing import TYPE_CHECKING, Optional, Type, Union
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
from uuid import uuid4
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.validators import URLValidator
|
||||
from django.db import models
|
||||
from django.db.models import Count, ExpressionWrapper, F
|
||||
from django.db.models.fields import DurationField
|
||||
from django.db.models.functions import ExtractHour
|
||||
from django.db.models.functions.datetime import ExtractDay
|
||||
from django.db.models.manager import Manager
|
||||
from django.db.models.query import QuerySet
|
||||
from django.http import HttpRequest
|
||||
from django.http.request import QueryDict
|
||||
from django.utils.timezone import now
|
||||
@ -20,6 +27,7 @@ from authentik.core.middleware import SESSION_IMPERSONATE_ORIGINAL_USER, SESSION
|
||||
from authentik.core.models import ExpiringModel, Group, PropertyMapping, User
|
||||
from authentik.events.geo import GEOIP_READER
|
||||
from authentik.events.utils import cleanse_dict, get_user, model_to_dict, sanitize_dict
|
||||
from authentik.lib.models import DomainlessURLValidator
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.lib.utils.http import get_client_ip, get_http_session
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
@ -70,6 +78,7 @@ class EventAction(models.TextChoices):
|
||||
IMPERSONATION_STARTED = "impersonation_started"
|
||||
IMPERSONATION_ENDED = "impersonation_ended"
|
||||
|
||||
FLOW_EXECUTION = "flow_execution"
|
||||
POLICY_EXECUTION = "policy_execution"
|
||||
POLICY_EXCEPTION = "policy_exception"
|
||||
PROPERTY_MAPPING_EXCEPTION = "property_mapping_exception"
|
||||
@ -90,6 +99,72 @@ class EventAction(models.TextChoices):
|
||||
CUSTOM_PREFIX = "custom_"
|
||||
|
||||
|
||||
class EventQuerySet(QuerySet):
    """Custom events query set with helper functions"""

    def get_events_per_hour(self) -> list[dict[str, int]]:
        """Get event count by hour in the last day, fill with zeros"""
        date_from = now() - timedelta(days=1)
        result = (
            self.filter(created__gte=date_from)
            .annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField()))
            .annotate(age_hours=ExtractHour("age"))
            .values("age_hours")
            .annotate(count=Count("pk"))
            .order_by("age_hours")
        )
        data = Counter({int(d["age_hours"]): d["count"] for d in result})
        results = []
        _now = now()
        for hour in range(0, -24, -1):
            results.append(
                {
                    "x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000,
                    "y_cord": data[hour * -1],
                }
            )
        return results

    def get_events_per_day(self) -> list[dict[str, int]]:
        """Get event count by hour in the last day, fill with zeros"""
        date_from = now() - timedelta(weeks=4)
        result = (
            self.filter(created__gte=date_from)
            .annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField()))
            .annotate(age_days=ExtractDay("age"))
            .values("age_days")
            .annotate(count=Count("pk"))
            .order_by("age_days")
        )
        data = Counter({int(d["age_days"]): d["count"] for d in result})
        results = []
        _now = now()
        for day in range(0, -30, -1):
            results.append(
                {
                    "x_cord": time.mktime((_now + timedelta(days=day)).timetuple()) * 1000,
                    "y_cord": data[day * -1],
                }
            )
        return results


class EventManager(Manager):
    """Custom helper methods for Events"""

    def get_queryset(self) -> QuerySet:
        """use custom queryset"""
        return EventQuerySet(self.model, using=self._db)

    def get_events_per_hour(self) -> list[dict[str, int]]:
        """Wrap method from queryset"""
        return self.get_queryset().get_events_per_hour()

    def get_events_per_day(self) -> list[dict[str, int]]:
        """Wrap method from queryset"""
        return self.get_queryset().get_events_per_day()
|
||||
|
||||
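For context, a small sketch of how the new manager/queryset helpers are called; the filter value is illustrative:

from authentik.events.models import Event, EventAction

# Both the manager and filtered querysets expose the aggregation helpers,
# which is what the per_month API action above builds on:
per_hour = Event.objects.get_events_per_hour()
per_day = Event.objects.filter(action=EventAction.LOGIN).get_events_per_day()
# Each entry is {"x_cord": <unix timestamp in ms>, "y_cord": <event count>}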
|
||||
class Event(ExpiringModel):
|
||||
"""An individual Audit/Metrics/Notification/Error Event"""
|
||||
|
||||
@ -105,6 +180,8 @@ class Event(ExpiringModel):
|
||||
# Shadow the expires attribute from ExpiringModel to override the default duration
|
||||
expires = models.DateTimeField(default=default_event_duration)
|
||||
|
||||
objects = EventManager()
|
||||
|
||||
@staticmethod
|
||||
def _get_app_from_request(request: HttpRequest) -> str:
|
||||
if not isinstance(request, HttpRequest):
|
||||
@ -113,16 +190,17 @@ class Event(ExpiringModel):
|
||||
|
||||
@staticmethod
|
||||
def new(
|
||||
action: Union[str, EventAction],
|
||||
action: str | EventAction,
|
||||
app: Optional[str] = None,
|
||||
_inspect_offset: int = 1,
|
||||
**kwargs,
|
||||
) -> "Event":
|
||||
"""Create new Event instance from arguments. Instance is NOT saved."""
|
||||
if not isinstance(action, EventAction):
|
||||
action = EventAction.CUSTOM_PREFIX + action
|
||||
if not app:
|
||||
app = getmodule(stack()[_inspect_offset][0]).__name__
|
||||
current = currentframe()
|
||||
parent = current.f_back
|
||||
app = parent.f_globals["__name__"]
|
||||
cleaned_kwargs = cleanse_dict(sanitize_dict(kwargs))
|
||||
event = Event(action=action, app=app, context=cleaned_kwargs)
|
||||
return event
|
||||
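A hedged sketch of what the frame-based lookup changes for callers (the module name is hypothetical):

# hypothetically called from authentik/some_app/views.py:
from authentik.events.models import Event

event = Event.new("demo_action", foo="bar")
# app is now read from the caller's frame globals instead of inspect.stack(),
# so event.app == "authentik.some_app.views" and event.action == "custom_demo_action".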
@ -224,7 +302,7 @@ class NotificationTransport(models.Model):
|
||||
name = models.TextField(unique=True)
|
||||
mode = models.TextField(choices=TransportMode.choices)
|
||||
|
||||
webhook_url = models.TextField(blank=True, validators=[URLValidator()])
|
||||
webhook_url = models.TextField(blank=True, validators=[DomainlessURLValidator()])
|
||||
webhook_mapping = models.ForeignKey(
|
||||
"NotificationWebhookMapping", on_delete=models.SET_DEFAULT, null=True, default=None
|
||||
)
|
||||
@ -439,7 +517,7 @@ class NotificationWebhookMapping(PropertyMapping):
|
||||
return "ak-property-mapping-notification-form"
|
||||
|
||||
@property
|
||||
def serializer(self) -> Type["Serializer"]:
|
||||
def serializer(self) -> type["Serializer"]:
|
||||
from authentik.events.api.notification_mapping import NotificationWebhookMappingSerializer
|
||||
|
||||
return NotificationWebhookMappingSerializer
|
||||
|
||||
@ -46,7 +46,7 @@ class TaskResult:
|
||||
|
||||
def with_error(self, exc: Exception) -> "TaskResult":
|
||||
"""Since errors might not always be pickle-able, set the traceback"""
|
||||
self.messages.extend(exception_to_string(exc).splitlines())
|
||||
self.messages.append(str(exc))
|
||||
return self
|
||||
|
||||
|
||||
@ -112,30 +112,6 @@ class TaskInfo:
|
||||
cache.set(key, self, timeout=timeout_hours * 60 * 60)
|
||||
|
||||
|
||||
def prefill_task():
|
||||
"""Ensure a task's details are always in cache, so it can always be triggered via API"""
|
||||
|
||||
def inner_wrap(func):
|
||||
status = TaskInfo.by_name(func.__name__)
|
||||
if status:
|
||||
return func
|
||||
TaskInfo(
|
||||
task_name=func.__name__,
|
||||
task_description=func.__doc__,
|
||||
result=TaskResult(TaskResultStatus.UNKNOWN, messages=[_("Task has not been run yet.")]),
|
||||
task_call_module=func.__module__,
|
||||
task_call_func=func.__name__,
|
||||
# We don't have real values for these attributes but they cannot be null
|
||||
start_timestamp=default_timer(),
|
||||
finish_timestamp=default_timer(),
|
||||
finish_time=datetime.now(),
|
||||
).save(86400)
|
||||
LOGGER.debug("prefilled task", task_name=func.__name__)
|
||||
return func
|
||||
|
||||
return inner_wrap
|
||||
|
||||
|
||||
class MonitoredTask(Task):
|
||||
"""Task which can save its state to the cache"""
|
||||
|
||||
@ -210,5 +186,21 @@ class MonitoredTask(Task):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
for task in TaskInfo.all().values():
|
||||
task.set_prom_metrics()
|
||||
def prefill_task(func):
|
||||
"""Ensure a task's details are always in cache, so it can always be triggered via API"""
|
||||
status = TaskInfo.by_name(func.__name__)
|
||||
if status:
|
||||
return func
|
||||
TaskInfo(
|
||||
task_name=func.__name__,
|
||||
task_description=func.__doc__,
|
||||
result=TaskResult(TaskResultStatus.UNKNOWN, messages=[_("Task has not been run yet.")]),
|
||||
task_call_module=func.__module__,
|
||||
task_call_func=func.__name__,
|
||||
# We don't have real values for these attributes but they cannot be null
|
||||
start_timestamp=default_timer(),
|
||||
finish_timestamp=default_timer(),
|
||||
finish_time=datetime.now(),
|
||||
).save(86400)
|
||||
LOGGER.debug("prefilled task", task_name=func.__name__)
|
||||
return func
|
||||
|
||||
@ -3,14 +3,14 @@ from threading import Thread
|
||||
from typing import Any, Optional
|
||||
|
||||
from django.contrib.auth.signals import user_logged_in, user_logged_out, user_login_failed
|
||||
from django.db.models.signals import post_save
|
||||
from django.db.models.signals import post_save, pre_delete
|
||||
from django.dispatch import receiver
|
||||
from django.http import HttpRequest
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.core.signals import password_changed
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.events.tasks import event_notification_handler
|
||||
from authentik.events.tasks import event_notification_handler, gdpr_cleanup
|
||||
from authentik.flows.planner import PLAN_CONTEXT_SOURCE, FlowPlan
|
||||
from authentik.flows.views.executor import SESSION_KEY_PLAN
|
||||
from authentik.stages.invitation.models import Invitation
|
||||
@ -108,3 +108,10 @@ def on_password_changed(sender, user: User, password: str, **_):
|
||||
def event_post_save_notification(sender, instance: Event, **_):
|
||||
"""Start task to check if any policies trigger an notification on this event"""
|
||||
event_notification_handler.delay(instance.event_uuid.hex)
|
||||
|
||||
|
||||
@receiver(pre_delete, sender=User)
|
||||
# pylint: disable=unused-argument
|
||||
def event_user_pre_delete_cleanup(sender, instance: User, **_):
|
||||
"""If gdpr_compliance is enabled, remove all the user's events"""
|
||||
gdpr_cleanup.delay(instance.pk)
|
||||
|
||||
@ -106,3 +106,11 @@ def notification_transport(self: MonitoredTask, notification_pk: int, transport_
|
||||
except NotificationTransportError as exc:
|
||||
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
|
||||
raise exc
|
||||
|
||||
|
||||
@CELERY_APP.task()
|
||||
def gdpr_cleanup(user_pk: int):
|
||||
"""cleanup events from gdpr_compliance"""
|
||||
events = Event.objects.filter(user__pk=user_pk)
|
||||
LOGGER.debug("GDPR cleanup, removing events from user", events=events.count())
|
||||
events.delete()
|
||||
|
||||
@@ -3,7 +3,7 @@
from django.urls import reverse
from rest_framework.test import APITestCase

from authentik.core.models import User
from authentik.core.tests.utils import create_test_admin_user
from authentik.events.models import (
Event,
EventAction,
@@ -17,7 +17,7 @@ class TestEventsAPI(APITestCase):
"""Test Event API"""

def setUp(self) -> None:
self.user = User.objects.get(username="akadmin")
self.user = create_test_admin_user()
self.client.force_login(self.user)

def test_top_n(self):

@@ -3,7 +3,8 @@
from django.urls import reverse
from rest_framework.test import APITestCase

from authentik.core.models import Application, User
from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user
from authentik.events.models import Event, EventAction


@@ -12,7 +13,7 @@ class TestEventsMiddleware(APITestCase):

def setUp(self) -> None:
super().setUp()
self.user = User.objects.get(username="akadmin")
self.user = create_test_admin_user()
self.client.force_login(self.user)

def test_create(self):

@@ -41,6 +41,7 @@ class StageSerializer(ModelSerializer, MetaNameSerializer):
"component",
"verbose_name",
"verbose_name_plural",
"meta_model_name",
"flow_set",
]

@@ -89,7 +90,7 @@ class StageViewSet(
stages += list(configurable_stage.objects.all().order_by("name"))
matching_stages: list[dict] = []
for stage in stages:
user_settings = stage.ui_user_settings
user_settings = stage.ui_user_settings()
if not user_settings:
continue
user_settings.initial_data["object_uid"] = str(stage.pk)

@@ -72,7 +72,7 @@ class WithUserInfoChallenge(Challenge):
pending_user_avatar = CharField()


class AccessDeniedChallenge(Challenge):
class AccessDeniedChallenge(WithUserInfoChallenge):
"""Challenge when a flow's active stage calls `stage_invalid()`."""

error_message = CharField(required=False)

@@ -1,11 +1,14 @@
"""flow exceptions"""

from authentik.lib.sentry import SentryIgnoredException
from authentik.policies.types import PolicyResult


class FlowNonApplicableException(SentryIgnoredException):
"""Flow does not apply to current user (denied by policy)."""

policy_result: PolicyResult


class EmptyFlowException(SentryIgnoredException):
"""Flow has no stages."""

@@ -10,7 +10,7 @@ from django.test import RequestFactory
from structlog.stdlib import get_logger

from authentik import __version__
from authentik.core.models import User
from authentik.core.tests.utils import create_test_admin_user
from authentik.flows.models import Flow
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner

@@ -68,7 +68,7 @@ class Command(BaseCommand): # pragma: no cover
def benchmark_flows(self, proc_count):
"""Get full recovery link"""
flow = Flow.objects.get(slug="default-authentication-flow")
user = User.objects.get(username="akadmin")
user = create_test_admin_user()
manager = Manager()
return_dict = manager.dict()


@@ -10,8 +10,8 @@ def add_title_for_defaults(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
"default-invalidation-flow": "Default Invalidation Flow",
"default-source-enrollment": "Welcome to authentik! Please select a username.",
"default-source-authentication": "Welcome to authentik!",
"default-provider-authorization-implicit-consent": "Default Provider Authorization Flow (implicit consent)",
"default-provider-authorization-explicit-consent": "Default Provider Authorization Flow (explicit consent)",
"default-provider-authorization-implicit-consent": "Redirecting to %(app)s",
"default-provider-authorization-explicit-consent": "Redirecting to %(app)s",
"default-password-change": "Change password",
}
db_alias = schema_editor.connection.alias

authentik/flows/migrations/0020_flowtoken.py (new file, 46 lines)
@@ -0,0 +1,46 @@
# Generated by Django 3.2.9 on 2021-12-05 13:50

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("authentik_core", "0018_auto_20210330_1345_squashed_0028_alter_token_intent"),
(
"authentik_flows",
"0019_alter_flow_background_squashed_0024_alter_flow_compatibility_mode",
),
]

operations = [
migrations.CreateModel(
name="FlowToken",
fields=[
(
"token_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="authentik_core.token",
),
),
("_plan", models.TextField()),
(
"flow",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="authentik_flows.flow"
),
),
],
options={
"verbose_name": "Flow Token",
"verbose_name_plural": "Flow Tokens",
},
bases=("authentik_core.token",),
),
]

authentik/flows/migrations/0021_auto_20211227_2103.py (new file, 27 lines)
@@ -0,0 +1,27 @@
# Generated by Django 4.0 on 2021-12-27 21:03
from django.apps.registry import Apps
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor


def update_title_for_defaults(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
slug_title_map = {
"default-provider-authorization-implicit-consent": "Redirecting to %(app)s",
"default-provider-authorization-explicit-consent": "Redirecting to %(app)s",
}
db_alias = schema_editor.connection.alias
Flow = apps.get_model("authentik_flows", "Flow")
for flow in Flow.objects.using(db_alias).all():
if flow.slug not in slug_title_map:
continue
flow.title = slug_title_map[flow.slug]
flow.save()


class Migration(migrations.Migration):

dependencies = [
("authentik_flows", "0020_flowtoken"),
]

operations = [migrations.RunPython(update_title_for_defaults)]

@@ -1,5 +1,7 @@
"""Flow models"""
from typing import TYPE_CHECKING, Optional, Type
from base64 import b64decode, b64encode
from pickle import dumps, loads # nosec
from typing import TYPE_CHECKING, Optional
from uuid import uuid4

from django.db import models
@@ -9,11 +11,13 @@ from model_utils.managers import InheritanceManager
from rest_framework.serializers import BaseSerializer
from structlog.stdlib import get_logger

from authentik.core.models import Token
from authentik.core.types import UserSettingSerializer
from authentik.lib.models import InheritanceForeignKey, SerializerModel
from authentik.policies.models import PolicyBindingModel

if TYPE_CHECKING:
from authentik.flows.planner import FlowPlan
from authentik.flows.stage import StageView

LOGGER = get_logger()
@@ -59,7 +63,7 @@ class Stage(SerializerModel):
objects = InheritanceManager()

@property
def type(self) -> Type["StageView"]:
def type(self) -> type["StageView"]:
"""Return StageView class that implements logic for this stage"""
# This is a bit of a workaround, since we can't set class methods with setattr
if hasattr(self, "__in_memory_type"):
@@ -71,7 +75,6 @@ class Stage(SerializerModel):
"""Return component used to edit this object"""
raise NotImplementedError

@property
def ui_user_settings(self) -> Optional[UserSettingSerializer]:
"""Entrypoint to integrate with User settings. Can either return None if no
user settings are available, or a challenge."""
@@ -83,7 +86,7 @@ class Stage(SerializerModel):
return f"Stage {self.name}"


def in_memory_stage(view: Type["StageView"]) -> Stage:
def in_memory_stage(view: type["StageView"]) -> Stage:
"""Creates an in-memory stage instance, based on a `view` as view."""
stage = Stage()
# Because we can't pickle a locally generated function,
@@ -260,3 +263,30 @@ class ConfigurableStage(models.Model):
class Meta:

abstract = True


class FlowToken(Token):
"""Subclass of a standard Token, stores the currently active flow plan upon creation.
Can be used to later resume a flow."""

flow = models.ForeignKey(Flow, on_delete=models.CASCADE)
_plan = models.TextField()

@staticmethod
def pickle(plan) -> str:
"""Pickle into string"""
data = dumps(plan)
return b64encode(data).decode()

@property
def plan(self) -> "FlowPlan":
"""Load Flow plan from pickled version"""
return loads(b64decode(self._plan.encode())) # nosec

def __str__(self) -> str:
return f"Flow Token {super().__str__()}"

class Meta:

verbose_name = _("Flow Token")
verbose_name_plural = _("Flow Tokens")
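A rough sketch of how the new FlowToken model is meant to be used; the identifier, variable names and URL are illustrative, and the matching ?flow_token= handling appears in the executor changes later in this diff:

# Persist the currently active plan so the flow can be resumed later,
# e.g. from a link sent by email.
token = FlowToken.objects.create(
    identifier="example-flow-resume",
    user=pending_user,
    flow=flow,
    _plan=FlowToken.pickle(plan),
)
resume_url = f"{flow_url}?flow_token={token.key}"  # consumed via QS_KEY_TOKEN
# Later, the plan property unpickles the stored plan again:
restored_plan = token.plan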
@@ -24,6 +24,9 @@ PLAN_CONTEXT_SSO = "is_sso"
PLAN_CONTEXT_REDIRECT = "redirect"
PLAN_CONTEXT_APPLICATION = "application"
PLAN_CONTEXT_SOURCE = "source"
# Is set by the Flow Planner when a FlowToken was used, and the currently active flow plan
# was restored.
PLAN_CONTEXT_IS_RESTORED = "is_restored"
GAUGE_FLOWS_CACHED = UpdatingGauge(
"authentik_flows_cached",
"Cached flows",
@@ -123,7 +126,9 @@ class FlowPlanner:
) -> FlowPlan:
"""Check each of the flows' policies, check policies for each stage with PolicyBinding
and return ordered list"""
with Hub.current.start_span(op="flow.planner.plan") as span:
with Hub.current.start_span(
op="authentik.flow.planner.plan", description=self.flow.slug
) as span:
span: Span
span.set_data("flow", self.flow)
span.set_data("request", request)
@@ -147,7 +152,9 @@ class FlowPlanner:
engine.build()
result = engine.result
if not result.passing:
raise FlowNonApplicableException(",".join(result.messages))
exc = FlowNonApplicableException(",".join(result.messages))
exc.policy_result = result
raise exc
# User is passing so far, check if we have a cached plan
cached_plan_key = cache_key(self.flow, user)
cached_plan = cache.get(cached_plan_key, None)
@@ -178,7 +185,8 @@ class FlowPlanner:
"""Build flow plan by checking each stage in their respective
order and checking the applied policies"""
with Hub.current.start_span(
op="flow.planner.build_plan"
op="authentik.flow.planner.build_plan",
description=self.flow.slug,
) as span, HIST_FLOWS_PLAN_TIME.labels(flow_slug=self.flow.slug).time():
span: Span
span.set_data("flow", self.flow)

@@ -1,4 +1,6 @@
"""authentik stage Base view"""
from typing import TYPE_CHECKING, Optional

from django.contrib.auth.models import AnonymousUser
from django.http import HttpRequest
from django.http.request import QueryDict
@@ -6,19 +8,24 @@ from django.http.response import HttpResponse
from django.urls import reverse
from django.views.generic.base import View
from rest_framework.request import Request
from sentry_sdk.hub import Hub
from structlog.stdlib import get_logger

from authentik.core.models import DEFAULT_AVATAR, User
from authentik.flows.challenge import (
AccessDeniedChallenge,
Challenge,
ChallengeResponse,
ChallengeTypes,
ContextualFlowInfo,
HttpChallengeResponse,
WithUserInfoChallenge,
)
from authentik.flows.models import InvalidResponseAction
from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, PLAN_CONTEXT_PENDING_USER
from authentik.flows.views.executor import FlowExecutorView

if TYPE_CHECKING:
from authentik.flows.views.executor import FlowExecutorView

PLAN_CONTEXT_PENDING_USER_IDENTIFIER = "pending_user_identifier"
LOGGER = get_logger()
@@ -27,11 +34,11 @@ LOGGER = get_logger()
class StageView(View):
"""Abstract Stage, inherits TemplateView but can be combined with FormView"""

executor: FlowExecutorView
executor: "FlowExecutorView"

request: HttpRequest = None

def __init__(self, executor: FlowExecutorView, **kwargs):
def __init__(self, executor: "FlowExecutorView", **kwargs):
self.executor = executor
super().__init__(**kwargs)

@@ -42,6 +49,8 @@ class StageView(View):
other things besides the form display.

If no user is pending, returns request.user"""
if not self.executor.plan:
return self.request.user
if PLAN_CONTEXT_PENDING_USER_IDENTIFIER in self.executor.plan.context and for_display:
return User(
username=self.executor.plan.context.get(PLAN_CONTEXT_PENDING_USER_IDENTIFIER),
@@ -94,17 +103,31 @@ class ChallengeStageView(StageView):
keep_context=keep_context,
)
return self.executor.restart_flow(keep_context)
return self.challenge_invalid(challenge)
return self.challenge_valid(challenge)
with Hub.current.start_span(
op="authentik.flow.stage.challenge_invalid",
description=self.__class__.__name__,
):
return self.challenge_invalid(challenge)
with Hub.current.start_span(
op="authentik.flow.stage.challenge_valid",
description=self.__class__.__name__,
):
return self.challenge_valid(challenge)

def format_title(self) -> str:
"""Allow usage of placeholder in flow title."""
if not self.executor.plan:
return self.executor.flow.title
return self.executor.flow.title % {
"app": self.executor.plan.context.get(PLAN_CONTEXT_APPLICATION, "")
}

def _get_challenge(self, *args, **kwargs) -> Challenge:
challenge = self.get_challenge(*args, **kwargs)
with Hub.current.start_span(
op="authentik.flow.stage.get_challenge",
description=self.__class__.__name__,
):
challenge = self.get_challenge(*args, **kwargs)
if "flow_info" not in challenge.initial_data:
flow_info = ContextualFlowInfo(
data={
@@ -149,10 +172,34 @@ class ChallengeStageView(StageView):
)
challenge_response.initial_data["response_errors"] = full_errors
if not challenge_response.is_valid():
LOGGER.warning(
LOGGER.error(
"f(ch): invalid challenge response",
binding=self.executor.current_binding,
errors=challenge_response.errors,
stage_view=self,
)
return HttpChallengeResponse(challenge_response)


class AccessDeniedChallengeView(ChallengeStageView):
"""Used internally by FlowExecutor's stage_invalid()"""

error_message: Optional[str]

def __init__(self, executor: "FlowExecutorView", error_message: Optional[str] = None, **kwargs):
super().__init__(executor, **kwargs)
self.error_message = error_message

def get_challenge(self, *args, **kwargs) -> Challenge:
return AccessDeniedChallenge(
data={
"error_message": self.error_message or "Unknown error",
"type": ChallengeTypes.NATIVE.value,
"component": "ak-stage-access-denied",
}
)

# This can never be reached since this challenge is created on demand and only the
# .get() method is called
def challenge_valid(self, response: ChallengeResponse) -> HttpResponse: # pragma: no cover
return self.executor.cancel()
|
||||
"""Test helpers"""
|
||||
from json import loads
|
||||
from typing import Any, Optional
|
||||
|
||||
from django.http.response import HttpResponse
|
||||
from django.urls.base import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.flows.challenge import ChallengeTypes
|
||||
from authentik.flows.models import Flow
|
||||
|
||||
|
||||
class FlowTestCase(APITestCase):
|
||||
"""Helpers for testing flows and stages."""
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
def assertStageResponse(
|
||||
self,
|
||||
response: HttpResponse,
|
||||
flow: Optional[Flow] = None,
|
||||
user: Optional[User] = None,
|
||||
**kwargs,
|
||||
) -> dict[str, Any]:
|
||||
"""Assert various attributes of a stage response"""
|
||||
raw_response = loads(response.content.decode())
|
||||
self.assertIsNotNone(raw_response["component"])
|
||||
self.assertIsNotNone(raw_response["type"])
|
||||
if flow:
|
||||
self.assertIn("flow_info", raw_response)
|
||||
self.assertEqual(raw_response["flow_info"]["background"], flow.background_url)
|
||||
self.assertEqual(
|
||||
raw_response["flow_info"]["cancel_url"], reverse("authentik_flows:cancel")
|
||||
)
|
||||
# We don't check the flow title since it will most likely go
|
||||
# through ChallengeStageView.format_title() so might not match 1:1
|
||||
# self.assertEqual(raw_response["flow_info"]["title"], flow.title)
|
||||
self.assertIsNotNone(raw_response["flow_info"]["title"])
|
||||
if user:
|
||||
self.assertEqual(raw_response["pending_user"], user.username)
|
||||
self.assertEqual(raw_response["pending_user_avatar"], user.avatar)
|
||||
for key, expected in kwargs.items():
|
||||
self.assertEqual(raw_response[key], expected)
|
||||
return raw_response
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
def assertStageRedirects(self, response: HttpResponse, to: str) -> dict[str, Any]:
|
||||
"""Wrapper around assertStageResponse that checks for a redirect"""
|
||||
return self.assertStageResponse(
|
||||
response, component="xak-flow-redirect", to=to, type=ChallengeTypes.REDIRECT.value
|
||||
)
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
from django.urls import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
from authentik.flows.api.stages import StageSerializer, StageViewSet
|
||||
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, Stage
|
||||
from authentik.policies.dummy.models import DummyPolicy
|
||||
@ -32,7 +32,7 @@ class TestFlowsAPI(APITestCase):
|
||||
|
||||
def test_models(self):
|
||||
"""Test that ui_user_settings returns none"""
|
||||
self.assertIsNone(Stage().ui_user_settings)
|
||||
self.assertIsNone(Stage().ui_user_settings())
|
||||
|
||||
def test_api_serializer(self):
|
||||
"""Test that stage serializer returns the correct type"""
|
||||
@ -47,7 +47,7 @@ class TestFlowsAPI(APITestCase):
|
||||
|
||||
def test_api_diagram(self):
|
||||
"""Test flow diagram."""
|
||||
user = User.objects.get(username="akadmin")
|
||||
user = create_test_admin_user()
|
||||
self.client.force_login(user)
|
||||
|
||||
flow = Flow.objects.create(
|
||||
@ -77,7 +77,7 @@ class TestFlowsAPI(APITestCase):
|
||||
|
||||
def test_api_diagram_no_stages(self):
|
||||
"""Test flow diagram with no stages."""
|
||||
user = User.objects.get(username="akadmin")
|
||||
user = create_test_admin_user()
|
||||
self.client.force_login(user)
|
||||
|
||||
flow = Flow.objects.create(
|
||||
@ -93,7 +93,7 @@ class TestFlowsAPI(APITestCase):
|
||||
|
||||
def test_types(self):
|
||||
"""Test Stage's types endpoint"""
|
||||
user = User.objects.get(username="akadmin")
|
||||
user = create_test_admin_user()
|
||||
self.client.force_login(user)
|
||||
|
||||
response = self.client.get(
|
||||
|
||||
@ -4,16 +4,14 @@ from unittest.mock import MagicMock, PropertyMock, patch
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django.test.client import RequestFactory
|
||||
from django.urls import reverse
|
||||
from django.utils.encoding import force_str
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.flows.challenge import ChallengeTypes
|
||||
from authentik.flows.exceptions import FlowNonApplicableException
|
||||
from authentik.flows.markers import ReevaluateMarker, StageMarker
|
||||
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, InvalidResponseAction
|
||||
from authentik.flows.planner import FlowPlan, FlowPlanner
|
||||
from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, StageView
|
||||
from authentik.flows.tests import FlowTestCase
|
||||
from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_PLAN, FlowExecutorView
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.policies.dummy.models import DummyPolicy
|
||||
@ -37,7 +35,7 @@ def to_stage_response(request: HttpRequest, source: HttpResponse):
|
||||
TO_STAGE_RESPONSE_MOCK = MagicMock(side_effect=to_stage_response)
|
||||
|
||||
|
||||
class TestFlowExecutor(APITestCase):
|
||||
class TestFlowExecutor(FlowTestCase):
|
||||
"""Test executor"""
|
||||
|
||||
def setUp(self):
|
||||
@ -90,18 +88,11 @@ class TestFlowExecutor(APITestCase):
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertJSONEqual(
|
||||
force_str(response.content),
|
||||
{
|
||||
"component": "ak-stage-access-denied",
|
||||
"error_message": FlowNonApplicableException.__doc__,
|
||||
"flow_info": {
|
||||
"background": flow.background_url,
|
||||
"cancel_url": reverse("authentik_flows:cancel"),
|
||||
"title": "",
|
||||
},
|
||||
"type": ChallengeTypes.NATIVE.value,
|
||||
},
|
||||
self.assertStageResponse(
|
||||
response,
|
||||
flow=flow,
|
||||
error_message=FlowNonApplicableException.__doc__,
|
||||
component="ak-stage-access-denied",
|
||||
)
|
||||
|
||||
@patch(
|
||||
@ -283,14 +274,7 @@ class TestFlowExecutor(APITestCase):
|
||||
# We do this request without the patch, so the policy results in false
|
||||
response = self.client.post(exec_url)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertJSONEqual(
|
||||
force_str(response.content),
|
||||
{
|
||||
"component": "xak-flow-redirect",
|
||||
"to": reverse("authentik_core:root-redirect"),
|
||||
"type": ChallengeTypes.REDIRECT.value,
|
||||
},
|
||||
)
|
||||
self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))
|
||||
|
||||
def test_reevaluate_keep(self):
|
||||
"""Test planner with re-evaluate (everything is kept)"""
|
||||
@ -360,14 +344,7 @@ class TestFlowExecutor(APITestCase):
|
||||
# We do this request without the patch, so the policy results in false
|
||||
response = self.client.post(exec_url)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertJSONEqual(
|
||||
force_str(response.content),
|
||||
{
|
||||
"component": "xak-flow-redirect",
|
||||
"to": reverse("authentik_core:root-redirect"),
|
||||
"type": ChallengeTypes.REDIRECT.value,
|
||||
},
|
||||
)
|
||||
self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))
|
||||
|
||||
def test_reevaluate_remove_consecutive(self):
|
||||
"""Test planner with re-evaluate (consecutive stages are removed)"""
|
||||
@ -407,18 +384,7 @@ class TestFlowExecutor(APITestCase):
|
||||
# First request, run the planner
|
||||
response = self.client.get(exec_url)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertJSONEqual(
|
||||
force_str(response.content),
|
||||
{
|
||||
"type": ChallengeTypes.NATIVE.value,
|
||||
"component": "ak-stage-dummy",
|
||||
"flow_info": {
|
||||
"background": flow.background_url,
|
||||
"cancel_url": reverse("authentik_flows:cancel"),
|
||||
"title": "",
|
||||
},
|
||||
},
|
||||
)
|
||||
self.assertStageResponse(response, flow, component="ak-stage-dummy")
|
||||
|
||||
plan: FlowPlan = self.client.session[SESSION_KEY_PLAN]
|
||||
|
||||
@ -441,31 +407,13 @@ class TestFlowExecutor(APITestCase):
|
||||
# but it won't save it, hence we can't check the plan
|
||||
response = self.client.get(exec_url)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertJSONEqual(
|
||||
force_str(response.content),
|
||||
{
|
||||
"type": ChallengeTypes.NATIVE.value,
|
||||
"component": "ak-stage-dummy",
|
||||
"flow_info": {
|
||||
"background": flow.background_url,
|
||||
"cancel_url": reverse("authentik_flows:cancel"),
|
||||
"title": "",
|
||||
},
|
||||
},
|
||||
)
|
||||
self.assertStageResponse(response, flow, component="ak-stage-dummy")
|
||||
|
||||
# fourth request, this confirms the last stage (dummy4)
|
||||
# We do this request without the patch, so the policy results in false
|
||||
response = self.client.post(exec_url)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertJSONEqual(
|
||||
force_str(response.content),
|
||||
{
|
||||
"component": "xak-flow-redirect",
|
||||
"to": reverse("authentik_core:root-redirect"),
|
||||
"type": ChallengeTypes.REDIRECT.value,
|
||||
},
|
||||
)
|
||||
self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))
|
||||
|
||||
def test_stageview_user_identifier(self):
|
||||
"""Test PLAN_CONTEXT_PENDING_USER_IDENTIFIER"""
|
||||
@ -532,35 +480,16 @@ class TestFlowExecutor(APITestCase):
|
||||
# First request, run the planner
|
||||
response = self.client.get(exec_url)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertJSONEqual(
|
||||
force_str(response.content),
|
||||
{
|
||||
"type": ChallengeTypes.NATIVE.value,
|
||||
"component": "ak-stage-identification",
|
||||
"flow_info": {
|
||||
"background": flow.background_url,
|
||||
"cancel_url": reverse("authentik_flows:cancel"),
|
||||
"title": "",
|
||||
},
|
||||
"password_fields": False,
|
||||
"primary_action": "Log in",
|
||||
"sources": [],
|
||||
"show_source_labels": False,
|
||||
"user_fields": [UserFields.E_MAIL],
|
||||
},
|
||||
self.assertStageResponse(
|
||||
response,
|
||||
flow,
|
||||
component="ak-stage-identification",
|
||||
password_fields=False,
|
||||
primary_action="Log in",
|
||||
sources=[],
|
||||
show_source_labels=False,
|
||||
user_fields=[UserFields.E_MAIL],
|
||||
)
|
||||
response = self.client.post(exec_url, {"uid_field": "invalid-string"}, follow=True)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertJSONEqual(
|
||||
force_str(response.content),
|
||||
{
|
||||
"component": "ak-stage-access-denied",
|
||||
"error_message": None,
|
||||
"flow_info": {
|
||||
"background": flow.background_url,
|
||||
"cancel_url": reverse("authentik_flows:cancel"),
|
||||
"title": "",
|
||||
},
|
||||
"type": ChallengeTypes.NATIVE.value,
|
||||
},
|
||||
)
|
||||
self.assertStageResponse(response, flow, component="ak-stage-access-denied")
|
||||
|
||||
@ -6,7 +6,7 @@ from django.test.client import RequestFactory
|
||||
from django.urls.base import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
from authentik.flows.challenge import ChallengeTypes
|
||||
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, InvalidResponseAction
|
||||
from authentik.stages.dummy.models import DummyStage
|
||||
@ -18,7 +18,7 @@ class TestFlowInspector(APITestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.request_factory = RequestFactory()
|
||||
self.admin = User.objects.get(username="akadmin")
|
||||
self.admin = create_test_admin_user()
|
||||
self.client.force_login(self.admin)
|
||||
|
||||
def test(self):
|
||||
@ -77,7 +77,7 @@ class TestFlowInspector(APITestCase):
|
||||
|
||||
self.client.post(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
|
||||
{"uid_field": "akadmin"},
|
||||
{"uid_field": self.admin.username},
|
||||
follow=True,
|
||||
)
|
||||
|
||||
@ -89,5 +89,5 @@ class TestFlowInspector(APITestCase):
|
||||
self.assertEqual(content["plans"][0]["current_stage"]["stage_obj"]["name"], "ident")
|
||||
self.assertEqual(content["current_plan"]["current_stage"]["stage_obj"]["name"], "dummy2")
|
||||
self.assertEqual(
|
||||
content["current_plan"]["plan_context"]["pending_user"]["username"], "akadmin"
|
||||
content["current_plan"]["plan_context"]["pending_user"]["username"], self.admin.username
|
||||
)
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
"""base model tests"""
|
||||
from typing import Callable, Type
|
||||
from typing import Callable
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
@ -12,18 +12,18 @@ class TestModels(TestCase):
|
||||
"""Generic model properties tests"""
|
||||
|
||||
|
||||
def model_tester_factory(test_model: Type[Stage]) -> Callable:
|
||||
def model_tester_factory(test_model: type[Stage]) -> Callable:
|
||||
"""Test a form"""
|
||||
|
||||
def tester(self: TestModels):
|
||||
model_class = None
|
||||
if test_model._meta.abstract:
|
||||
if test_model._meta.abstract: # pragma: no cover
|
||||
model_class = test_model.__bases__[0]()
|
||||
else:
|
||||
model_class = test_model()
|
||||
self.assertTrue(issubclass(model_class.type, StageView))
|
||||
self.assertIsNotNone(test_model.component)
|
||||
_ = test_model.ui_user_settings
|
||||
_ = model_class.ui_user_settings()
|
||||
|
||||
return tester
|
||||
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
"""stage view tests"""
|
||||
from typing import Callable, Type
|
||||
from typing import Callable
|
||||
|
||||
from django.test import RequestFactory, TestCase
|
||||
|
||||
@ -16,7 +16,7 @@ class TestViews(TestCase):
|
||||
self.exec = FlowExecutorView(request=self.factory.get("/"))
|
||||
|
||||
|
||||
def view_tester_factory(view_class: Type[StageView]) -> Callable:
|
||||
def view_tester_factory(view_class: type[StageView]) -> Callable:
|
||||
"""Test a form"""
|
||||
|
||||
def tester(self: TestViews):
|
||||
|
||||
@ -2,6 +2,7 @@
|
||||
from django.test import TestCase
|
||||
from django.urls import reverse
|
||||
|
||||
from authentik.core.tests.utils import create_test_flow
|
||||
from authentik.flows.models import Flow, FlowDesignation
|
||||
from authentik.flows.planner import FlowPlan
|
||||
from authentik.flows.views.executor import SESSION_KEY_PLAN
|
||||
@ -12,9 +13,8 @@ class TestHelperView(TestCase):
|
||||
|
||||
def test_default_view(self):
|
||||
"""Test that ToDefaultFlow returns the expected URL"""
|
||||
flow = Flow.objects.filter(
|
||||
designation=FlowDesignation.INVALIDATION,
|
||||
).first()
|
||||
Flow.objects.filter(designation=FlowDesignation.INVALIDATION).delete()
|
||||
flow = create_test_flow(FlowDesignation.INVALIDATION)
|
||||
response = self.client.get(
|
||||
reverse("authentik_flows:default-invalidation"),
|
||||
)
|
||||
@ -24,9 +24,8 @@ class TestHelperView(TestCase):
|
||||
|
||||
def test_default_view_invalid_plan(self):
|
||||
"""Test that ToDefaultFlow returns the expected URL (with an invalid plan)"""
|
||||
flow = Flow.objects.filter(
|
||||
designation=FlowDesignation.INVALIDATION,
|
||||
).first()
|
||||
Flow.objects.filter(designation=FlowDesignation.INVALIDATION).delete()
|
||||
flow = create_test_flow(FlowDesignation.INVALIDATION)
|
||||
plan = FlowPlan(flow_pk=flow.pk.hex + "aa")
|
||||
session = self.client.session
|
||||
session[SESSION_KEY_PLAN] = plan
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
from contextlib import contextmanager
|
||||
from copy import deepcopy
|
||||
from json import loads
|
||||
from typing import Any, Type
|
||||
from typing import Any
|
||||
|
||||
from dacite import from_dict
|
||||
from dacite.exceptions import DaciteError
|
||||
@ -87,7 +87,7 @@ class FlowImporter:
|
||||
def _validate_single(self, entry: FlowBundleEntry) -> BaseSerializer:
|
||||
"""Validate a single entry"""
|
||||
model_app_label, model_name = entry.model.split(".")
|
||||
model: Type[SerializerModel] = apps.get_model(model_app_label, model_name)
|
||||
model: type[SerializerModel] = apps.get_model(model_app_label, model_name)
|
||||
if not isinstance(model(), ALLOWED_MODELS):
|
||||
raise EntryInvalidError(f"Model {model} not allowed")
|
||||
|
||||
|
||||
@ -10,7 +10,6 @@ from django.http import Http404, HttpRequest, HttpResponse, HttpResponseRedirect
|
||||
from django.http.request import QueryDict
|
||||
from django.shortcuts import get_object_or_404, redirect
|
||||
from django.template.response import TemplateResponse
|
||||
from django.urls.base import reverse
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.clickjacking import xframe_options_sameorigin
|
||||
from django.views.generic import View
|
||||
@ -19,12 +18,13 @@ from drf_spectacular.utils import OpenApiParameter, PolymorphicProxySerializer,
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.views import APIView
|
||||
from sentry_sdk import capture_exception
|
||||
from sentry_sdk.api import set_tag
|
||||
from sentry_sdk.hub import Hub
|
||||
from structlog.stdlib import BoundLogger, get_logger
|
||||
|
||||
from authentik.core.models import USER_ATTRIBUTE_DEBUG
|
||||
from authentik.events.models import Event, EventAction, cleanse_dict
|
||||
from authentik.flows.challenge import (
|
||||
AccessDeniedChallenge,
|
||||
Challenge,
|
||||
ChallengeResponse,
|
||||
ChallengeTypes,
|
||||
@ -34,13 +34,22 @@ from authentik.flows.challenge import (
|
||||
WithUserInfoChallenge,
|
||||
)
|
||||
from authentik.flows.exceptions import EmptyFlowException, FlowNonApplicableException
|
||||
from authentik.flows.models import ConfigurableStage, Flow, FlowDesignation, FlowStageBinding, Stage
|
||||
from authentik.flows.models import (
|
||||
ConfigurableStage,
|
||||
Flow,
|
||||
FlowDesignation,
|
||||
FlowStageBinding,
|
||||
FlowToken,
|
||||
Stage,
|
||||
)
|
||||
from authentik.flows.planner import (
|
||||
PLAN_CONTEXT_IS_RESTORED,
|
||||
PLAN_CONTEXT_PENDING_USER,
|
||||
PLAN_CONTEXT_REDIRECT,
|
||||
FlowPlan,
|
||||
FlowPlanner,
|
||||
)
|
||||
from authentik.flows.stage import AccessDeniedChallengeView
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.lib.utils.reflection import all_subclasses, class_to_path
|
||||
@ -53,7 +62,9 @@ NEXT_ARG_NAME = "next"
|
||||
SESSION_KEY_PLAN = "authentik_flows_plan"
|
||||
SESSION_KEY_APPLICATION_PRE = "authentik_flows_application_pre"
|
||||
SESSION_KEY_GET = "authentik_flows_get"
|
||||
SESSION_KEY_POST = "authentik_flows_post"
|
||||
SESSION_KEY_HISTORY = "authentik_flows_history"
|
||||
QS_KEY_TOKEN = "flow_token" # nosec
|
||||
|
||||
|
||||
def challenge_types():
|
||||
@ -116,6 +127,7 @@ class FlowExecutorView(APIView):
|
||||
super().setup(request, flow_slug=flow_slug)
|
||||
self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug)
|
||||
self._logger = get_logger().bind(flow_slug=flow_slug)
|
||||
set_tag("authentik.flow", self.flow.slug)
|
||||
|
||||
def handle_invalid_flow(self, exc: BaseException) -> HttpResponse:
|
||||
"""When a flow is non-applicable check if user is on the correct domain"""
|
||||
@ -126,71 +138,100 @@ class FlowExecutorView(APIView):
|
||||
message = exc.__doc__ if exc.__doc__ else str(exc)
|
||||
return self.stage_invalid(error_message=message)
|
||||
|
||||
def _check_flow_token(self, get_params: QueryDict):
|
||||
"""Check if the user is using a flow token to restore a plan"""
|
||||
tokens = FlowToken.filter_not_expired(key=get_params[QS_KEY_TOKEN])
|
||||
if not tokens.exists():
|
||||
return False
|
||||
token: FlowToken = tokens.first()
|
||||
try:
|
||||
plan = token.plan
|
||||
except (AttributeError, EOFError, ImportError, IndexError) as exc:
|
||||
LOGGER.warning("f(exec): Failed to restore token plan", exc=exc)
|
||||
finally:
|
||||
token.delete()
|
||||
if not isinstance(plan, FlowPlan):
|
||||
return None
|
||||
plan.context[PLAN_CONTEXT_IS_RESTORED] = True
|
||||
self._logger.debug("f(exec): restored flow plan from token", plan=plan)
|
||||
return plan
|
||||
|
||||
# pylint: disable=unused-argument, too-many-return-statements
|
||||
def dispatch(self, request: HttpRequest, flow_slug: str) -> HttpResponse:
|
||||
# Early check if there's an active Plan for the current session
|
||||
if SESSION_KEY_PLAN in self.request.session:
|
||||
self.plan = self.request.session[SESSION_KEY_PLAN]
|
||||
if self.plan.flow_pk != self.flow.pk.hex:
|
||||
self._logger.warning(
|
||||
"f(exec): Found existing plan for other flow, deleting plan",
|
||||
)
|
||||
# Existing plan is deleted from session and instance
|
||||
self.plan = None
|
||||
self.cancel()
|
||||
self._logger.debug("f(exec): Continuing existing plan")
|
||||
with Hub.current.start_span(
|
||||
op="authentik.flow.executor.dispatch", description=self.flow.slug
|
||||
) as span:
|
||||
span.set_data("authentik Flow", self.flow.slug)
|
||||
get_params = QueryDict(request.GET.get("query", ""))
|
||||
if QS_KEY_TOKEN in get_params:
|
||||
plan = self._check_flow_token(get_params)
|
||||
if plan:
|
||||
self.request.session[SESSION_KEY_PLAN] = plan
|
||||
# Early check if there's an active Plan for the current session
|
||||
if SESSION_KEY_PLAN in self.request.session:
|
||||
self.plan = self.request.session[SESSION_KEY_PLAN]
|
||||
if self.plan.flow_pk != self.flow.pk.hex:
|
||||
self._logger.warning(
|
||||
"f(exec): Found existing plan for other flow, deleting plan",
|
||||
)
|
||||
# Existing plan is deleted from session and instance
|
||||
self.plan = None
|
||||
self.cancel()
|
||||
self._logger.debug("f(exec): Continuing existing plan")
|
||||
|
||||
# Don't check session again as we've either already loaded the plan or we need to plan
|
||||
if not self.plan:
|
||||
request.session[SESSION_KEY_HISTORY] = []
|
||||
self._logger.debug("f(exec): No active Plan found, initiating planner")
|
||||
# Don't check session again as we've either already loaded the plan or we need to plan
|
||||
if not self.plan:
|
||||
request.session[SESSION_KEY_HISTORY] = []
|
||||
self._logger.debug("f(exec): No active Plan found, initiating planner")
|
||||
try:
|
||||
self.plan = self._initiate_plan()
|
||||
except FlowNonApplicableException as exc:
|
||||
self._logger.warning("f(exec): Flow not applicable to current user", exc=exc)
|
||||
return to_stage_response(self.request, self.handle_invalid_flow(exc))
|
||||
except EmptyFlowException as exc:
|
||||
self._logger.warning("f(exec): Flow is empty", exc=exc)
|
||||
# To match behaviour with loading an empty flow plan from cache,
|
||||
# we don't show an error message here, but rather call _flow_done()
|
||||
return self._flow_done()
|
||||
# Initial flow request, check if we have an upstream query string passed in
|
||||
request.session[SESSION_KEY_GET] = get_params
|
||||
# We don't save the Plan after getting the next stage
|
||||
# as it hasn't been successfully passed yet
|
||||
try:
|
||||
self.plan = self._initiate_plan()
|
||||
except FlowNonApplicableException as exc:
|
||||
self._logger.warning("f(exec): Flow not applicable to current user", exc=exc)
|
||||
return to_stage_response(self.request, self.handle_invalid_flow(exc))
|
||||
except EmptyFlowException as exc:
|
||||
self._logger.warning("f(exec): Flow is empty", exc=exc)
|
||||
# To match behaviour with loading an empty flow plan from cache,
|
||||
# we don't show an error message here, but rather call _flow_done()
|
||||
# This is the first time we actually access any attribute on the selected plan
|
||||
# if the cached plan is from an older version, it might have different attributes
|
||||
# in which case we just delete the plan and invalidate everything
|
||||
next_binding = self.plan.next(self.request)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
self._logger.warning(
|
||||
"f(exec): found incompatible flow plan, invalidating run", exc=exc
|
||||
)
|
||||
keys = cache.keys("flow_*")
|
||||
cache.delete_many(keys)
|
||||
return self.stage_invalid()
|
||||
if not next_binding:
|
||||
self._logger.debug("f(exec): no more stages, flow is done.")
|
||||
return self._flow_done()
|
||||
# Initial flow request, check if we have an upstream query string passed in
|
||||
request.session[SESSION_KEY_GET] = QueryDict(request.GET.get("query", ""))
|
||||
# We don't save the Plan after getting the next stage
|
||||
# as it hasn't been successfully passed yet
|
||||
try:
|
||||
# This is the first time we actually access any attribute on the selected plan
|
||||
# if the cached plan is from an older version, it might have different attributes
|
||||
# in which case we just delete the plan and invalidate everything
|
||||
next_binding = self.plan.next(self.request)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
self._logger.warning("f(exec): found incompatible flow plan, invalidating run", exc=exc)
|
||||
keys = cache.keys("flow_*")
|
||||
cache.delete_many(keys)
|
||||
return self.stage_invalid()
|
||||
if not next_binding:
|
||||
self._logger.debug("f(exec): no more stages, flow is done.")
|
||||
return self._flow_done()
|
||||
self.current_binding = next_binding
|
||||
self.current_stage = next_binding.stage
|
||||
self._logger.debug(
|
||||
"f(exec): Current stage",
|
||||
current_stage=self.current_stage,
|
||||
flow_slug=self.flow.slug,
|
||||
)
|
||||
try:
|
||||
stage_cls = self.current_stage.type
|
||||
except NotImplementedError as exc:
|
||||
self._logger.debug("Error getting stage type", exc=exc)
|
||||
return self.stage_invalid()
|
||||
self.current_stage_view = stage_cls(self)
|
||||
self.current_stage_view.args = self.args
|
||||
self.current_stage_view.kwargs = self.kwargs
|
||||
self.current_stage_view.request = request
|
||||
try:
|
||||
return super().dispatch(request)
|
||||
except InvalidStageError as exc:
|
||||
return self.stage_invalid(str(exc))
|
||||
self.current_binding = next_binding
|
||||
self.current_stage = next_binding.stage
|
||||
self._logger.debug(
|
||||
"f(exec): Current stage",
|
||||
current_stage=self.current_stage,
|
||||
flow_slug=self.flow.slug,
|
||||
)
|
||||
try:
|
||||
stage_cls = self.current_stage.type
|
||||
except NotImplementedError as exc:
|
||||
self._logger.debug("Error getting stage type", exc=exc)
|
||||
return self.stage_invalid()
|
||||
self.current_stage_view = stage_cls(self)
|
||||
self.current_stage_view.args = self.args
|
||||
self.current_stage_view.kwargs = self.kwargs
|
||||
self.current_stage_view.request = request
|
||||
try:
|
||||
return super().dispatch(request)
|
||||
except InvalidStageError as exc:
|
||||
return self.stage_invalid(str(exc))
|
||||
|
||||
def handle_exception(self, exc: Exception) -> HttpResponse:
|
||||
"""Handle exception in stage execution"""
|
||||
@ -232,8 +273,15 @@ class FlowExecutorView(APIView):
|
||||
stage=self.current_stage,
|
||||
)
|
||||
try:
|
||||
stage_response = self.current_stage_view.get(request, *args, **kwargs)
|
||||
return to_stage_response(request, stage_response)
|
||||
with Hub.current.start_span(
|
||||
op="authentik.flow.executor.stage",
|
||||
description=class_to_path(self.current_stage_view.__class__),
|
||||
) as span:
|
||||
span.set_data("Method", "GET")
|
||||
span.set_data("authentik Stage", self.current_stage_view)
|
||||
span.set_data("authentik Flow", self.flow.slug)
|
||||
stage_response = self.current_stage_view.get(request, *args, **kwargs)
|
||||
return to_stage_response(request, stage_response)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
return self.handle_exception(exc)
|
||||
|
||||
@ -269,8 +317,15 @@ class FlowExecutorView(APIView):
|
||||
stage=self.current_stage,
|
||||
)
|
||||
try:
|
||||
stage_response = self.current_stage_view.post(request, *args, **kwargs)
|
||||
return to_stage_response(request, stage_response)
|
||||
with Hub.current.start_span(
|
||||
op="authentik.flow.executor.stage",
|
||||
description=class_to_path(self.current_stage_view.__class__),
|
||||
) as span:
|
||||
span.set_data("Method", "POST")
|
||||
span.set_data("authentik Stage", self.current_stage_view)
|
||||
span.set_data("authentik Flow", self.flow.slug)
|
||||
stage_response = self.current_stage_view.post(request, *args, **kwargs)
|
||||
return to_stage_response(request, stage_response)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
return self.handle_exception(exc)
|
||||
|
||||
@ -350,21 +405,9 @@ class FlowExecutorView(APIView):
|
||||
is a superuser."""
|
||||
self._logger.debug("f(exec): Stage invalid")
|
||||
self.cancel()
|
||||
response = HttpChallengeResponse(
|
||||
AccessDeniedChallenge(
|
||||
{
|
||||
"error_message": error_message,
|
||||
"type": ChallengeTypes.NATIVE.value,
|
||||
"component": "ak-stage-access-denied",
|
||||
"flow_info": {
|
||||
"title": self.flow.title,
|
||||
"background": self.flow.background_url,
|
||||
"cancel_url": reverse("authentik_flows:cancel"),
|
||||
},
|
||||
}
|
||||
)
|
||||
)
|
||||
return to_stage_response(self.request, response)
|
||||
challenge_view = AccessDeniedChallengeView(self, error_message)
|
||||
challenge_view.request = self.request
|
||||
return to_stage_response(self.request, challenge_view.get(self.request))
|
||||
|
||||
def cancel(self):
|
||||
"""Cancel current execution and return a redirect"""
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user