Compare commits

465 Commits

version/20 ... version/20
| SHA1 | Author | Date | |
|---|---|---|---|
| eaad564e23 | |||
| 511a94975b | |||
| 015810a2fd | |||
| e70e6b84c2 | |||
| d0b9c9a26f | |||
| 3e403fa348 | |||
| 48f4a971ef | |||
| 6314be14ad | |||
| 1a072c6c39 | |||
| ef2eed0bdf | |||
| 91227b1e96 | |||
| 67d68629da | |||
| e875db8f66 | |||
| 055a76393d | |||
| 0754821628 | |||
| fca88d9896 | |||
| dfe0404c51 | |||
| fa61696b46 | |||
| e5773738f4 | |||
| cac8539d79 | |||
| cf600f6f26 | |||
| 49dfb4756e | |||
| 814758e2aa | |||
| 5c42dac5e2 | |||
| 88603fa4f7 | |||
| 0232c4e162 | |||
| 11753c1fe1 | |||
| f5cc6c67ec | |||
| 8b8ed3527a | |||
| 1aa0274e7c | |||
| ecd33ca0c1 | |||
| e93be0de9a | |||
| a5adc4f8ed | |||
| a6baed9753 | |||
| ceaf832e63 | |||
| a6b0b14685 | |||
| f679250edd | |||
| acc4de2235 | |||
| 56a8276dbf | |||
| 6dfe6edbef | |||
| 6af4bd0d9a | |||
| 7ee7f6bd6a | |||
| f8b8334010 | |||
| d4b65dc4b4 | |||
| e4bbd3b1c0 | |||
| 87de5e625d | |||
| efbe51673e | |||
| a95bea53ea | |||
| 6021fc0f52 | |||
| 1415b68ff4 | |||
| be6853ac52 | |||
| 7fd6be5abb | |||
| 91d6f572a5 | |||
| 016a9ce34e | |||
| 8adb95af7f | |||
| 1dc54775d8 | |||
| 370ef716b5 | |||
| 16e56ad9ca | |||
| b5b5a9eed3 | |||
| 8b22e7bcc3 | |||
| d48b5b9511 | |||
| 0eccaa3f1e | |||
| 67d550a80d | |||
| ebb5711c32 | |||
| 79ec872232 | |||
| 4284e14ff7 | |||
| 92a09779d0 | |||
| 14c621631d | |||
| c55f503b9b | |||
| a908cad976 | |||
| c2586557d8 | |||
| 01c80a82e2 | |||
| 0d47654651 | |||
| 1183095833 | |||
| c281b11bdc | |||
| 61fe45a58c | |||
| d43aab479c | |||
| 7f8383427a | |||
| a06d6cf33d | |||
| 5b7cb205c9 | |||
| 293a932d20 | |||
| fff901ff03 | |||
| f47c936295 | |||
| 88d5aec618 | |||
| 96ae68cf09 | |||
| 63b3434b6f | |||
| 947ecec02b | |||
| 1c2b452406 | |||
| 47777529ac | |||
| 949095c376 | |||
| 4b112c2799 | |||
| 291a2516b1 | |||
| 4dcfd021e2 | |||
| ca50848db3 | |||
| 0bb3e3c558 | |||
| e4b25809ab | |||
| 7bf932f8e2 | |||
| 99d04528b0 | |||
| e48d172036 | |||
| c2388137a8 | |||
| 650e2cbc38 | |||
| b32800ea71 | |||
| e1c0c0b20c | |||
| fe39e39dcd | |||
| 883f213b03 | |||
| 538996f617 | |||
| 2f4c92deb9 | |||
| ef335ec083 | |||
| 07b09df3fe | |||
| e70e031a1f | |||
| c7ba183dc0 | |||
| 3ed23a37ea | |||
| 3d724db0e3 | |||
| 2997542114 | |||
| 84b18fff96 | |||
| 1dce408c72 | |||
| e5ff47bf14 | |||
| b53bf331c3 | |||
| 90e9a8b34c | |||
| 845f842783 | |||
| 7397849c60 | |||
| 6dd46b5fc5 | |||
| 89ca79ed10 | |||
| 713bef895c | |||
| 925115e9ce | |||
| 42f5cf8c93 | |||
| 82cc1d536a | |||
| 08af2fd46b | |||
| 70e3b27a4d | |||
| 6a411d7960 | |||
| 33567b56d7 | |||
| 0c1954aeb7 | |||
| f4a6c70e98 | |||
| 5f198e7fe4 | |||
| d172d32817 | |||
| af3fb5c2cd | |||
| 885efb526e | |||
| 3bfb8b2cb2 | |||
| 9fc5ff4b77 | |||
| dd8b579dd6 | |||
| e12cbd8711 | |||
| 62d35f8f8c | |||
| 49be504c13 | |||
| edad55e51d | |||
| 38086fa8bb | |||
| c4f9a3e9a7 | |||
| 930df791bd | |||
| 9a6086634c | |||
| b68e65355a | |||
| 72d33a91dd | |||
| 7067e3d69a | |||
| 4db370d24e | |||
| 41e7b9b73f | |||
| 7f47f93e4e | |||
| 89abd44b76 | |||
| 14c7d8c4f4 | |||
| 525976a81b | |||
| 64a2126ea4 | |||
| 994c5882ab | |||
| ad64d51e85 | |||
| a184a7518a | |||
| 943fd80920 | |||
| 01bb18b8c4 | |||
| 94baaaa5a5 | |||
| 40b164ce94 | |||
| 1d7c7801e7 | |||
| 0db0a12ef3 | |||
| 8008aba450 | |||
| eaeab27004 | |||
| 111fbf119b | |||
| 300ad88447 | |||
| 92cc0c9c64 | |||
| 18ff803370 | |||
| 819af78e2b | |||
| 6338785ce1 | |||
| 973e151dff | |||
| fae6d83f27 | |||
| ed84fe0b8d | |||
| 1ee603403e | |||
| 7db7b7cc4d | |||
| 68a98cd86c | |||
| e758db5727 | |||
| 4d7d700afa | |||
| f9a5add01d | |||
| 2986b56389 | |||
| 58f79b525d | |||
| 0a1c0dae05 | |||
| e18ef8dab6 | |||
| 3cacc59bec | |||
| 4eea46d399 | |||
| 11e25617bd | |||
| 4817126811 | |||
| 0181361efa | |||
| 8ff8e1d5f7 | |||
| 19d5902a92 | |||
| 71dffb21a9 | |||
| bd283c506d | |||
| ef564e5f1a | |||
| 2543224c7c | |||
| 077eee9310 | |||
| d894eeaa67 | |||
| 452bfb39bf | |||
| 6b6702521f | |||
| c07b8d95d0 | |||
| bf347730b3 | |||
| ececfc3a30 | |||
| b76546de0c | |||
| 424d490a60 | |||
| 127dd85214 | |||
| 10570ac7f8 | |||
| dc5667b0b8 | |||
| ec9cacb610 | |||
| 0027dbc0e5 | |||
| c15e4b24a1 | |||
| b6f518ffe6 | |||
| 4e476fd4e9 | |||
| 03503363e5 | |||
| 22d6621b02 | |||
| 0023df64c8 | |||
| 59a259e43a | |||
| c6f39f5eb4 | |||
| e3c0aad48a | |||
| 91dd33cee6 | |||
| 5a2c367e89 | |||
| 3b05c9cb1a | |||
| 6e53f1689d | |||
| e3be0f2550 | |||
| 294f2243c1 | |||
| 7b1373e8d6 | |||
| e70b486f20 | |||
| b90174f153 | |||
| 7d7acd8494 | |||
| 4d9d7c5efb | |||
| d614b3608d | |||
| beb2715fa7 | |||
| 5769ff45b5 | |||
| 9d6f79558f | |||
| 41d5bff9d3 | |||
| ec84ba9b6d | |||
| 042a62f99e | |||
| 907f02cfee | |||
| 53fe412bf9 | |||
| ef9e177fe9 | |||
| 28e675596b | |||
| 9b7f57cc75 | |||
| 935a8f4d58 | |||
| 01fcbb325b | |||
| 7d3d17acb9 | |||
| e434321f7c | |||
| ebd476be14 | |||
| 31ba543c62 | |||
| a101d48b5a | |||
| 4c166dcf52 | |||
| 47b1f025e1 | |||
| 8f44c792ac | |||
| e57b6f2347 | |||
| 275d0dfd03 | |||
| f18cbace7a | |||
| 212220554f | |||
| a596392bc3 | |||
| 3e22740eac | |||
| d18a691f63 | |||
| 3cd5e68bc1 | |||
| c741c13132 | |||
| 924f6f104a | |||
| 454594025b | |||
| e72097292c | |||
| ab17a12184 | |||
| 776f3f69a5 | |||
| 8560c7150a | |||
| 301386fb4a | |||
| 68e8b6990b | |||
| 4f800c4758 | |||
| 90c31c2214 | |||
| 50e3d317b2 | |||
| 3eed7bb010 | |||
| 0ef8edc9f1 | |||
| a6373ebb33 | |||
| bf8ce55eea | |||
| 61b4fcb5f3 | |||
| 81275e3bd1 | |||
| 7988bf7748 | |||
| 00d8eec360 | |||
| 82150c8e84 | |||
| 1dbd749a74 | |||
| a96479f16c | |||
| 5d5fb1f37e | |||
| b6f4d6a5eb | |||
| 8ab5c04c2c | |||
| 386944117e | |||
| 9154b9b85d | |||
| fc19372709 | |||
| e5d9c6537c | |||
| bf5cbac314 | |||
| 5cca637a3d | |||
| 5bfb8b454b | |||
| 4d96437972 | |||
| d03b0b8152 | |||
| c249b55ff5 | |||
| 1e1876b34c | |||
| a27493ad1b | |||
| 95b1ab820e | |||
| 5cf9f0002b | |||
| fc7a452b0c | |||
| 25ee0e4b45 | |||
| 46f12e62e8 | |||
| 4245dea25a | |||
| 908db3df81 | |||
| ef4f9aa437 | |||
| 902dd83c67 | |||
| 1c4b78b5f4 | |||
| d854d819d1 | |||
| f246da6b73 | |||
| 4a56b5e827 | |||
| 53b10e64f8 | |||
| 27e4c7027c | |||
| 410d1b97cd | |||
| f93f7e635b | |||
| 74eba04735 | |||
| 01bdaffe36 | |||
| f6b556713a | |||
| abe38bb16a | |||
| f2b8d45999 | |||
| 3f61dff1cb | |||
| b19da6d774 | |||
| 7c55616e29 | |||
| 952a7f07c1 | |||
| 6510b97c1e | |||
| 19b707a0fb | |||
| 320a600349 | |||
| 10110deae5 | |||
| 884c546f32 | |||
| abec906677 | |||
| 22d1dd801c | |||
| 03891cbe09 | |||
| 3c5157dfd4 | |||
| d241e8d51d | |||
| 7ba15884ed | |||
| 47356915b1 | |||
| 2520c92b78 | |||
| e7e0e6d213 | |||
| ca0250e19f | |||
| cf4c7c1bcb | |||
| 670af8789a | |||
| 5c5634830f | |||
| b6b0edb7ad | |||
| 45440abc80 | |||
| 9c42b75567 | |||
| e9a477c1eb | |||
| fa60655a5d | |||
| 5d729b4878 | |||
| 8692f7233f | |||
| 457e17fec3 | |||
| 87e99625e6 | |||
| 6f32eeea43 | |||
| dfcf8b2d40 | |||
| 846006f2e3 | |||
| f557b2129f | |||
| 6dc2003e34 | |||
| 0149c89003 | |||
| f458cae954 | |||
| f01d117ce6 | |||
| 2bde43e5dc | |||
| 84cc0b5490 | |||
| 2f3026084e | |||
| 89696edbee | |||
| c1f0833c09 | |||
| c77f804b77 | |||
| 8e83209631 | |||
| 2e48e0cc2f | |||
| e72f0ab160 | |||
| a3c681cc44 | |||
| 5b3a9e29fb | |||
| 15803dc67d | |||
| ff37e064c9 | |||
| ef8e922e2a | |||
| 34b11524f1 | |||
| 9e2492be5c | |||
| b3ba083ff0 | |||
| 22a8603892 | |||
| d83d058a4b | |||
| ec3fd4a3ab | |||
| 0764668b14 | |||
| 16b6c17305 | |||
| e60509697a | |||
| 85364af9e9 | |||
| cf4b4030aa | |||
| 74dc025869 | |||
| cabdc53553 | |||
| 29e9f399bd | |||
| dad43017a0 | |||
| 7fb939f97b | |||
| 88859b1c26 | |||
| c78236a2a2 | |||
| ba55538a34 | |||
| f742c73e24 | |||
| ca314c262c | |||
| b932b6c963 | |||
| 3c048a1921 | |||
| 8a60a7e26f | |||
| f10b57ba0b | |||
| e53114a645 | |||
| 2e50532518 | |||
| 1936ddfecb | |||
| 4afef46cb8 | |||
| 92b4244e81 | |||
| dfbf7027bc | |||
| eca2ef20d0 | |||
| cac5c7b3ea | |||
| 37ee555c8e | |||
| f910da0f8a | |||
| fc9d270992 | |||
| dcbc3d788a | |||
| 4658018a90 | |||
| 577b7ee515 | |||
| 621773c1ea | |||
| 3da526f20e | |||
| 052e465041 | |||
| c843f18743 | |||
| 80d0b14bb8 | |||
| 68637cf7cf | |||
| 82acba26af | |||
| ff8a812823 | |||
| 7f5fed2aea | |||
| a5c30fd9c7 | |||
| ef23a0da52 | |||
| ba527e7141 | |||
| 8edc254ab5 | |||
| 42627d21b0 | |||
| 2479b157d0 | |||
| 602573f83f | |||
| 20c33fa011 | |||
| 8599d9efe0 | |||
| 8e6fcfe350 | |||
| 558aa45201 | |||
| e9910732bc | |||
| 246dd4b062 | |||
| 4425f8d183 | |||
| c410bb8c36 | |||
| 44f62a4773 | |||
| b6ff04694f | |||
| d4ce0e8e41 | |||
| 362d72da8c | |||
| 88d0f8d8a8 | |||
| 61097b9400 | |||
| 7a73ddfb60 | |||
| d66f13c249 | |||
| 8cc3cb6a42 | |||
| 4c5537ddfe | |||
| a95779157d | |||
| 70256727fd | |||
| ac6afb2b82 | |||
| 2ea7bd86e8 | |||
| 95bce9c9e7 | |||
| 71a22c2a34 | |||
| f3eb85877d | |||
| 273f5211a0 | |||
| db06428ab9 | |||
| 109d8e48d4 | |||
| 2ca115285c | |||
| f5459645a5 | |||
| 14c159500d | |||
| 03da87991f | |||
| e38ee9c580 | |||
| 3bf53b2db1 | |||
| @ -1,5 +1,5 @@ | ||||
| [bumpversion] | ||||
| current_version = 2021.12.1 | ||||
| current_version = 2022.1.5 | ||||
| tag = True | ||||
| commit = True | ||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*) | ||||
| @ -17,7 +17,7 @@ values = | ||||
| 	beta | ||||
| 	stable | ||||
|  | ||||
| [bumpversion:file:website/docs/installation/docker-compose.md] | ||||
| [bumpversion:file:pyproject.toml] | ||||
|  | ||||
| [bumpversion:file:docker-compose.yml] | ||||
|  | ||||
| @ -30,7 +30,3 @@ values = | ||||
| [bumpversion:file:internal/constants/constants.go] | ||||
|  | ||||
| [bumpversion:file:web/src/constants.ts] | ||||
|  | ||||
| [bumpversion:file:website/docs/outposts/manual-deploy-docker-compose.md] | ||||
|  | ||||
| [bumpversion:file:website/docs/outposts/manual-deploy-kubernetes.md] | ||||
|  | ||||
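The `parse` regex in the bump2version configuration above is how the tool splits the current version into components before incrementing one of them. A minimal sketch of that parsing step, reusing the exact pattern from the config; the bump itself is simplified here and ignores the `release` values (`beta`, `stable`) the real config also handles:

```python
import re

# Pattern copied from the `parse` line of the bumpversion config above.
PARSE = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)")


def bump_patch(version: str) -> str:
    """Split a version with the config's regex and bump the patch component."""
    parts = PARSE.match(version).groupdict()
    return f"{parts['major']}.{parts['minor']}.{int(parts['patch']) + 1}"


print(bump_patch("2022.1.5"))  # -> 2022.1.6
```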
.github/workflows/ci-main.yml (vendored): 150 changes

							| @ -33,40 +33,36 @@ jobs: | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: '3.9' | ||||
|       - uses: actions/setup-node@v2 | ||||
|         with: | ||||
|           node-version: '16' | ||||
|       - id: cache-pipenv | ||||
|       - id: cache-poetry | ||||
|         uses: actions/cache@v2.1.7 | ||||
|         with: | ||||
|           path: ~/.local/share/virtualenvs | ||||
|           key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }} | ||||
|           path: ~/.cache/pypoetry/virtualenvs | ||||
|           key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }} | ||||
|       - name: prepare | ||||
|         env: | ||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} | ||||
|           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||
|         run: scripts/ci_prepare.sh | ||||
|       - name: run pylint | ||||
|         run: pipenv run make ci-${{ matrix.job }} | ||||
|       - name: run job | ||||
|         run: poetry run make ci-${{ matrix.job }} | ||||
|   test-migrations: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: '3.9' | ||||
|       - id: cache-pipenv | ||||
|       - id: cache-poetry | ||||
|         uses: actions/cache@v2.1.7 | ||||
|         with: | ||||
|           path: ~/.local/share/virtualenvs | ||||
|           key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }} | ||||
|           path: ~/.cache/pypoetry/virtualenvs | ||||
|           key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }} | ||||
|       - name: prepare | ||||
|         env: | ||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} | ||||
|           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||
|         run: scripts/ci_prepare.sh | ||||
|       - name: run migrations | ||||
|         run: pipenv run python -m lifecycle.migrate | ||||
|         run: poetry run python -m lifecycle.migrate | ||||
|   test-migrations-from-stable: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
| @ -74,71 +70,69 @@ jobs: | ||||
|         with: | ||||
|           fetch-depth: 0 | ||||
|       - uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: '3.9' | ||||
|       - name: prepare variables | ||||
|         id: ev | ||||
|         run: | | ||||
|           python ./scripts/gh_env.py | ||||
|       - id: cache-pipenv | ||||
|           sudo pip install -U pipenv | ||||
|       - id: cache-poetry | ||||
|         uses: actions/cache@v2.1.7 | ||||
|         with: | ||||
|           path: ~/.local/share/virtualenvs | ||||
|           key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }} | ||||
|           path: ~/.cache/pypoetry/virtualenvs | ||||
|           key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }} | ||||
|       - name: checkout stable | ||||
|         run: | | ||||
|           # Copy current, latest config to local | ||||
|           cp authentik/lib/default.yml local.env.yml | ||||
|           cp -R .github .. | ||||
|           cp -R scripts .. | ||||
|           cp -R poetry.lock pyproject.toml .. | ||||
|           git checkout $(git describe --abbrev=0 --match 'version/*') | ||||
|           rm -rf .github/ scripts/ | ||||
|           mv ../.github ../scripts . | ||||
|           mv ../.github ../scripts ../poetry.lock ../pyproject.toml . | ||||
|       - name: prepare | ||||
|         env: | ||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} | ||||
|           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||
|         run: | | ||||
|           scripts/ci_prepare.sh | ||||
|           # Sync anyways since stable will have different dependencies | ||||
|           pipenv sync --dev | ||||
|           # install anyways since stable will have different dependencies | ||||
|           poetry install | ||||
|       - name: run migrations to stable | ||||
|         run: pipenv run python -m lifecycle.migrate | ||||
|         run: poetry run python -m lifecycle.migrate | ||||
|       - name: checkout current code | ||||
|         run: | | ||||
|           set -x | ||||
|           git fetch | ||||
|           git reset --hard HEAD | ||||
|           git checkout $GITHUB_HEAD_REF | ||||
|           pipenv sync --dev | ||||
|           git checkout $GITHUB_SHA | ||||
|           poetry install | ||||
|       - name: prepare | ||||
|         env: | ||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} | ||||
|           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||
|         run: scripts/ci_prepare.sh | ||||
|       - name: migrate to latest | ||||
|         run: pipenv run python -m lifecycle.migrate | ||||
|         run: poetry run python -m lifecycle.migrate | ||||
|   test-unittest: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: '3.9' | ||||
|       - id: cache-pipenv | ||||
|       - id: cache-poetry | ||||
|         uses: actions/cache@v2.1.7 | ||||
|         with: | ||||
|           path: ~/.local/share/virtualenvs | ||||
|           key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }} | ||||
|           path: ~/.cache/pypoetry/virtualenvs | ||||
|           key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }} | ||||
|       - name: prepare | ||||
|         env: | ||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} | ||||
|           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||
|         run: scripts/ci_prepare.sh | ||||
|       - uses: testspace-com/setup-testspace@v1 | ||||
|         with: | ||||
|           domain: ${{github.repository_owner}} | ||||
|       - name: run unittest | ||||
|         run: | | ||||
|           pipenv run make test | ||||
|           pipenv run coverage xml | ||||
|           poetry run make test | ||||
|           poetry run coverage xml | ||||
|       - name: run testspace | ||||
|         if: ${{ always() }} | ||||
|         run: | | ||||
| @ -150,16 +144,14 @@ jobs: | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: '3.9' | ||||
|       - id: cache-pipenv | ||||
|       - id: cache-poetry | ||||
|         uses: actions/cache@v2.1.7 | ||||
|         with: | ||||
|           path: ~/.local/share/virtualenvs | ||||
|           key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }} | ||||
|           path: ~/.cache/pypoetry/virtualenvs | ||||
|           key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }} | ||||
|       - name: prepare | ||||
|         env: | ||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} | ||||
|           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||
|         run: scripts/ci_prepare.sh | ||||
|       - uses: testspace-com/setup-testspace@v1 | ||||
|         with: | ||||
| @ -168,21 +160,19 @@ jobs: | ||||
|         uses: helm/kind-action@v1.2.0 | ||||
|       - name: run integration | ||||
|         run: | | ||||
|           pipenv run make test-integration | ||||
|           pipenv run coverage xml | ||||
|           poetry run make test-integration | ||||
|           poetry run coverage xml | ||||
|       - name: run testspace | ||||
|         if: ${{ always() }} | ||||
|         run: | | ||||
|           testspace [integration]unittest.xml --link=codecov | ||||
|       - if: ${{ always() }} | ||||
|         uses: codecov/codecov-action@v2 | ||||
|   test-e2e: | ||||
|   test-e2e-provider: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: '3.9' | ||||
|       - uses: actions/setup-node@v2 | ||||
|         with: | ||||
|           node-version: '16' | ||||
| @ -191,14 +181,14 @@ jobs: | ||||
|       - uses: testspace-com/setup-testspace@v1 | ||||
|         with: | ||||
|           domain: ${{github.repository_owner}} | ||||
|       - id: cache-pipenv | ||||
|       - id: cache-poetry | ||||
|         uses: actions/cache@v2.1.7 | ||||
|         with: | ||||
|           path: ~/.local/share/virtualenvs | ||||
|           key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }} | ||||
|           path: ~/.cache/pypoetry/virtualenvs | ||||
|           key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }} | ||||
|       - name: prepare | ||||
|         env: | ||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} | ||||
|           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||
|         run: | | ||||
|           scripts/ci_prepare.sh | ||||
|           docker-compose -f tests/e2e/docker-compose.yml up -d | ||||
| @ -215,12 +205,57 @@ jobs: | ||||
|           npm run build | ||||
|       - name: run e2e | ||||
|         run: | | ||||
|           pipenv run make test-e2e | ||||
|           pipenv run coverage xml | ||||
|           poetry run make test-e2e-provider | ||||
|           poetry run coverage xml | ||||
|       - name: run testspace | ||||
|         if: ${{ always() }} | ||||
|         run: | | ||||
|           testspace [e2e]unittest.xml --link=codecov | ||||
|           testspace [e2e-provider]unittest.xml --link=codecov | ||||
|       - if: ${{ always() }} | ||||
|         uses: codecov/codecov-action@v2 | ||||
|   test-e2e-rest: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - uses: actions/setup-python@v2 | ||||
|       - uses: actions/setup-node@v2 | ||||
|         with: | ||||
|           node-version: '16' | ||||
|           cache: 'npm' | ||||
|           cache-dependency-path: web/package-lock.json | ||||
|       - uses: testspace-com/setup-testspace@v1 | ||||
|         with: | ||||
|           domain: ${{github.repository_owner}} | ||||
|       - id: cache-poetry | ||||
|         uses: actions/cache@v2.1.7 | ||||
|         with: | ||||
|           path: ~/.cache/pypoetry/virtualenvs | ||||
|           key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }} | ||||
|       - name: prepare | ||||
|         env: | ||||
|           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||
|         run: | | ||||
|           scripts/ci_prepare.sh | ||||
|           docker-compose -f tests/e2e/docker-compose.yml up -d | ||||
|       - id: cache-web | ||||
|         uses: actions/cache@v2.1.7 | ||||
|         with: | ||||
|           path: web/dist | ||||
|           key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/**') }} | ||||
|       - name: prepare web ui | ||||
|         if: steps.cache-web.outputs.cache-hit != 'true' | ||||
|         run: | | ||||
|           cd web | ||||
|           npm i | ||||
|           npm run build | ||||
|       - name: run e2e | ||||
|         run: | | ||||
|           poetry run make test-e2e-rest | ||||
|           poetry run coverage xml | ||||
|       - name: run testspace | ||||
|         if: ${{ always() }} | ||||
|         run: | | ||||
|           testspace [e2e-rest]unittest.xml --link=codecov | ||||
|       - if: ${{ always() }} | ||||
|         uses: codecov/codecov-action@v2 | ||||
|   ci-core-mark: | ||||
| @ -230,7 +265,8 @@ jobs: | ||||
|       - test-migrations-from-stable | ||||
|       - test-unittest | ||||
|       - test-integration | ||||
|       - test-e2e | ||||
|       - test-e2e-rest | ||||
|       - test-e2e-provider | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - run: echo mark | ||||
| @ -252,7 +288,7 @@ jobs: | ||||
|       - name: prepare variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.HARBOR_USERNAME }} | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         run: | | ||||
|           python ./scripts/gh_env.py | ||||
|       - name: Login to Container Registry | ||||
|  | ||||
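Throughout the workflow above, the Poetry virtualenv cache is keyed on `${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }}`, so any change to `poetry.lock` produces a new key, the cache misses, and `ci_prepare.sh` reinstalls dependencies (signalled through the `INSTALL` cache-hit flag). A rough Python sketch of the content-hash idea behind such a key; this shows the concept only, not GitHub's exact `hashFiles` algorithm:

```python
import hashlib
from pathlib import Path


def poetry_cache_key(repo_root: str, runner_os: str = "Linux") -> str:
    """Derive a cache key from the contents of every poetry.lock in the repo."""
    digest = hashlib.sha256()
    for lock in sorted(Path(repo_root).rglob("poetry.lock")):
        digest.update(lock.read_bytes())
    return f"{runner_os}-poetry-cache-v2-{digest.hexdigest()}"


# Editing poetry.lock changes the digest, so the next run misses the cache
# and rebuilds the virtualenv instead of restoring a stale one.
print(poetry_cache_key("."))
```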
.github/workflows/ci-outpost.yml (vendored): 60 changes

							| @ -17,7 +17,7 @@ jobs: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - uses: actions/setup-go@v2 | ||||
|         with: | ||||
|           go-version: '^1.16.3' | ||||
|           go-version: "^1.17" | ||||
|       - name: Run linter | ||||
|         run: | | ||||
|           # Create folder structure for go embeds | ||||
| @ -28,11 +28,27 @@ jobs: | ||||
|             --rm \ | ||||
|             -v $(pwd):/app \ | ||||
|             -w /app \ | ||||
|             golangci/golangci-lint:v1.39.0 \ | ||||
|             golangci/golangci-lint:v1.43 \ | ||||
|             golangci-lint run -v --timeout 200s | ||||
|   test-unittest: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - uses: actions/setup-go@v2 | ||||
|         with: | ||||
|           go-version: "^1.17" | ||||
|       - name: Get dependencies | ||||
|         run: | | ||||
|           go get github.com/axw/gocov/gocov | ||||
|           go get github.com/AlekSi/gocov-xml | ||||
|           go get github.com/jstemmer/go-junit-report | ||||
|       - name: Go unittests | ||||
|         run: | | ||||
|           go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./... | go-junit-report > junit.xml | ||||
|   ci-outpost-mark: | ||||
|     needs: | ||||
|       - lint-golint | ||||
|       - test-unittest | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - run: echo mark | ||||
| @ -58,7 +74,7 @@ jobs: | ||||
|       - name: prepare variables | ||||
|         id: ev | ||||
|         env: | ||||
|           DOCKER_USERNAME: ${{ secrets.HARBOR_USERNAME }} | ||||
|           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||
|         run: | | ||||
|           python ./scripts/gh_env.py | ||||
|       - name: Login to Container Registry | ||||
| @ -80,3 +96,41 @@ jobs: | ||||
|           build-args: | | ||||
|             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} | ||||
|           platforms: ${{ matrix.arch }} | ||||
|   build-outpost-binary: | ||||
|     timeout-minutes: 120 | ||||
|     needs: | ||||
|       - ci-outpost-mark | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         type: | ||||
|           - proxy | ||||
|           - ldap | ||||
|         goos: [linux] | ||||
|         goarch: [amd64, arm64] | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - uses: actions/setup-go@v2 | ||||
|         with: | ||||
|           go-version: "^1.17" | ||||
|       - uses: actions/setup-node@v2 | ||||
|         with: | ||||
|           node-version: '16' | ||||
|           cache: 'npm' | ||||
|           cache-dependency-path: web/package-lock.json | ||||
|       - name: Build web | ||||
|         run: | | ||||
|           cd web | ||||
|           npm install | ||||
|           npm run build-proxy | ||||
|       - name: Build outpost | ||||
|         run: | | ||||
|           set -x | ||||
|           export GOOS=${{ matrix.goos }} | ||||
|           export GOARCH=${{ matrix.goarch }} | ||||
|           go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }} | ||||
|       - uses: actions/upload-artifact@v2 | ||||
|         with: | ||||
|           name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} | ||||
|           path: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} | ||||
|  | ||||
.github/workflows/release-publish.yml (vendored): 60 changes

							| @ -30,14 +30,14 @@ jobs: | ||||
|         with: | ||||
|           push: ${{ github.event_name == 'release' }} | ||||
|           tags: | | ||||
|             beryju/authentik:2021.12.1, | ||||
|             beryju/authentik:2022.1.5, | ||||
|             beryju/authentik:latest, | ||||
|             ghcr.io/goauthentik/server:2021.12.1, | ||||
|             ghcr.io/goauthentik/server:2022.1.5, | ||||
|             ghcr.io/goauthentik/server:latest | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|           context: . | ||||
|       - name: Building Docker Image (stable) | ||||
|         if: ${{ github.event_name == 'release' && !contains('2021.12.1', 'rc') }} | ||||
|         if: ${{ github.event_name == 'release' && !contains('2022.1.5', 'rc') }} | ||||
|         run: | | ||||
|           docker pull beryju/authentik:latest | ||||
|           docker tag beryju/authentik:latest beryju/authentik:stable | ||||
| @ -57,7 +57,7 @@ jobs: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - uses: actions/setup-go@v2 | ||||
|         with: | ||||
|           go-version: "^1.15" | ||||
|           go-version: "^1.17" | ||||
|       - name: Set up QEMU | ||||
|         uses: docker/setup-qemu-action@v1.2.0 | ||||
|       - name: Set up Docker Buildx | ||||
| @ -78,14 +78,14 @@ jobs: | ||||
|         with: | ||||
|           push: ${{ github.event_name == 'release' }} | ||||
|           tags: | | ||||
|             beryju/authentik-${{ matrix.type }}:2021.12.1, | ||||
|             beryju/authentik-${{ matrix.type }}:2022.1.5, | ||||
|             beryju/authentik-${{ matrix.type }}:latest, | ||||
|             ghcr.io/goauthentik/${{ matrix.type }}:2021.12.1, | ||||
|             ghcr.io/goauthentik/${{ matrix.type }}:2022.1.5, | ||||
|             ghcr.io/goauthentik/${{ matrix.type }}:latest | ||||
|           file: ${{ matrix.type }}.Dockerfile | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|       - name: Building Docker Image (stable) | ||||
|         if: ${{ github.event_name == 'release' && !contains('2021.12.1', 'rc') }} | ||||
|         if: ${{ github.event_name == 'release' && !contains('2022.1.5', 'rc') }} | ||||
|         run: | | ||||
|           docker pull beryju/authentik-${{ matrix.type }}:latest | ||||
|           docker tag beryju/authentik-${{ matrix.type }}:latest beryju/authentik-${{ matrix.type }}:stable | ||||
| @ -93,10 +93,50 @@ jobs: | ||||
|           docker pull ghcr.io/goauthentik/${{ matrix.type }}:latest | ||||
|           docker tag ghcr.io/goauthentik/${{ matrix.type }}:latest ghcr.io/goauthentik/${{ matrix.type }}:stable | ||||
|           docker push ghcr.io/goauthentik/${{ matrix.type }}:stable | ||||
|   build-outpost-binary: | ||||
|     timeout-minutes: 120 | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         type: | ||||
|           - proxy | ||||
|           - ldap | ||||
|         goos: [linux, darwin] | ||||
|         goarch: [amd64, arm64] | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - uses: actions/setup-go@v2 | ||||
|         with: | ||||
|           go-version: "^1.17" | ||||
|       - uses: actions/setup-node@v2 | ||||
|         with: | ||||
|           node-version: '16' | ||||
|           cache: 'npm' | ||||
|           cache-dependency-path: web/package-lock.json | ||||
|       - name: Build web | ||||
|         run: | | ||||
|           cd web | ||||
|           npm install | ||||
|           npm run build-proxy | ||||
|       - name: Build outpost | ||||
|         run: | | ||||
|           set -x | ||||
|           export GOOS=${{ matrix.goos }} | ||||
|           export GOARCH=${{ matrix.goarch }} | ||||
|           go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }} | ||||
|       - name: Upload binaries to release | ||||
|         uses: svenstaro/upload-release-action@v2 | ||||
|         with: | ||||
|           repo_token: ${{ secrets.GITHUB_TOKEN }} | ||||
|           file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} | ||||
|           asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} | ||||
|           tag: ${{ github.ref }} | ||||
|   test-release: | ||||
|     needs: | ||||
|       - build-server | ||||
|       - build-outpost | ||||
|       - build-outpost-binary | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
| @ -110,7 +150,9 @@ jobs: | ||||
|           docker-compose run -u root server test | ||||
|   sentry-release: | ||||
|     needs: | ||||
|       - test-release | ||||
|       - build-server | ||||
|       - build-outpost | ||||
|       - build-outpost-binary | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
| @ -128,7 +170,7 @@ jobs: | ||||
|           SENTRY_PROJECT: authentik | ||||
|           SENTRY_URL: https://sentry.beryju.org | ||||
|         with: | ||||
|           version: authentik@2021.12.1 | ||||
|           version: authentik@2022.1.5 | ||||
|           environment: beryjuorg-prod | ||||
|           sourcemaps: './web/dist' | ||||
|           url_prefix: '~/static/dist' | ||||
|  | ||||
.github/workflows/translation-compile.yml (vendored): 12 changes

							| @ -22,22 +22,20 @@ jobs: | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - uses: actions/setup-python@v2 | ||||
|         with: | ||||
|           python-version: '3.9' | ||||
|       - id: cache-pipenv | ||||
|       - id: cache-poetry | ||||
|         uses: actions/cache@v2.1.7 | ||||
|         with: | ||||
|           path: ~/.local/share/virtualenvs | ||||
|           key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }} | ||||
|           path: ~/.cache/pypoetry/virtualenvs | ||||
|           key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }} | ||||
|       - name: prepare | ||||
|         env: | ||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} | ||||
|           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||
|         run: | | ||||
|           sudo apt-get update | ||||
|           sudo apt-get install -y gettext | ||||
|           scripts/ci_prepare.sh | ||||
|       - name: run compile | ||||
|         run: pipenv run ./manage.py compilemessages | ||||
|         run: poetry run ./manage.py compilemessages | ||||
|       - name: Create Pull Request | ||||
|         uses: peter-evans/create-pull-request@v3 | ||||
|         id: cpr | ||||
|  | ||||
| @ -1 +0,0 @@ | ||||
| 3.9.7 | ||||
.vscode/settings.json (vendored): 4 changes

							| @ -11,7 +11,9 @@ | ||||
|         "saml", | ||||
|         "totp", | ||||
|         "webauthn", | ||||
|         "traefik" | ||||
|         "traefik", | ||||
|         "passwordless", | ||||
|         "kubernetes" | ||||
|     ], | ||||
|     "python.linting.pylintEnabled": true, | ||||
|     "todo-tree.tree.showCountsInTree": true, | ||||
|  | ||||
Dockerfile: 41 changes

							| @ -1,16 +1,4 @@ | ||||
| # Stage 1: Lock python dependencies | ||||
| FROM docker.io/python:3.10.1-slim-bullseye as locker | ||||
|  | ||||
| COPY ./Pipfile /app/ | ||||
| COPY ./Pipfile.lock /app/ | ||||
|  | ||||
| WORKDIR /app/ | ||||
|  | ||||
| RUN pip install pipenv && \ | ||||
|     pipenv lock -r > requirements.txt && \ | ||||
|     pipenv lock -r --dev-only > requirements-dev.txt | ||||
|  | ||||
| # Stage 2: Build website | ||||
| # Stage 1: Build website | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/node:16 as website-builder | ||||
|  | ||||
| COPY ./website /work/website/ | ||||
| @ -18,7 +6,7 @@ COPY ./website /work/website/ | ||||
| ENV NODE_ENV=production | ||||
| RUN cd /work/website && npm i && npm run build-docs-only | ||||
|  | ||||
| # Stage 3: Build webui | ||||
| # Stage 2: Build webui | ||||
| FROM --platform=${BUILDPLATFORM} docker.io/node:16 as web-builder | ||||
|  | ||||
| COPY ./web /work/web/ | ||||
| @ -27,8 +15,8 @@ COPY ./website /work/website/ | ||||
| ENV NODE_ENV=production | ||||
| RUN cd /work/web && npm i && npm run build | ||||
|  | ||||
| # Stage 4: Build go proxy | ||||
| FROM docker.io/golang:1.17.5-bullseye AS builder | ||||
| # Stage 3: Build go proxy | ||||
| FROM docker.io/golang:1.17.6-bullseye AS builder | ||||
|  | ||||
| WORKDIR /work | ||||
|  | ||||
| @ -43,29 +31,38 @@ COPY ./go.sum /work/go.sum | ||||
|  | ||||
| RUN go build -o /work/authentik ./cmd/server/main.go | ||||
|  | ||||
| # Stage 5: Run | ||||
| FROM docker.io/python:3.10.1-slim-bullseye | ||||
| # Stage 4: Run | ||||
| FROM docker.io/python:3.10.2-slim-bullseye | ||||
|  | ||||
| LABEL org.opencontainers.image.url https://goauthentik.io | ||||
| LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info. | ||||
| LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik | ||||
|  | ||||
| WORKDIR / | ||||
| COPY --from=locker /app/requirements.txt / | ||||
| COPY --from=locker /app/requirements-dev.txt / | ||||
|  | ||||
| ARG GIT_BUILD_HASH | ||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||
|  | ||||
| COPY ./pyproject.toml / | ||||
| COPY ./poetry.lock / | ||||
|  | ||||
| RUN apt-get update && \ | ||||
|     apt-get install -y --no-install-recommends \ | ||||
|         curl ca-certificates gnupg git runit libpq-dev \ | ||||
|         postgresql-client build-essential libxmlsec1-dev \ | ||||
|         pkg-config libmaxminddb0 && \ | ||||
|     pip install -r /requirements.txt --no-cache-dir && \ | ||||
|     pip install poetry && \ | ||||
|     poetry config virtualenvs.create false && \ | ||||
|     poetry install --no-dev && \ | ||||
|     rm -rf ~/.cache/pypoetry && \ | ||||
|     apt-get remove --purge -y build-essential git && \ | ||||
|     apt-get autoremove --purge -y && \ | ||||
|     apt-get clean && \ | ||||
|     rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \ | ||||
|     adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \ | ||||
|     mkdir -p /backups /certs /media && \ | ||||
|     chown authentik:authentik /backups /certs /media | ||||
|     mkdir -p /authentik/.ssh && \ | ||||
|     chown authentik:authentik /backups /certs /media /authentik/.ssh | ||||
|  | ||||
| COPY ./authentik/ /authentik | ||||
| COPY ./pyproject.toml / | ||||
|  | ||||
Makefile: 27 changes

							| @ -9,8 +9,14 @@ all: lint-fix lint test gen web | ||||
| test-integration: | ||||
| 	coverage run manage.py test tests/integration | ||||
|  | ||||
| test-e2e: | ||||
| 	coverage run manage.py test tests/e2e | ||||
| test-e2e-provider: | ||||
| 	coverage run manage.py test tests/e2e/test_provider* | ||||
|  | ||||
| test-e2e-rest: | ||||
| 	coverage run manage.py test tests/e2e/test_flows* tests/e2e/test_source* | ||||
|  | ||||
| test-go: | ||||
| 	go test -timeout 0 -v -race -cover ./... | ||||
|  | ||||
| test: | ||||
| 	coverage run manage.py test authentik | ||||
| @ -32,6 +38,7 @@ lint-fix: | ||||
| lint: | ||||
| 	bandit -r authentik tests lifecycle -x node_modules | ||||
| 	pylint authentik tests lifecycle | ||||
| 	golangci-lint run -v | ||||
|  | ||||
| i18n-extract: i18n-extract-core web-extract | ||||
|  | ||||
| @ -102,20 +109,24 @@ web-extract: | ||||
| # These targets are use by GitHub actions to allow usage of matrix | ||||
| # which makes the YAML File a lot smaller | ||||
|  | ||||
| ci-pylint: | ||||
| ci--meta-debug: | ||||
| 	python -V | ||||
| 	node --version | ||||
|  | ||||
| ci-pylint: ci--meta-debug | ||||
| 	pylint authentik tests lifecycle | ||||
|  | ||||
| ci-black: | ||||
| ci-black: ci--meta-debug | ||||
| 	black --check authentik tests lifecycle | ||||
|  | ||||
| ci-isort: | ||||
| ci-isort: ci--meta-debug | ||||
| 	isort --check authentik tests lifecycle | ||||
|  | ||||
| ci-bandit: | ||||
| ci-bandit: ci--meta-debug | ||||
| 	bandit -r authentik tests lifecycle | ||||
|  | ||||
| ci-pyright: | ||||
| ci-pyright: ci--meta-debug | ||||
| 	pyright e2e lifecycle | ||||
|  | ||||
| ci-pending-migrations: | ||||
| ci-pending-migrations: ci--meta-debug | ||||
| 	./manage.py makemigrations --check | ||||
|  | ||||
Pipfile: 68 changes

							| @ -1,68 +0,0 @@ | ||||
| [[source]] | ||||
| name = "pypi" | ||||
| url = "https://pypi.org/simple" | ||||
| verify_ssl = true | ||||
|  | ||||
| [packages] | ||||
| boto3 = "*" | ||||
| celery = "*" | ||||
| channels = "*" | ||||
| channels-redis = "*" | ||||
| codespell = "*" | ||||
| colorama = "*" | ||||
| dacite = "*" | ||||
| deepmerge = "*" | ||||
| defusedxml = "*" | ||||
| django = "*" | ||||
| django-dbbackup = { git = 'https://github.com/django-dbbackup/django-dbbackup.git', ref = '9d1909c30a3271c8c9c8450add30d6e0b996e145' } | ||||
| django-filter = "*" | ||||
| django-guardian = "*" | ||||
| django-model-utils = "*" | ||||
| django-otp = "*" | ||||
| django-prometheus = "*" | ||||
| django-redis = "*" | ||||
| django-storages = "*" | ||||
| djangorestframework = "*" | ||||
| djangorestframework-guardian = "*" | ||||
| docker = "*" | ||||
| drf-spectacular = "*" | ||||
| duo-client = "*" | ||||
| facebook-sdk = "*" | ||||
| geoip2 = "*" | ||||
| gunicorn = "*" | ||||
| kubernetes = "==v19.15.0" | ||||
| ldap3 = "*" | ||||
| lxml = "*" | ||||
| packaging = "*" | ||||
| psycopg2-binary = "*" | ||||
| pycryptodome = "*" | ||||
| pyjwt = "*" | ||||
| pyyaml = "*" | ||||
| requests-oauthlib = "*" | ||||
| sentry-sdk = { git = 'https://github.com/beryju/sentry-python.git', ref = '379aee28b15d3b87b381317746c4efd24b3d7bc3' } | ||||
| service_identity = "*" | ||||
| structlog = "*" | ||||
| swagger-spec-validator = "*" | ||||
| twisted = "==21.7.0" | ||||
| ua-parser = "*" | ||||
| urllib3 = {extras = ["secure"],version = "*"} | ||||
| uvicorn = {extras = ["standard"],version = "*"} | ||||
| webauthn = "*" | ||||
| xmlsec = "*" | ||||
| flower = "*" | ||||
| wsproto = "*" | ||||
|  | ||||
| [dev-packages] | ||||
| bandit = "*" | ||||
| black = "==21.11b1" | ||||
| bump2version = "*" | ||||
| colorama = "*" | ||||
| coverage = {extras = ["toml"],version = "*"} | ||||
| pylint = "*" | ||||
| pylint-django = "*" | ||||
| pytest = "*" | ||||
| pytest-django = "*" | ||||
| pytest-randomly = "*" | ||||
| requests-mock = "*" | ||||
| selenium = "*" | ||||
| importlib-metadata = "*" | ||||
Pipfile.lock (generated): 2505 changes (file diff suppressed because it is too large)

							| @ -57,4 +57,4 @@ DigitalOcean provides development and testing resources for authentik. | ||||
|     </a> | ||||
| </p> | ||||
|  | ||||
| Netlify hosts the [goauthentik.io](goauthentik.io) site. | ||||
| Netlify hosts the [goauthentik.io](https://goauthentik.io) site. | ||||
|  | ||||
| @ -6,8 +6,8 @@ | ||||
|  | ||||
| | Version    | Supported          | | ||||
| | ---------- | ------------------ | | ||||
| | 2021.9.x   | :white_check_mark: | | ||||
| | 2021.10.x  | :white_check_mark: | | ||||
| | 2021.12.x  | :white_check_mark: | | ||||
|  | ||||
| ## Reporting a Vulnerability | ||||
|  | ||||
|  | ||||
| @ -1,3 +1,19 @@ | ||||
| """authentik""" | ||||
| __version__ = "2021.12.1" | ||||
| from os import environ | ||||
| from typing import Optional | ||||
|  | ||||
| __version__ = "2022.1.5" | ||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||
|  | ||||
|  | ||||
| def get_build_hash(fallback: Optional[str] = None) -> str: | ||||
|     """Get build hash""" | ||||
|     return environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "") | ||||
|  | ||||
|  | ||||
| def get_full_version() -> str: | ||||
|     """Get full version, with build hash appended""" | ||||
|     version = __version__ | ||||
|     if (build_hash := get_build_hash()) != "": | ||||
|         version += "." + build_hash | ||||
|     return version | ||||
|  | ||||
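The new helpers in `authentik/__init__.py` read the `GIT_BUILD_HASH` environment variable (set from the Dockerfile's `ARG GIT_BUILD_HASH`) and append it to the reported version. A short usage sketch of the two functions as defined above, runnable from a checkout where `authentik` is importable:

```python
import os

from authentik import __version__, get_build_hash, get_full_version

# Without GIT_BUILD_HASH in the environment, the full version is plain __version__.
os.environ.pop("GIT_BUILD_HASH", None)
assert get_full_version() == __version__  # "2022.1.5"

# With a build hash (as injected at image build time), it is appended with a dot.
os.environ["GIT_BUILD_HASH"] = "deadbeef"
assert get_build_hash() == "deadbeef"
assert get_full_version() == f"{__version__}.deadbeef"
```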
| @ -95,7 +95,7 @@ class TaskViewSet(ViewSet): | ||||
|                 _("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}), | ||||
|             ) | ||||
|             return Response(status=204) | ||||
|         except ImportError:  # pragma: no cover | ||||
|         except (ImportError, AttributeError):  # pragma: no cover | ||||
|             # if we get an import error, the module path has probably changed | ||||
|             task.delete() | ||||
|             return Response(status=500) | ||||
|  | ||||
| @ -1,6 +1,4 @@ | ||||
| """authentik administration overview""" | ||||
| from os import environ | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from drf_spectacular.utils import extend_schema | ||||
| from packaging.version import parse | ||||
| @ -10,7 +8,7 @@ from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.views import APIView | ||||
|  | ||||
| from authentik import ENV_GIT_HASH_KEY, __version__ | ||||
| from authentik import __version__, get_build_hash | ||||
| from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
|  | ||||
| @ -25,7 +23,7 @@ class VersionSerializer(PassiveSerializer): | ||||
|  | ||||
|     def get_build_hash(self, _) -> str: | ||||
|         """Get build hash, if version is not latest or released""" | ||||
|         return environ.get(ENV_GIT_HASH_KEY, "") | ||||
|         return get_build_hash() | ||||
|  | ||||
|     def get_version_current(self, _) -> str: | ||||
|         """Get current version""" | ||||
|  | ||||
| @ -1,4 +1,6 @@ | ||||
| """authentik admin app config""" | ||||
| from importlib import import_module | ||||
|  | ||||
| from django.apps import AppConfig | ||||
|  | ||||
|  | ||||
| @ -13,3 +15,4 @@ class AuthentikAdminConfig(AppConfig): | ||||
|         from authentik.admin.tasks import clear_update_notifications | ||||
|  | ||||
|         clear_update_notifications.delay() | ||||
|         import_module("authentik.admin.signals") | ||||
|  | ||||
authentik/admin/signals.py (new file): 23 changes

							| @ -0,0 +1,23 @@ | ||||
| """admin signals""" | ||||
| from django.dispatch import receiver | ||||
|  | ||||
| from authentik.admin.api.tasks import TaskInfo | ||||
| from authentik.admin.api.workers import GAUGE_WORKERS | ||||
| from authentik.root.celery import CELERY_APP | ||||
| from authentik.root.monitoring import monitoring_set | ||||
|  | ||||
|  | ||||
| @receiver(monitoring_set) | ||||
| # pylint: disable=unused-argument | ||||
| def monitoring_set_workers(sender, **kwargs): | ||||
|     """Set worker gauge""" | ||||
|     count = len(CELERY_APP.control.ping(timeout=0.5)) | ||||
|     GAUGE_WORKERS.set(count) | ||||
|  | ||||
|  | ||||
| @receiver(monitoring_set) | ||||
| # pylint: disable=unused-argument | ||||
| def monitoring_set_tasks(sender, **kwargs): | ||||
|     """Set task gauges""" | ||||
|     for task in TaskInfo.all().values(): | ||||
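The new `authentik/admin/signals.py` above connects two receivers to `monitoring_set`, and the `apps.py` change earlier imports the module so those receivers are actually registered at startup. Assuming `monitoring_set` is an ordinary Django signal (its import path suggests it lives in `authentik.root.monitoring`), the dispatch pattern reduces to this stand-alone sketch, with prints in place of the Prometheus gauge updates:

```python
import django.dispatch
from django.dispatch import receiver

# Stand-in for authentik.root.monitoring.monitoring_set (assumed to be a plain Signal).
monitoring_set = django.dispatch.Signal()


@receiver(monitoring_set)
def monitoring_set_workers(sender, **kwargs):
    print("refresh worker gauge")


@receiver(monitoring_set)
def monitoring_set_tasks(sender, **kwargs):
    print("refresh per-task gauges")


# Whatever serves the metrics endpoint sends the signal; every connected
# receiver then refreshes its gauges right before Prometheus scrapes them.
monitoring_set.send(sender=None)
```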
|         task.set_prom_metrics() | ||||
| @ -1,6 +1,5 @@ | ||||
| """authentik admin tasks""" | ||||
| import re | ||||
| from os import environ | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.core.validators import URLValidator | ||||
| @ -9,7 +8,7 @@ from prometheus_client import Info | ||||
| from requests import RequestException | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik import ENV_GIT_HASH_KEY, __version__ | ||||
| from authentik import __version__, get_build_hash | ||||
| from authentik.events.models import Event, EventAction, Notification | ||||
| from authentik.events.monitored_tasks import ( | ||||
|     MonitoredTask, | ||||
| @ -36,7 +35,7 @@ def _set_prom_info(): | ||||
|         { | ||||
|             "version": __version__, | ||||
|             "latest": cache.get(VERSION_CACHE_KEY, ""), | ||||
|             "build_hash": environ.get(ENV_GIT_HASH_KEY, ""), | ||||
|             "build_hash": get_build_hash(), | ||||
|         } | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @ -1,7 +1,7 @@ | ||||
| """API Authentication""" | ||||
| from base64 import b64decode | ||||
| from binascii import Error | ||||
| from typing import Any, Optional, Union | ||||
| from typing import Any, Optional | ||||
|  | ||||
| from django.conf import settings | ||||
| from rest_framework.authentication import BaseAuthentication, get_authorization_header | ||||
| @ -69,7 +69,7 @@ def token_secret_key(value: str) -> Optional[User]: | ||||
| class TokenAuthentication(BaseAuthentication): | ||||
|     """Token-based authentication using HTTP Bearer authentication""" | ||||
|  | ||||
|     def authenticate(self, request: Request) -> Union[tuple[User, Any], None]: | ||||
|     def authenticate(self, request: Request) -> tuple[User, Any] | None: | ||||
|         """Token-based authentication using HTTP Bearer authentication""" | ||||
|         auth = get_authorization_header(request) | ||||
|  | ||||
|  | ||||
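The annotation change in `TokenAuthentication.authenticate` from `Union[tuple[User, Any], None]` to `tuple[User, Any] | None` uses PEP 604 union syntax, which only evaluates at runtime on Python 3.10+ and so goes hand in hand with the base image moving to `python:3.10` in the Dockerfile. A minimal side-by-side of the equivalent spellings, with placeholder types rather than the real `User` model:

```python
from typing import Any, Union


def authenticate_old(header: str) -> Union[tuple[str, Any], None]:
    """Pre-3.10 spelling; Optional[tuple[str, Any]] means the same thing."""
    return (header, None) if header else None


def authenticate_new(header: str) -> tuple[str, Any] | None:
    """PEP 604 spelling used in the diff; needs Python 3.10+ unless annotations stay strings."""
    return (header, None) if header else None


print(authenticate_old("Bearer abc"), authenticate_new(""))
```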
| @ -30,7 +30,7 @@ function getCookie(name) { | ||||
| window.addEventListener('DOMContentLoaded', (event) => { | ||||
|     const rapidocEl = document.querySelector('rapi-doc'); | ||||
|     rapidocEl.addEventListener('before-try', (e) => { | ||||
|         e.detail.request.headers.append('X-CSRFToken', getCookie("authentik_csrf")); | ||||
|         e.detail.request.headers.append('X-authentik-CSRF', getCookie("authentik_csrf")); | ||||
|     }); | ||||
| }); | ||||
| </script> | ||||
|  | ||||
| @ -4,7 +4,5 @@ from django.urls import include, path | ||||
| from authentik.api.v3.urls import urlpatterns as v3_urls | ||||
|  | ||||
| urlpatterns = [ | ||||
|     # TODO: Remove in 2022.1 | ||||
|     path("v2beta/", include(v3_urls)), | ||||
|     path("v3/", include(v3_urls)), | ||||
| ] | ||||
|  | ||||
| @ -80,7 +80,7 @@ class ConfigView(APIView): | ||||
|         config = ConfigSerializer( | ||||
|             { | ||||
|                 "error_reporting": { | ||||
|                     "enabled": CONFIG.y("error_reporting.enabled"), | ||||
|                     "enabled": CONFIG.y("error_reporting.enabled") and not settings.DEBUG, | ||||
|                     "environment": CONFIG.y("error_reporting.environment"), | ||||
|                     "send_pii": CONFIG.y("error_reporting.send_pii"), | ||||
|                     "traces_sample_rate": float(CONFIG.y("error_reporting.sample_rate", 0.4)), | ||||
|  | ||||
| @ -46,11 +46,7 @@ from authentik.policies.expiry.api import PasswordExpiryPolicyViewSet | ||||
| from authentik.policies.expression.api import ExpressionPolicyViewSet | ||||
| from authentik.policies.hibp.api import HaveIBeenPwendPolicyViewSet | ||||
| from authentik.policies.password.api import PasswordPolicyViewSet | ||||
| from authentik.policies.reputation.api import ( | ||||
|     IPReputationViewSet, | ||||
|     ReputationPolicyViewSet, | ||||
|     UserReputationViewSet, | ||||
| ) | ||||
| from authentik.policies.reputation.api import ReputationPolicyViewSet, ReputationViewSet | ||||
| from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet | ||||
| from authentik.providers.oauth2.api.provider import OAuth2ProviderViewSet | ||||
| from authentik.providers.oauth2.api.scope import ScopeMappingViewSet | ||||
| @ -151,8 +147,7 @@ router.register("policies/event_matcher", EventMatcherPolicyViewSet) | ||||
| router.register("policies/haveibeenpwned", HaveIBeenPwendPolicyViewSet) | ||||
| router.register("policies/password_expiry", PasswordExpiryPolicyViewSet) | ||||
| router.register("policies/password", PasswordPolicyViewSet) | ||||
| router.register("policies/reputation/users", UserReputationViewSet) | ||||
| router.register("policies/reputation/ips", IPReputationViewSet) | ||||
| router.register("policies/reputation/scores", ReputationViewSet) | ||||
| router.register("policies/reputation", ReputationPolicyViewSet) | ||||
|  | ||||
| router.register("providers/all", ProviderViewSet) | ||||
|  | ||||
| @ -1,13 +1,16 @@ | ||||
| """Application API Views""" | ||||
| from typing import Optional | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.db.models import QuerySet | ||||
| from django.http.response import HttpResponseBadRequest | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from django.utils.functional import SimpleLazyObject | ||||
| from drf_spectacular.types import OpenApiTypes | ||||
| from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema | ||||
| from guardian.shortcuts import get_objects_for_user | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.fields import ReadOnlyField | ||||
| from rest_framework.fields import ReadOnlyField, SerializerMethodField | ||||
| from rest_framework.parsers import MultiPartParser | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| @ -39,11 +42,22 @@ def user_app_cache_key(user_pk: str) -> str: | ||||
| class ApplicationSerializer(ModelSerializer): | ||||
|     """Application Serializer""" | ||||
|  | ||||
|     launch_url = ReadOnlyField(source="get_launch_url") | ||||
|     launch_url = SerializerMethodField() | ||||
|     provider_obj = ProviderSerializer(source="get_provider", required=False) | ||||
|  | ||||
|     meta_icon = ReadOnlyField(source="get_meta_icon") | ||||
|  | ||||
|     def get_launch_url(self, app: Application) -> Optional[str]: | ||||
|         """Allow formatting of launch URL""" | ||||
|         url = app.get_launch_url() | ||||
|         if not url: | ||||
|             return url | ||||
|         user = self.context["request"].user | ||||
|         if isinstance(user, SimpleLazyObject): | ||||
|             user._setup() | ||||
|             user = user._wrapped | ||||
|         return url % user.__dict__ | ||||
|  | ||||
|     class Meta: | ||||
|  | ||||
|         model = Application | ||||
|  | ||||
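The new `get_launch_url` method above runs an application's launch URL through `%`-style string formatting with the requesting user's `__dict__` (after forcing the lazy user object to resolve), so a launch URL can interpolate user fields. A small sketch of just the substitution; the URL and field names here are illustrative placeholders, not values from the codebase:

```python
class FakeUser:
    """Stand-in for the Django user; only __dict__ matters for the substitution."""

    def __init__(self, username: str, email: str):
        self.username = username
        self.email = email


launch_url = "https://wiki.example.com/profile/%(username)s"
user = FakeUser("jdoe", "jdoe@example.com")

# Same operation as `url % user.__dict__` in ApplicationSerializer.get_launch_url.
print(launch_url % user.__dict__)  # -> https://wiki.example.com/profile/jdoe
```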
| @ -1,10 +1,9 @@ | ||||
| """Tokens API Viewset""" | ||||
| from typing import Any | ||||
|  | ||||
| from django.http.response import Http404 | ||||
| from django_filters.rest_framework import DjangoFilterBackend | ||||
| from drf_spectacular.utils import OpenApiResponse, extend_schema | ||||
| from guardian.shortcuts import get_anonymous_user | ||||
| from guardian.shortcuts import assign_perm, get_anonymous_user | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.exceptions import ValidationError | ||||
| from rest_framework.fields import CharField | ||||
| @ -96,10 +95,12 @@ class TokenViewSet(UsedByMixin, ModelViewSet): | ||||
|  | ||||
|     def perform_create(self, serializer: TokenSerializer): | ||||
|         if not self.request.user.is_superuser: | ||||
|             return serializer.save( | ||||
|             instance = serializer.save( | ||||
|                 user=self.request.user, | ||||
|                 expiring=self.request.user.attributes.get(USER_ATTRIBUTE_TOKEN_EXPIRING, True), | ||||
|             ) | ||||
|             assign_perm("authentik_core.view_token_key", self.request.user, instance) | ||||
|             return instance | ||||
|         return super().perform_create(serializer) | ||||
|  | ||||
|     @permission_required("authentik_core.view_token_key") | ||||
| @ -114,7 +115,5 @@ class TokenViewSet(UsedByMixin, ModelViewSet): | ||||
|     def view_key(self, request: Request, identifier: str) -> Response: | ||||
|         """Return token key and log access""" | ||||
|         token: Token = self.get_object() | ||||
|         if token.is_expired: | ||||
|             raise Http404 | ||||
|         Event.new(EventAction.SECRET_VIEW, secret=token).from_http(request)  # noqa # nosec | ||||
|         return Response(TokenViewSerializer({"key": token.key}).data) | ||||
|  | ||||
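perform_create now assigns the creating user an object-level view_token_key permission via django-guardian, so the view_key endpoint keeps working for non-superusers on their own tokens (the expired-token 404 was dropped in the same change). A rough sketch of the grant-and-check pattern, assuming guardian's object permission backend is enabled:

```python
# Sketch of the object-level grant added in perform_create above (django-guardian).
from guardian.shortcuts import assign_perm

def grant_token_key_access(user, token):
    # Only the creating user may read this token's key
    assign_perm("authentik_core.view_token_key", user, token)
    # True once guardian's ObjectPermissionBackend is active
    return user.has_perm("authentik_core.view_token_key", token)
```
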
| @ -3,6 +3,7 @@ from datetime import timedelta | ||||
| from json import loads | ||||
| from typing import Optional | ||||
|  | ||||
| from django.contrib.auth import update_session_auth_hash | ||||
| from django.db.models.query import QuerySet | ||||
| from django.db.transaction import atomic | ||||
| from django.db.utils import IntegrityError | ||||
| @ -46,6 +47,7 @@ from authentik.core.api.utils import LinkSerializer, PassiveSerializer, is_dict | ||||
| from authentik.core.middleware import SESSION_IMPERSONATE_ORIGINAL_USER, SESSION_IMPERSONATE_USER | ||||
| from authentik.core.models import ( | ||||
|     USER_ATTRIBUTE_CHANGE_EMAIL, | ||||
|     USER_ATTRIBUTE_CHANGE_NAME, | ||||
|     USER_ATTRIBUTE_CHANGE_USERNAME, | ||||
|     USER_ATTRIBUTE_SA, | ||||
|     USER_ATTRIBUTE_TOKEN_EXPIRING, | ||||
| @ -134,6 +136,16 @@ class UserSelfSerializer(ModelSerializer): | ||||
|             raise ValidationError("Not allowed to change email.") | ||||
|         return email | ||||
|  | ||||
|     def validate_name(self, name: str): | ||||
|         """Check if the user is allowed to change their name""" | ||||
|         if self.instance.group_attributes().get( | ||||
|             USER_ATTRIBUTE_CHANGE_NAME, CONFIG.y_bool("default_user_change_name", True) | ||||
|         ): | ||||
|             return name | ||||
|         if name != self.instance.name: | ||||
|             raise ValidationError("Not allowed to change name.") | ||||
|         return name | ||||
|  | ||||
|     def validate_username(self, username: str): | ||||
|         """Check if the user is allowed to change their username""" | ||||
|         if self.instance.group_attributes().get( | ||||
| @ -144,6 +156,13 @@ class UserSelfSerializer(ModelSerializer): | ||||
|             raise ValidationError("Not allowed to change username.") | ||||
|         return username | ||||
|  | ||||
|     def save(self, **kwargs): | ||||
|         if self.instance: | ||||
|             attributes: dict = self.instance.attributes | ||||
|             attributes.update(self.validated_data.get("attributes", {})) | ||||
|             self.validated_data["attributes"] = attributes | ||||
|         return super().save(**kwargs) | ||||
|  | ||||
|     class Meta: | ||||
|  | ||||
|         model = User | ||||
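The save() override layers the submitted attributes over the stored ones instead of replacing the whole dict, which is what the update_self test further down relies on when it expects a pre-existing key to survive the update. A tiny stand-alone illustration:

```python
# Plain-dict illustration of the attribute merge in UserSelfSerializer.save() above.
stored = {"foo": "bar", "locale": "en"}  # existing user.attributes
submitted = {"locale": "de"}             # "attributes" from the PUT body

merged = dict(stored)
merged.update(submitted)                 # same semantics as attributes.update(...)
print(merged)                            # {'foo': 'bar', 'locale': 'de'} -- 'foo' survives
```
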
| @ -359,6 +378,35 @@ class UserViewSet(UsedByMixin, ModelViewSet): | ||||
|             ).data | ||||
|         return Response(serializer.initial_data) | ||||
|  | ||||
|     @permission_required("authentik_core.reset_user_password") | ||||
|     @extend_schema( | ||||
|         request=inline_serializer( | ||||
|             "UserPasswordSetSerializer", | ||||
|             { | ||||
|                 "password": CharField(required=True), | ||||
|             }, | ||||
|         ), | ||||
|         responses={ | ||||
|             204: "", | ||||
|             400: "", | ||||
|         }, | ||||
|     ) | ||||
|     @action(detail=True, methods=["POST"]) | ||||
|     # pylint: disable=invalid-name, unused-argument | ||||
|     def set_password(self, request: Request, pk: int) -> Response: | ||||
|         """Set password for user""" | ||||
|         user: User = self.get_object() | ||||
|         try: | ||||
|             user.set_password(request.data.get("password")) | ||||
|             user.save() | ||||
|         except (ValidationError, IntegrityError) as exc: | ||||
|             LOGGER.debug("Failed to set password", exc=exc) | ||||
|             return Response(status=400) | ||||
|         if user.pk == request.user.pk and SESSION_IMPERSONATE_USER not in self.request.session: | ||||
|             LOGGER.debug("Updating session hash after password change") | ||||
|             update_session_auth_hash(self.request, user) | ||||
|         return Response(status=204) | ||||
|  | ||||
|     @extend_schema(request=UserSelfSerializer, responses={200: SessionUserSerializer(many=False)}) | ||||
|     @action( | ||||
|         methods=["PUT"], | ||||
|  | ||||
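set_password calls Django's update_session_auth_hash when users change their own password (and are not impersonating), because rotating the password hash would otherwise invalidate their current session. A minimal sketch of that pattern outside the viewset:

```python
# Sketch of the keep-session-alive step used by set_password above.
from django.contrib.auth import update_session_auth_hash

def change_own_password(request, new_password):
    user = request.user
    user.set_password(new_password)
    user.save()
    # Without this, the session auth hash no longer matches and the user is logged out
    update_session_auth_hash(request, user)
```
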
| @ -15,7 +15,6 @@ import authentik.lib.models | ||||
|  | ||||
|  | ||||
| def migrate_sessions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     db_alias = schema_editor.connection.alias | ||||
|     from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
|     from django.core.cache import cache | ||||
|  | ||||
|  | ||||
| @ -12,7 +12,6 @@ import authentik.core.models | ||||
|  | ||||
|  | ||||
| def migrate_sessions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     db_alias = schema_editor.connection.alias | ||||
|     from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
|     from django.core.cache import cache | ||||
|  | ||||
|  | ||||
| @ -1,12 +1,13 @@ | ||||
| """authentik core models""" | ||||
| from datetime import timedelta | ||||
| from hashlib import md5, sha256 | ||||
| from typing import Any, Optional, Type | ||||
| from typing import Any, Optional | ||||
| from urllib.parse import urlencode | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from deepmerge import always_merger | ||||
| from django.conf import settings | ||||
| from django.contrib.auth.hashers import check_password | ||||
| from django.contrib.auth.models import AbstractUser | ||||
| from django.contrib.auth.models import UserManager as DjangoUserManager | ||||
| from django.db import models | ||||
| @ -38,6 +39,7 @@ USER_ATTRIBUTE_SA = "goauthentik.io/user/service-account" | ||||
| USER_ATTRIBUTE_SOURCES = "goauthentik.io/user/sources" | ||||
| USER_ATTRIBUTE_TOKEN_EXPIRING = "goauthentik.io/user/token-expires"  # nosec | ||||
| USER_ATTRIBUTE_CHANGE_USERNAME = "goauthentik.io/user/can-change-username" | ||||
| USER_ATTRIBUTE_CHANGE_NAME = "goauthentik.io/user/can-change-name" | ||||
| USER_ATTRIBUTE_CHANGE_EMAIL = "goauthentik.io/user/can-change-email" | ||||
| USER_ATTRIBUTE_CAN_OVERRIDE_IP = "goauthentik.io/user/override-ips" | ||||
|  | ||||
| @ -160,6 +162,22 @@ class User(GuardianUserMixin, AbstractUser): | ||||
|         self.password_change_date = now() | ||||
|         return super().set_password(password) | ||||
|  | ||||
|     def check_password(self, raw_password: str) -> bool: | ||||
|         """ | ||||
|         Return a boolean of whether the raw_password was correct. Handles | ||||
|         hashing formats behind the scenes. | ||||
|  | ||||
|         Slightly changed version which doesn't send a signal for such internal hash upgrades | ||||
|         """ | ||||
|  | ||||
|         def setter(raw_password): | ||||
|             self.set_password(raw_password, signal=False) | ||||
|             # Password hash upgrades shouldn't be considered password changes. | ||||
|             self._password = None | ||||
|             self.save(update_fields=["password"]) | ||||
|  | ||||
|         return check_password(raw_password, self.password, setter) | ||||
|  | ||||
|     @property | ||||
|     def uid(self) -> str: | ||||
|         """Generate a globally unique UID, based on the user ID and the hashed secret key""" | ||||

| @ -224,7 +242,7 @@ class Provider(SerializerModel): | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> Type[Serializer]: | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         """Get serializer for this model""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
| @ -270,8 +288,8 @@ class Application(PolicyBindingModel): | ||||
|         """Get launch URL if set, otherwise attempt to get launch URL based on provider.""" | ||||
|         if self.meta_launch_url: | ||||
|             return self.meta_launch_url | ||||
|         if self.provider: | ||||
|             return self.get_provider().launch_url | ||||
|         if provider := self.get_provider(): | ||||
|             return provider.launch_url | ||||
|         return None | ||||
|  | ||||
|     def get_provider(self) -> Optional[Provider]: | ||||
| @ -456,6 +474,14 @@ class Token(ManagedModel, ExpiringModel): | ||||
|         """Handler which is called when this object is expired.""" | ||||
|         from authentik.events.models import Event, EventAction | ||||
|  | ||||
|         if self.intent in [ | ||||
|             TokenIntents.INTENT_RECOVERY, | ||||
|             TokenIntents.INTENT_VERIFICATION, | ||||
|             TokenIntents.INTENT_APP_PASSWORD, | ||||
|         ]: | ||||
|             super().expire_action(*args, **kwargs) | ||||
|             return | ||||
|  | ||||
|         self.key = default_token_key() | ||||
|         self.expires = default_token_duration() | ||||
|         self.save(*args, **kwargs) | ||||
| @ -497,7 +523,7 @@ class PropertyMapping(SerializerModel, ManagedModel): | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> Type[Serializer]: | ||||
|     def serializer(self) -> type[Serializer]: | ||||
|         """Get serializer for this model""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
|  | ||||
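The check_password override delegates to django.contrib.auth.hashers.check_password with a setter callback, so outdated hashes are re-hashed in place without firing authentik's password-changed signal. A self-contained sketch of that hasher API; settings.configure() is only there to make the snippet runnable outside a project:

```python
# Standalone sketch of the setter callback used by the check_password override above.
from django.conf import settings

settings.configure()  # default PASSWORD_HASHERS are enough for this demo

from django.contrib.auth.hashers import check_password, make_password

stored = make_password("s3cret")  # stands in for user.password

def setter(raw_password):
    # Only invoked when the stored hash needs upgrading; the model override
    # re-hashes and saves here without emitting the password_changed signal.
    print("re-hash and save silently")

print(check_password("s3cret", stored, setter))  # True
```
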
| @ -1,6 +1,7 @@ | ||||
| """authentik core signals""" | ||||
| from typing import TYPE_CHECKING, Type | ||||
| from typing import TYPE_CHECKING | ||||
|  | ||||
| from django.apps import apps | ||||
| from django.contrib.auth.signals import user_logged_in, user_logged_out | ||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
| from django.core.cache import cache | ||||
| @ -11,6 +12,8 @@ from django.dispatch import receiver | ||||
| from django.http.request import HttpRequest | ||||
| from prometheus_client import Gauge | ||||
|  | ||||
| from authentik.root.monitoring import monitoring_set | ||||
|  | ||||
| # Arguments: user: User, password: str | ||||
| password_changed = Signal() | ||||
|  | ||||
| @ -20,6 +23,17 @@ if TYPE_CHECKING: | ||||
|     from authentik.core.models import AuthenticatedSession, User | ||||
|  | ||||
|  | ||||
| @receiver(monitoring_set) | ||||
| # pylint: disable=unused-argument | ||||
| def monitoring_set_models(sender, **kwargs): | ||||
|     """set models gauges""" | ||||
|     for model in apps.get_models(): | ||||
|         GAUGE_MODELS.labels( | ||||
|             model_name=model._meta.model_name, | ||||
|             app=model._meta.app_label, | ||||
|         ).set(model.objects.count()) | ||||
|  | ||||
|  | ||||
| @receiver(post_save) | ||||
| # pylint: disable=unused-argument | ||||
| def post_save_application(sender: type[Model], instance, created: bool, **_): | ||||
| @ -27,11 +41,6 @@ def post_save_application(sender: type[Model], instance, created: bool, **_): | ||||
|     from authentik.core.api.applications import user_app_cache_key | ||||
|     from authentik.core.models import Application | ||||
|  | ||||
|     GAUGE_MODELS.labels( | ||||
|         model_name=sender._meta.model_name, | ||||
|         app=sender._meta.app_label, | ||||
|     ).set(sender.objects.count()) | ||||
|  | ||||
|     if sender != Application: | ||||
|         return | ||||
|     if not created:  # pragma: no cover | ||||
| @ -62,7 +71,7 @@ def user_logged_out_session(sender, request: HttpRequest, user: "User", **_): | ||||
|  | ||||
|  | ||||
| @receiver(pre_delete) | ||||
| def authenticated_session_delete(sender: Type[Model], instance: "AuthenticatedSession", **_): | ||||
| def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_): | ||||
|     """Delete session when authenticated session is deleted""" | ||||
|     from authentik.core.models import AuthenticatedSession | ||||
|  | ||||
|  | ||||
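The per-model gauge is now refreshed from a monitoring_set receiver instead of on every post_save, so counts are only recomputed when metrics are collected. The labelled-gauge pattern itself is plain prometheus_client; a small sketch with made-up counts in place of Django's app registry:

```python
# Sketch of the labelled gauge used by monitoring_set_models above (prometheus_client).
from prometheus_client import Gauge

MODELS = Gauge("authentik_models_demo", "Model instance count", ["model_name", "app"])

counts = {("user", "authentik_core"): 42, ("token", "authentik_core"): 7}  # made-up numbers
for (model_name, app), count in counts.items():
    MODELS.labels(model_name=model_name, app=app).set(count)
```
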
| @ -1,6 +1,6 @@ | ||||
| """Source decision helper""" | ||||
| from enum import Enum | ||||
| from typing import Any, Optional, Type | ||||
| from typing import Any, Optional | ||||
|  | ||||
| from django.contrib import messages | ||||
| from django.db import IntegrityError | ||||
| @ -14,6 +14,7 @@ from structlog.stdlib import get_logger | ||||
| from authentik.core.models import Source, SourceUserMatchingModes, User, UserSourceConnection | ||||
| from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION, PostUserEnrollmentStage | ||||
| from authentik.events.models import Event, EventAction | ||||
| from authentik.flows.exceptions import FlowNonApplicableException | ||||
| from authentik.flows.models import Flow, Stage, in_memory_stage | ||||
| from authentik.flows.planner import ( | ||||
|     PLAN_CONTEXT_PENDING_USER, | ||||
| @ -24,6 +25,8 @@ from authentik.flows.planner import ( | ||||
| ) | ||||
| from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET, SESSION_KEY_PLAN | ||||
| from authentik.lib.utils.urls import redirect_with_qs | ||||
| from authentik.policies.denied import AccessDeniedResponse | ||||
| from authentik.policies.types import PolicyResult | ||||
| from authentik.policies.utils import delete_none_keys | ||||
| from authentik.stages.password import BACKEND_INBUILT | ||||
| from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND | ||||
| @ -50,7 +53,10 @@ class SourceFlowManager: | ||||
|  | ||||
|     identifier: str | ||||
|  | ||||
|     connection_type: Type[UserSourceConnection] = UserSourceConnection | ||||
|     connection_type: type[UserSourceConnection] = UserSourceConnection | ||||
|  | ||||
|     enroll_info: dict[str, Any] | ||||
|     policy_context: dict[str, Any] | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
| @ -64,6 +70,7 @@ class SourceFlowManager: | ||||
|         self.identifier = identifier | ||||
|         self.enroll_info = enroll_info | ||||
|         self._logger = get_logger().bind(source=source, identifier=identifier) | ||||
|         self.policy_context = {} | ||||
|  | ||||
|     # pylint: disable=too-many-return-statements | ||||
|     def get_action(self, **kwargs) -> tuple[Action, Optional[UserSourceConnection]]: | ||||
| @ -144,20 +151,23 @@ class SourceFlowManager: | ||||
|         except IntegrityError as exc: | ||||
|             self._logger.warning("failed to get action", exc=exc) | ||||
|             return redirect("/") | ||||
|         self._logger.debug("get_action() says", action=action, connection=connection) | ||||
|         if connection: | ||||
|             if action == Action.LINK: | ||||
|                 self._logger.debug("Linking existing user") | ||||
|                 return self.handle_existing_user_link(connection) | ||||
|             if action == Action.AUTH: | ||||
|                 self._logger.debug("Handling auth user") | ||||
|                 return self.handle_auth_user(connection) | ||||
|             if action == Action.ENROLL: | ||||
|                 self._logger.debug("Handling enrollment of new user") | ||||
|                 return self.handle_enroll(connection) | ||||
|         self._logger.debug("get_action", action=action, connection=connection) | ||||
|         try: | ||||
|             if connection: | ||||
|                 if action == Action.LINK: | ||||
|                     self._logger.debug("Linking existing user") | ||||
|                     return self.handle_existing_user_link(connection) | ||||
|                 if action == Action.AUTH: | ||||
|                     self._logger.debug("Handling auth user") | ||||
|                     return self.handle_auth_user(connection) | ||||
|                 if action == Action.ENROLL: | ||||
|                     self._logger.debug("Handling enrollment of new user") | ||||
|                     return self.handle_enroll(connection) | ||||
|         except FlowNonApplicableException as exc: | ||||
|             self._logger.warning("Flow non applicable", exc=exc) | ||||
|             return self.error_handler(exc, exc.policy_result) | ||||
|         # Default case, assume deny | ||||
|         messages.error( | ||||
|             self.request, | ||||
|         error = ( | ||||
|             _( | ||||
|                 ( | ||||
|                     "Request to authenticate with %(source)s has been denied. Please authenticate " | ||||
| @ -166,7 +176,17 @@ class SourceFlowManager: | ||||
|                 % {"source": self.source.name} | ||||
|             ), | ||||
|         ) | ||||
|         return redirect(reverse("authentik_core:root-redirect")) | ||||
|         return self.error_handler(error) | ||||
|  | ||||
|     def error_handler( | ||||
|         self, error: Exception, policy_result: Optional[PolicyResult] = None | ||||
|     ) -> HttpResponse: | ||||
|         """Handle any errors by returning an access denied stage""" | ||||
|         response = AccessDeniedResponse(self.request) | ||||
|         response.error_message = str(error) | ||||
|         if policy_result: | ||||
|             response.policy_result = policy_result | ||||
|         return response | ||||
|  | ||||
|     # pylint: disable=unused-argument | ||||
|     def get_stages_to_append(self, flow: Flow) -> list[Stage]: | ||||
| @ -179,7 +199,9 @@ class SourceFlowManager: | ||||
|             ] | ||||
|         return [] | ||||
|  | ||||
|     def _handle_login_flow(self, flow: Flow, **kwargs) -> HttpResponse: | ||||
|     def _handle_login_flow( | ||||
|         self, flow: Flow, connection: UserSourceConnection, **kwargs | ||||
|     ) -> HttpResponse: | ||||
|         """Prepare Authentication Plan, redirect user FlowExecutor""" | ||||
|         # Ensure redirect is carried through when user was trying to | ||||
|         # authorize application | ||||
| @ -193,8 +215,10 @@ class SourceFlowManager: | ||||
|                 PLAN_CONTEXT_SSO: True, | ||||
|                 PLAN_CONTEXT_SOURCE: self.source, | ||||
|                 PLAN_CONTEXT_REDIRECT: final_redirect, | ||||
|                 PLAN_CONTEXT_SOURCES_CONNECTION: connection, | ||||
|             } | ||||
|         ) | ||||
|         kwargs.update(self.policy_context) | ||||
|         if not flow: | ||||
|             return HttpResponseBadRequest() | ||||
|         # We run the Flow planner here so we can pass the Pending user in the context | ||||
| @ -220,7 +244,7 @@ class SourceFlowManager: | ||||
|             _("Successfully authenticated with %(source)s!" % {"source": self.source.name}), | ||||
|         ) | ||||
|         flow_kwargs = {PLAN_CONTEXT_PENDING_USER: connection.user} | ||||
|         return self._handle_login_flow(self.source.authentication_flow, **flow_kwargs) | ||||
|         return self._handle_login_flow(self.source.authentication_flow, connection, **flow_kwargs) | ||||
|  | ||||
|     def handle_existing_user_link( | ||||
|         self, | ||||
| @ -264,8 +288,8 @@ class SourceFlowManager: | ||||
|             return HttpResponseBadRequest() | ||||
|         return self._handle_login_flow( | ||||
|             self.source.enrollment_flow, | ||||
|             connection, | ||||
|             **{ | ||||
|                 PLAN_CONTEXT_PROMPT: delete_none_keys(self.enroll_info), | ||||
|                 PLAN_CONTEXT_SOURCES_CONNECTION: connection, | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
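get_flow now wraps its handlers in a try/except: a flow denied by policy raises FlowNonApplicableException carrying the PolicyResult, and error_handler turns it into an access-denied response instead of a silent redirect. A reduced sketch of that control flow, with plain dicts standing in for the authentik types:

```python
# Reduced sketch of the new error path in get_flow()/error_handler() above.
class FlowNonApplicableException(Exception):
    policy_result = None  # carries the PolicyResult in the real code

def plan_flow():
    exc = FlowNonApplicableException("denied by policy")
    exc.policy_result = {"passing": False, "messages": ["foo"]}  # stand-in for PolicyResult
    raise exc

def error_handler(error, policy_result=None):
    # The real handler returns an AccessDeniedResponse with these fields set
    return {"error_message": str(error), "policy_result": policy_result}

try:
    plan_flow()
except FlowNonApplicableException as exc:
    print(error_handler(exc, exc.policy_result))
```
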
| @ -6,7 +6,6 @@ from os import environ | ||||
| from boto3.exceptions import Boto3Error | ||||
| from botocore.exceptions import BotoCoreError, ClientError | ||||
| from dbbackup.db.exceptions import CommandConnectorError | ||||
| from django.conf import settings | ||||
| from django.contrib.humanize.templatetags.humanize import naturaltime | ||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||
| from django.core import management | ||||
| @ -63,8 +62,6 @@ def should_backup() -> bool: | ||||
|         return False | ||||
|     if not CONFIG.y_bool("postgresql.backup.enabled"): | ||||
|         return False | ||||
|     if settings.DEBUG: | ||||
|         return False | ||||
|     return True | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -5,6 +5,8 @@ | ||||
|  | ||||
| {% block head %} | ||||
| <script src="{% static 'dist/admin/AdminInterface.js' %}" type="module"></script> | ||||
| <meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)"> | ||||
| <meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)"> | ||||
| {% endblock %} | ||||
|  | ||||
| {% block body %} | ||||
|  | ||||
| @ -5,6 +5,8 @@ | ||||
|  | ||||
| {% block head %} | ||||
| <script src="{% static 'dist/user/UserInterface.js' %}" type="module"></script> | ||||
| <meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)"> | ||||
| <meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)"> | ||||
| {% endblock %} | ||||
|  | ||||
| {% block body %} | ||||
|  | ||||
| @ -1,6 +1,5 @@ | ||||
| """Test Applications API""" | ||||
| from django.urls import reverse | ||||
| from django.utils.encoding import force_str | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import Application | ||||
| @ -14,7 +13,9 @@ class TestApplicationsAPI(APITestCase): | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
|         self.user = create_test_admin_user() | ||||
|         self.allowed = Application.objects.create(name="allowed", slug="allowed") | ||||
|         self.allowed = Application.objects.create( | ||||
|             name="allowed", slug="allowed", meta_launch_url="https://goauthentik.io/%(username)s" | ||||
|         ) | ||||
|         self.denied = Application.objects.create(name="denied", slug="denied") | ||||
|         PolicyBinding.objects.create( | ||||
|             target=self.denied, | ||||
| @ -32,7 +33,7 @@ class TestApplicationsAPI(APITestCase): | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         self.assertJSONEqual(force_str(response.content), {"messages": [], "passing": True}) | ||||
|         self.assertJSONEqual(response.content.decode(), {"messages": [], "passing": True}) | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|                 "authentik_api:application-check-access", | ||||
| @ -40,14 +41,14 @@ class TestApplicationsAPI(APITestCase): | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         self.assertJSONEqual(force_str(response.content), {"messages": ["dummy"], "passing": False}) | ||||
|         self.assertJSONEqual(response.content.decode(), {"messages": ["dummy"], "passing": False}) | ||||
|  | ||||
|     def test_list(self): | ||||
|         """Test list operation without superuser_full_list""" | ||||
|         self.client.force_login(self.user) | ||||
|         response = self.client.get(reverse("authentik_api:application-list")) | ||||
|         self.assertJSONEqual( | ||||
|             force_str(response.content), | ||||
|             response.content.decode(), | ||||
|             { | ||||
|                 "pagination": { | ||||
|                     "next": 0, | ||||
| @ -65,8 +66,8 @@ class TestApplicationsAPI(APITestCase): | ||||
|                         "slug": "allowed", | ||||
|                         "provider": None, | ||||
|                         "provider_obj": None, | ||||
|                         "launch_url": None, | ||||
|                         "meta_launch_url": "", | ||||
|                         "launch_url": f"https://goauthentik.io/{self.user.username}", | ||||
|                         "meta_launch_url": "https://goauthentik.io/%(username)s", | ||||
|                         "meta_icon": None, | ||||
|                         "meta_description": "", | ||||
|                         "meta_publisher": "", | ||||
| @ -83,7 +84,7 @@ class TestApplicationsAPI(APITestCase): | ||||
|             reverse("authentik_api:application-list") + "?superuser_full_list=true" | ||||
|         ) | ||||
|         self.assertJSONEqual( | ||||
|             force_str(response.content), | ||||
|             response.content.decode(), | ||||
|             { | ||||
|                 "pagination": { | ||||
|                     "next": 0, | ||||
| @ -101,8 +102,8 @@ class TestApplicationsAPI(APITestCase): | ||||
|                         "slug": "allowed", | ||||
|                         "provider": None, | ||||
|                         "provider_obj": None, | ||||
|                         "launch_url": None, | ||||
|                         "meta_launch_url": "", | ||||
|                         "launch_url": f"https://goauthentik.io/{self.user.username}", | ||||
|                         "meta_launch_url": "https://goauthentik.io/%(username)s", | ||||
|                         "meta_icon": None, | ||||
|                         "meta_description": "", | ||||
|                         "meta_publisher": "", | ||||
|  | ||||
| @ -2,7 +2,6 @@ | ||||
| from json import loads | ||||
|  | ||||
| from django.urls.base import reverse | ||||
| from django.utils.encoding import force_str | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import User | ||||
| @ -28,5 +27,5 @@ class TestAuthenticatedSessionsAPI(APITestCase): | ||||
|         self.client.force_login(self.other_user) | ||||
|         response = self.client.get(reverse("authentik_api:authenticatedsession-list")) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         body = loads(force_str(response.content)) | ||||
|         body = loads(response.content.decode()) | ||||
|         self.assertEqual(body["pagination"]["count"], 1) | ||||
|  | ||||
| @ -1,6 +1,6 @@ | ||||
| """authentik core models tests""" | ||||
| from time import sleep | ||||
| from typing import Callable, Type | ||||
| from typing import Callable | ||||
|  | ||||
| from django.test import RequestFactory, TestCase | ||||
| from django.utils.timezone import now | ||||
| @ -27,7 +27,7 @@ class TestModels(TestCase): | ||||
|         self.assertFalse(token.is_expired) | ||||
|  | ||||
|  | ||||
| def source_tester_factory(test_model: Type[Stage]) -> Callable: | ||||
| def source_tester_factory(test_model: type[Stage]) -> Callable: | ||||
|     """Test source""" | ||||
|  | ||||
|     factory = RequestFactory() | ||||
| @ -47,7 +47,7 @@ def source_tester_factory(test_model: Type[Stage]) -> Callable: | ||||
|     return tester | ||||
|  | ||||
|  | ||||
| def provider_tester_factory(test_model: Type[Stage]) -> Callable: | ||||
| def provider_tester_factory(test_model: type[Stage]) -> Callable: | ||||
|     """Test provider""" | ||||
|  | ||||
|     def tester(self: TestModels): | ||||
|  | ||||
| @ -6,8 +6,12 @@ from guardian.utils import get_anonymous_user | ||||
|  | ||||
| from authentik.core.models import SourceUserMatchingModes, User | ||||
| from authentik.core.sources.flow_manager import Action | ||||
| from authentik.flows.models import Flow, FlowDesignation | ||||
| from authentik.lib.generators import generate_id | ||||
| from authentik.lib.tests.utils import get_request | ||||
| from authentik.policies.denied import AccessDeniedResponse | ||||
| from authentik.policies.expression.models import ExpressionPolicy | ||||
| from authentik.policies.models import PolicyBinding | ||||
| from authentik.sources.oauth.models import OAuthSource, UserOAuthSourceConnection | ||||
| from authentik.sources.oauth.views.callback import OAuthSourceFlowManager | ||||
|  | ||||
| @ -17,7 +21,7 @@ class TestSourceFlowManager(TestCase): | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         self.source = OAuthSource.objects.create(name="test") | ||||
|         self.source: OAuthSource = OAuthSource.objects.create(name="test") | ||||
|         self.factory = RequestFactory() | ||||
|         self.identifier = generate_id() | ||||
|  | ||||
| @ -143,3 +147,34 @@ class TestSourceFlowManager(TestCase): | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.ENROLL) | ||||
|         flow_manager.get_flow() | ||||
|  | ||||
|     def test_error_non_applicable_flow(self): | ||||
|         """Test error handling when a source's selected flow is non-applicable due to a policy""" | ||||
|         self.source.user_matching_mode = SourceUserMatchingModes.USERNAME_LINK | ||||
|  | ||||
|         flow = Flow.objects.create( | ||||
|             name="test", slug="test", title="test", designation=FlowDesignation.ENROLLMENT | ||||
|         ) | ||||
|         policy = ExpressionPolicy.objects.create( | ||||
|             name="false", expression="""ak_message("foo");return False""" | ||||
|         ) | ||||
|         PolicyBinding.objects.create( | ||||
|             policy=policy, | ||||
|             target=flow, | ||||
|             order=0, | ||||
|         ) | ||||
|         self.source.enrollment_flow = flow | ||||
|         self.source.save() | ||||
|  | ||||
|         flow_manager = OAuthSourceFlowManager( | ||||
|             self.source, | ||||
|             get_request("/", user=AnonymousUser()), | ||||
|             self.identifier, | ||||
|             {"username": "foo"}, | ||||
|         ) | ||||
|         action, _ = flow_manager.get_action() | ||||
|         self.assertEqual(action, Action.ENROLL) | ||||
|         response = flow_manager.get_flow() | ||||
|         self.assertIsInstance(response, AccessDeniedResponse) | ||||
|         # pylint: disable=no-member | ||||
|         self.assertEqual(response.error_message, "foo") | ||||
|  | ||||
| @ -30,6 +30,7 @@ class TestTokenAPI(APITestCase): | ||||
|         self.assertEqual(token.user, self.user) | ||||
|         self.assertEqual(token.intent, TokenIntents.INTENT_API) | ||||
|         self.assertEqual(token.expiring, True) | ||||
|         self.assertTrue(self.user.has_perm("authentik_core.view_token_key", token)) | ||||
|  | ||||
|     def test_token_create_invalid(self): | ||||
|         """Test token creation endpoint (invalid data)""" | ||||
| @ -54,7 +55,9 @@ class TestTokenAPI(APITestCase): | ||||
|  | ||||
|     def test_token_expire(self): | ||||
|         """Test Token expire task""" | ||||
|         token: Token = Token.objects.create(expires=now(), user=get_anonymous_user()) | ||||
|         token: Token = Token.objects.create( | ||||
|             expires=now(), user=get_anonymous_user(), intent=TokenIntents.INTENT_API | ||||
|         ) | ||||
|         key = token.key | ||||
|         clean_expired_models.delay().get() | ||||
|         token.refresh_from_db() | ||||
|  | ||||
| @ -2,9 +2,15 @@ | ||||
| from django.urls.base import reverse | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import USER_ATTRIBUTE_CHANGE_EMAIL, USER_ATTRIBUTE_CHANGE_USERNAME, User | ||||
| from authentik.core.models import ( | ||||
|     USER_ATTRIBUTE_CHANGE_EMAIL, | ||||
|     USER_ATTRIBUTE_CHANGE_NAME, | ||||
|     USER_ATTRIBUTE_CHANGE_USERNAME, | ||||
|     User, | ||||
| ) | ||||
| from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_tenant | ||||
| from authentik.flows.models import FlowDesignation | ||||
| from authentik.lib.generators import generate_key | ||||
| from authentik.stages.email.models import EmailStage | ||||
| from authentik.tenants.models import Tenant | ||||
|  | ||||
| @ -18,11 +24,28 @@ class TestUsersAPI(APITestCase): | ||||
|  | ||||
|     def test_update_self(self): | ||||
|         """Test update_self""" | ||||
|         self.admin.attributes["foo"] = "bar" | ||||
|         self.admin.save() | ||||
|         self.admin.refresh_from_db() | ||||
|         self.client.force_login(self.admin) | ||||
|         response = self.client.put( | ||||
|             reverse("authentik_api:user-update-self"), data={"username": "foo", "name": "foo"} | ||||
|         ) | ||||
|         self.admin.refresh_from_db() | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         self.assertEqual(self.admin.attributes["foo"], "bar") | ||||
|         self.assertEqual(self.admin.username, "foo") | ||||
|         self.assertEqual(self.admin.name, "foo") | ||||
|  | ||||
|     def test_update_self_name_denied(self): | ||||
|         """Test update_self with name change denied""" | ||||
|         self.admin.attributes[USER_ATTRIBUTE_CHANGE_NAME] = False | ||||
|         self.admin.save() | ||||
|         self.client.force_login(self.admin) | ||||
|         response = self.client.put( | ||||
|             reverse("authentik_api:user-update-self"), data={"username": "foo", "name": "foo"} | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 400) | ||||
|  | ||||
|     def test_update_self_username_denied(self): | ||||
|         """Test update_self""" | ||||
| @ -68,6 +91,18 @@ class TestUsersAPI(APITestCase): | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 404) | ||||
|  | ||||
|     def test_set_password(self): | ||||
|         """Test Direct password set""" | ||||
|         self.client.force_login(self.admin) | ||||
|         new_pw = generate_key() | ||||
|         response = self.client.post( | ||||
|             reverse("authentik_api:user-set-password", kwargs={"pk": self.admin.pk}), | ||||
|             data={"password": new_pw}, | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 204) | ||||
|         self.admin.refresh_from_db() | ||||
|         self.assertTrue(self.admin.check_password(new_pw)) | ||||
|  | ||||
|     def test_recovery(self): | ||||
|         """Test user recovery link (no recovery flow set)""" | ||||
|         flow = create_test_flow(FlowDesignation.RECOVERY) | ||||
|  | ||||
| @ -29,3 +29,4 @@ class UserSettingSerializer(PassiveSerializer): | ||||
|     component = CharField() | ||||
|     title = CharField() | ||||
|     configure_url = CharField(required=False) | ||||
|     icon_url = CharField() | ||||
|  | ||||
| @ -1,4 +1,6 @@ | ||||
| """Crypto API Views""" | ||||
| from typing import Optional | ||||
|  | ||||
| from cryptography.hazmat.backends import default_backend | ||||
| from cryptography.hazmat.primitives.serialization import load_pem_private_key | ||||
| from cryptography.x509 import load_pem_x509_certificate | ||||
| @ -15,6 +17,7 @@ from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.serializers import ModelSerializer, ValidationError | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.api.decorators import permission_required | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| @ -24,6 +27,8 @@ from authentik.crypto.managed import MANAGED_KEY | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.events.models import Event, EventAction | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| class CertificateKeyPairSerializer(ModelSerializer): | ||||
|     """CertificateKeyPair Serializer""" | ||||
| @ -31,6 +36,7 @@ class CertificateKeyPairSerializer(ModelSerializer): | ||||
|     cert_expiry = DateTimeField(source="certificate.not_valid_after", read_only=True) | ||||
|     cert_subject = SerializerMethodField() | ||||
|     private_key_available = SerializerMethodField() | ||||
|     private_key_type = SerializerMethodField() | ||||
|  | ||||
|     certificate_download_url = SerializerMethodField() | ||||
|     private_key_download_url = SerializerMethodField() | ||||
| @ -43,6 +49,13 @@ class CertificateKeyPairSerializer(ModelSerializer): | ||||
|         """Show if this keypair has a private key configured or not""" | ||||
|         return instance.key_data != "" and instance.key_data is not None | ||||
|  | ||||
|     def get_private_key_type(self, instance: CertificateKeyPair) -> Optional[str]: | ||||
|         """Get the private key's type, if set""" | ||||
|         key = instance.private_key | ||||
|         if key: | ||||
|             return key.__class__.__name__.replace("_", "").lower().replace("privatekey", "") | ||||
|         return None | ||||
|  | ||||
|     def get_certificate_download_url(self, instance: CertificateKeyPair) -> str: | ||||
|         """Get URL to download certificate""" | ||||
|         return ( | ||||
| @ -66,22 +79,30 @@ class CertificateKeyPairSerializer(ModelSerializer): | ||||
|     def validate_certificate_data(self, value: str) -> str: | ||||
|         """Verify that input is a valid PEM x509 Certificate""" | ||||
|         try: | ||||
|             load_pem_x509_certificate(value.encode("utf-8"), default_backend()) | ||||
|         except ValueError: | ||||
|             # Cast to string to fully load and parse certificate | ||||
|             # Prevents issues like https://github.com/goauthentik/authentik/issues/2082 | ||||
|             str(load_pem_x509_certificate(value.encode("utf-8"), default_backend())) | ||||
|         except ValueError as exc: | ||||
|             LOGGER.warning("Failed to load certificate", exc=exc) | ||||
|             raise ValidationError("Unable to load certificate.") | ||||
|         return value | ||||
|  | ||||
|     def validate_key_data(self, value: str) -> str: | ||||
|         """Verify that input is a valid PEM RSA Key""" | ||||
|         """Verify that input is a valid PEM Key""" | ||||
|         # Since this field is optional, data can be empty. | ||||
|         if value != "": | ||||
|             try: | ||||
|                 load_pem_private_key( | ||||
|                     str.encode("\n".join([x.strip() for x in value.split("\n")])), | ||||
|                     password=None, | ||||
|                     backend=default_backend(), | ||||
|                 # Cast to string to fully load and parse certificate | ||||
|                 # Prevents issues like https://github.com/goauthentik/authentik/issues/2082 | ||||
|                 str( | ||||
|                     load_pem_private_key( | ||||
|                         str.encode("\n".join([x.strip() for x in value.split("\n")])), | ||||
|                         password=None, | ||||
|                         backend=default_backend(), | ||||
|                     ) | ||||
|                 ) | ||||
|             except (ValueError, TypeError): | ||||
|             except (ValueError, TypeError) as exc: | ||||
|                 LOGGER.warning("Failed to load private key", exc=exc) | ||||
|                 raise ValidationError("Unable to load private key (possibly encrypted?).") | ||||
|         return value | ||||
|  | ||||
| @ -98,6 +119,7 @@ class CertificateKeyPairSerializer(ModelSerializer): | ||||
|             "cert_expiry", | ||||
|             "cert_subject", | ||||
|             "private_key_available", | ||||
|             "private_key_type", | ||||
|             "certificate_download_url", | ||||
|             "private_key_download_url", | ||||
|             "managed", | ||||
|  | ||||
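get_private_key_type reduces the cryptography key class name to a short label, e.g. an RSA key becomes "rsa". A quick sketch with an in-memory key and no certificate involved:

```python
# Sketch of the private_key_type derivation used by the serializer above.
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
type_name = key.__class__.__name__.replace("_", "").lower().replace("privatekey", "")
print(type_name)  # rsa
```
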
| @ -44,7 +44,7 @@ class CertificateBuilder: | ||||
|         """Build self-signed certificate""" | ||||
|         one_day = datetime.timedelta(1, 0, 0) | ||||
|         self.__private_key = rsa.generate_private_key( | ||||
|             public_exponent=65537, key_size=2048, backend=default_backend() | ||||
|             public_exponent=65537, key_size=4096, backend=default_backend() | ||||
|         ) | ||||
|         self.__public_key = self.__private_key.public_key() | ||||
|         alt_names: list[x509.GeneralName] = [x509.DNSName(x) for x in subject_alt_names or []] | ||||
|  | ||||
| @ -6,6 +6,11 @@ from uuid import uuid4 | ||||
|  | ||||
| from cryptography.hazmat.backends import default_backend | ||||
| from cryptography.hazmat.primitives import hashes | ||||
| from cryptography.hazmat.primitives.asymmetric.ec import ( | ||||
|     EllipticCurvePrivateKey, | ||||
|     EllipticCurvePublicKey, | ||||
| ) | ||||
| from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey, Ed25519PublicKey | ||||
| from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey | ||||
| from cryptography.hazmat.primitives.serialization import load_pem_private_key | ||||
| from cryptography.x509 import Certificate, load_pem_x509_certificate | ||||
| @ -36,8 +41,8 @@ class CertificateKeyPair(ManagedModel, CreatedUpdatedModel): | ||||
|     ) | ||||
|  | ||||
|     _cert: Optional[Certificate] = None | ||||
|     _private_key: Optional[RSAPrivateKey] = None | ||||
|     _public_key: Optional[RSAPublicKey] = None | ||||
|     _private_key: Optional[RSAPrivateKey | EllipticCurvePrivateKey | Ed25519PrivateKey] = None | ||||
|     _public_key: Optional[RSAPublicKey | EllipticCurvePublicKey | Ed25519PublicKey] = None | ||||
|  | ||||
|     @property | ||||
|     def certificate(self) -> Certificate: | ||||
| @ -49,14 +54,16 @@ class CertificateKeyPair(ManagedModel, CreatedUpdatedModel): | ||||
|         return self._cert | ||||
|  | ||||
|     @property | ||||
|     def public_key(self) -> Optional[RSAPublicKey]: | ||||
|     def public_key(self) -> Optional[RSAPublicKey | EllipticCurvePublicKey | Ed25519PublicKey]: | ||||
|         """Get public key of the private key""" | ||||
|         if not self._public_key: | ||||
|             self._public_key = self.private_key.public_key() | ||||
|         return self._public_key | ||||
|  | ||||
|     @property | ||||
|     def private_key(self) -> Optional[RSAPrivateKey]: | ||||
|     def private_key( | ||||
|         self, | ||||
|     ) -> Optional[RSAPrivateKey | EllipticCurvePrivateKey | Ed25519PrivateKey]: | ||||
|         """Get python cryptography PrivateKey instance""" | ||||
|         if not self._private_key and self.key_data != "": | ||||
|             try: | ||||
|  | ||||
| @ -24,7 +24,7 @@ MANAGED_DISCOVERED = "goauthentik.io/crypto/discovered/%s" | ||||
|  | ||||
|  | ||||
| def ensure_private_key_valid(body: str): | ||||
|     """Attempt loading of an RSA Private key without password""" | ||||
|     """Attempt loading of a PEM Private key without password""" | ||||
|     load_pem_private_key( | ||||
|         str.encode("\n".join([x.strip() for x in body.split("\n")])), | ||||
|         password=None, | ||||
| @ -42,7 +42,7 @@ def ensure_certificate_valid(body: str): | ||||
| @CELERY_APP.task(bind=True, base=MonitoredTask) | ||||
| @prefill_task | ||||
| def certificate_discovery(self: MonitoredTask): | ||||
|     """Discover and update certificates form the filesystem""" | ||||
|     """Discover, import and update certificates from the filesystem""" | ||||
|     certs = {} | ||||
|     private_keys = {} | ||||
|     discovered = 0 | ||||
| @ -52,6 +52,9 @@ def certificate_discovery(self: MonitoredTask): | ||||
|             continue | ||||
|         if path.is_dir(): | ||||
|             continue | ||||
|         # For certbot setups, we want to ignore archive. | ||||
|         if "archive" in file: | ||||
|             continue | ||||
|         # Support certbot's directory structure | ||||
|         if path.name in ["fullchain.pem", "privkey.pem"]: | ||||
|             cert_name = path.parent.name | ||||
| @ -60,7 +63,7 @@ def certificate_discovery(self: MonitoredTask): | ||||
|         try: | ||||
|             with open(path, "r+", encoding="utf-8") as _file: | ||||
|                 body = _file.read() | ||||
|                 if "BEGIN RSA PRIVATE KEY" in body: | ||||
|                 if "PRIVATE KEY" in body: | ||||
|                     private_keys[cert_name] = ensure_private_key_valid(body) | ||||
|                 else: | ||||
|                     certs[cert_name] = ensure_certificate_valid(body) | ||||
| @ -79,7 +82,7 @@ def certificate_discovery(self: MonitoredTask): | ||||
|             cert.certificate_data = cert_data | ||||
|             dirty = True | ||||
|         if name in private_keys: | ||||
|             if cert.key_data == private_keys[name]: | ||||
|             if cert.key_data != private_keys[name]: | ||||
|                 cert.key_data = private_keys[name] | ||||
|                 dirty = True | ||||
|         if dirty: | ||||
|  | ||||
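certificate_discovery now skips certbot's archive directories and treats any PEM body containing "PRIVATE KEY" as a key, so PKCS#8 and EC keys are picked up alongside traditional "BEGIN RSA PRIVATE KEY" blocks. A toy sketch of the detection rule:

```python
# Toy sketch of the broadened PEM detection in certificate_discovery above.
samples = {
    "rsa.pem": "-----BEGIN RSA PRIVATE KEY-----\n...",
    "pkcs8.pem": "-----BEGIN PRIVATE KEY-----\n...",
    "cert.pem": "-----BEGIN CERTIFICATE-----\n...",
}
for name, body in samples.items():
    kind = "private key" if "PRIVATE KEY" in body else "certificate"
    print(f"{name} -> {kind}")
```
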
| @ -146,7 +146,7 @@ class TestCrypto(APITestCase): | ||||
|             client_secret=generate_key(), | ||||
|             authorization_flow=create_test_flow(), | ||||
|             redirect_uris="http://localhost", | ||||
|             rsa_key=keypair, | ||||
|             signing_key=keypair, | ||||
|         ) | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
| @ -191,9 +191,12 @@ class TestCrypto(APITestCase): | ||||
|             with CONFIG.patch("cert_discovery_dir", temp_dir): | ||||
|                 # pyright: reportGeneralTypeIssues=false | ||||
|                 certificate_discovery()  # pylint: disable=no-value-for-parameter | ||||
|         self.assertTrue( | ||||
|             CertificateKeyPair.objects.filter(managed=MANAGED_DISCOVERED % "foo").exists() | ||||
|         ) | ||||
|         keypair: CertificateKeyPair = CertificateKeyPair.objects.filter( | ||||
|             managed=MANAGED_DISCOVERED % "foo" | ||||
|         ).first() | ||||
|         self.assertIsNotNone(keypair) | ||||
|         self.assertIsNotNone(keypair.certificate) | ||||
|         self.assertIsNotNone(keypair.private_key) | ||||
|         self.assertTrue( | ||||
|             CertificateKeyPair.objects.filter(managed=MANAGED_DISCOVERED % "foo.bar").exists() | ||||
|         ) | ||||
|  | ||||
| @ -15,12 +15,14 @@ from authentik.api.decorators import permission_required | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import PassiveSerializer | ||||
| from authentik.events.models import ( | ||||
|     Event, | ||||
|     Notification, | ||||
|     NotificationSeverity, | ||||
|     NotificationTransport, | ||||
|     NotificationTransportError, | ||||
|     TransportMode, | ||||
| ) | ||||
| from authentik.events.utils import get_user | ||||
|  | ||||
|  | ||||
| class NotificationTransportSerializer(ModelSerializer): | ||||
| @ -86,6 +88,12 @@ class NotificationTransportViewSet(UsedByMixin, ModelViewSet): | ||||
|             severity=NotificationSeverity.NOTICE, | ||||
|             body=f"Test Notification from transport {transport.name}", | ||||
|             user=request.user, | ||||
|             event=Event( | ||||
|                 action="Test", | ||||
|                 user=get_user(request.user), | ||||
|                 app=self.__class__.__module__, | ||||
|                 context={"foo": "bar"}, | ||||
|             ), | ||||
|         ) | ||||
|         try: | ||||
|             response = NotificationTransportTestSerializer( | ||||
|  | ||||
| @ -35,12 +35,11 @@ class GeoIPReader: | ||||
|  | ||||
|     def __open(self): | ||||
|         """Get GeoIP Reader, if configured, otherwise none""" | ||||
|         path = CONFIG.y("authentik.geoip") | ||||
|         path = CONFIG.y("geoip") | ||||
|         if path == "" or not path: | ||||
|             return | ||||
|         try: | ||||
|             reader = Reader(path) | ||||
|             self.__reader = reader | ||||
|             self.__reader = Reader(path) | ||||
|             self.__last_mtime = stat(path).st_mtime | ||||
|             LOGGER.info("Loaded GeoIP database", last_write=self.__last_mtime) | ||||
|         except OSError as exc: | ||||
|  | ||||
| @ -19,7 +19,7 @@ def convert_user_to_json(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     Event = apps.get_model("authentik_events", "Event") | ||||
|  | ||||
|     db_alias = schema_editor.connection.alias | ||||
|     for event in Event.objects.all(): | ||||
|     for event in Event.objects.using(db_alias).all(): | ||||
|         event.delete() | ||||
|         # Because event objects cannot be updated, we have to re-create them | ||||
|         event.pk = None | ||||
|  | ||||
| @ -10,7 +10,7 @@ def convert_user_to_json(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     Event = apps.get_model("authentik_events", "Event") | ||||
|  | ||||
|     db_alias = schema_editor.connection.alias | ||||
|     for event in Event.objects.all(): | ||||
|     for event in Event.objects.using(db_alias).all(): | ||||
|         event.delete() | ||||
|         # Because event objects cannot be updated, we have to re-create them | ||||
|         event.pk = None | ||||
|  | ||||
| @ -4,7 +4,7 @@ from collections import Counter | ||||
| from datetime import timedelta | ||||
| from inspect import currentframe | ||||
| from smtplib import SMTPException | ||||
| from typing import TYPE_CHECKING, Optional, Type, Union | ||||
| from typing import TYPE_CHECKING, Optional | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from django.conf import settings | ||||
| @ -190,7 +190,7 @@ class Event(ExpiringModel): | ||||
|  | ||||
|     @staticmethod | ||||
|     def new( | ||||
|         action: Union[str, EventAction], | ||||
|         action: str | EventAction, | ||||
|         app: Optional[str] = None, | ||||
|         **kwargs, | ||||
|     ) -> "Event": | ||||
| @ -517,7 +517,7 @@ class NotificationWebhookMapping(PropertyMapping): | ||||
|         return "ak-property-mapping-notification-form" | ||||
|  | ||||
|     @property | ||||
|     def serializer(self) -> Type["Serializer"]: | ||||
|     def serializer(self) -> type["Serializer"]: | ||||
|         from authentik.events.api.notification_mapping import NotificationWebhookMappingSerializer | ||||
|  | ||||
|         return NotificationWebhookMappingSerializer | ||||
|  | ||||
| @ -72,7 +72,7 @@ class WithUserInfoChallenge(Challenge): | ||||
|     pending_user_avatar = CharField() | ||||
|  | ||||
|  | ||||
| class AccessDeniedChallenge(Challenge): | ||||
| class AccessDeniedChallenge(WithUserInfoChallenge): | ||||
|     """Challenge when a flow's active stage calls `stage_invalid()`.""" | ||||
|  | ||||
|     error_message = CharField(required=False) | ||||
|  | ||||
| @ -1,11 +1,14 @@ | ||||
| """flow exceptions""" | ||||
|  | ||||
| from authentik.lib.sentry import SentryIgnoredException | ||||
| from authentik.policies.types import PolicyResult | ||||
|  | ||||
|  | ||||
| class FlowNonApplicableException(SentryIgnoredException): | ||||
|     """Flow does not apply to current user (denied by policy).""" | ||||
|  | ||||
|     policy_result: PolicyResult | ||||
|  | ||||
|  | ||||
| class EmptyFlowException(SentryIgnoredException): | ||||
|     """Flow has no stages.""" | ||||
|  | ||||
| @ -10,8 +10,8 @@ def add_title_for_defaults(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|         "default-invalidation-flow": "Default Invalidation Flow", | ||||
|         "default-source-enrollment": "Welcome to authentik! Please select a username.", | ||||
|         "default-source-authentication": "Welcome to authentik!", | ||||
|         "default-provider-authorization-implicit-consent": "Default Provider Authorization Flow (implicit consent)", | ||||
|         "default-provider-authorization-explicit-consent": "Default Provider Authorization Flow (explicit consent)", | ||||
|         "default-provider-authorization-implicit-consent": "Redirecting to %(app)s", | ||||
|         "default-provider-authorization-explicit-consent": "Redirecting to %(app)s", | ||||
|         "default-password-change": "Change password", | ||||
|     } | ||||
|     db_alias = schema_editor.connection.alias | ||||
|  | ||||
							
								
								
									
| authentik/flows/migrations/0021_auto_20211227_2103.py (new file, 27 lines) | ||||
							| @ -0,0 +1,27 @@ | ||||
| # Generated by Django 4.0 on 2021-12-27 21:03 | ||||
| from django.apps.registry import Apps | ||||
| from django.db import migrations, models | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
|  | ||||
|  | ||||
| def update_title_for_defaults(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     slug_title_map = { | ||||
|         "default-provider-authorization-implicit-consent": "Redirecting to %(app)s", | ||||
|         "default-provider-authorization-explicit-consent": "Redirecting to %(app)s", | ||||
|     } | ||||
|     db_alias = schema_editor.connection.alias | ||||
|     Flow = apps.get_model("authentik_flows", "Flow") | ||||
|     for flow in Flow.objects.using(db_alias).all(): | ||||
|         if flow.slug not in slug_title_map: | ||||
|             continue | ||||
|         flow.title = slug_title_map[flow.slug] | ||||
|         flow.save() | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_flows", "0020_flowtoken"), | ||||
|     ] | ||||
|  | ||||
|     operations = [migrations.RunPython(update_title_for_defaults)] | ||||
| @ -1,7 +1,7 @@ | ||||
| """Flow models""" | ||||
| from base64 import b64decode, b64encode | ||||
| from pickle import dumps, loads  # nosec | ||||
| from typing import TYPE_CHECKING, Optional, Type | ||||
| from typing import TYPE_CHECKING, Optional | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from django.db import models | ||||
| @ -63,7 +63,7 @@ class Stage(SerializerModel): | ||||
|     objects = InheritanceManager() | ||||
|  | ||||
|     @property | ||||
|     def type(self) -> Type["StageView"]: | ||||
|     def type(self) -> type["StageView"]: | ||||
|         """Return StageView class that implements logic for this stage""" | ||||
|         # This is a bit of a workaround, since we can't set class methods with setattr | ||||
|         if hasattr(self, "__in_memory_type"): | ||||
| @ -86,7 +86,7 @@ class Stage(SerializerModel): | ||||
|         return f"Stage {self.name}" | ||||
|  | ||||
|  | ||||
| def in_memory_stage(view: Type["StageView"]) -> Stage: | ||||
| def in_memory_stage(view: type["StageView"]) -> Stage: | ||||
|     """Creates an in-memory stage instance, based on a `view` as view.""" | ||||
|     stage = Stage() | ||||
|     # Because we can't pickle a locally generated function, | ||||
|  | ||||
| @ -4,7 +4,7 @@ from typing import Any, Optional | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.http import HttpRequest | ||||
| from prometheus_client import Histogram | ||||
| from prometheus_client import Gauge, Histogram | ||||
| from sentry_sdk.hub import Hub | ||||
| from sentry_sdk.tracing import Span | ||||
| from structlog.stdlib import BoundLogger, get_logger | ||||
| @ -16,7 +16,6 @@ from authentik.flows.markers import ReevaluateMarker, StageMarker | ||||
| from authentik.flows.models import Flow, FlowStageBinding, Stage | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.policies.engine import PolicyEngine | ||||
| from authentik.root.monitoring import UpdatingGauge | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| PLAN_CONTEXT_PENDING_USER = "pending_user" | ||||
| @ -27,10 +26,9 @@ PLAN_CONTEXT_SOURCE = "source" | ||||
| # Is set by the Flow Planner when a FlowToken was used, and the currently active flow plan | ||||
| # was restored. | ||||
| PLAN_CONTEXT_IS_RESTORED = "is_restored" | ||||
| GAUGE_FLOWS_CACHED = UpdatingGauge( | ||||
| GAUGE_FLOWS_CACHED = Gauge( | ||||
|     "authentik_flows_cached", | ||||
|     "Cached flows", | ||||
|     update_func=lambda: len(cache.keys("flow_*") or []), | ||||
| ) | ||||
| HIST_FLOWS_PLAN_TIME = Histogram( | ||||
|     "authentik_flows_plan_time", | ||||
| @ -152,7 +150,9 @@ class FlowPlanner: | ||||
|             engine.build() | ||||
|             result = engine.result | ||||
|             if not result.passing: | ||||
|                 raise FlowNonApplicableException(",".join(result.messages)) | ||||
|                 exc = FlowNonApplicableException(",".join(result.messages)) | ||||
|                 exc.policy_result = result | ||||
|                 raise exc | ||||
|             # User is passing so far, check if we have a cached plan | ||||
|             cached_plan_key = cache_key(self.flow, user) | ||||
|             cached_plan = cache.get(cached_plan_key, None) | ||||
| @ -169,7 +169,6 @@ class FlowPlanner: | ||||
|             ) | ||||
|             plan = self._build_plan(user, request, default_context) | ||||
|             cache.set(cache_key(self.flow, user), plan, CACHE_TIMEOUT) | ||||
|             GAUGE_FLOWS_CACHED.update() | ||||
|             if not plan.bindings and not self.allow_empty_flows: | ||||
|                 raise EmptyFlowException() | ||||
|             return plan | ||||
|  | ||||
| @ -4,6 +4,9 @@ from django.db.models.signals import post_save, pre_delete | ||||
| from django.dispatch import receiver | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.flows.planner import GAUGE_FLOWS_CACHED | ||||
| from authentik.root.monitoring import monitoring_set | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| @ -14,6 +17,13 @@ def delete_cache_prefix(prefix: str) -> int: | ||||
|     return len(keys) | ||||
|  | ||||
|  | ||||
| @receiver(monitoring_set) | ||||
| # pylint: disable=unused-argument | ||||
| def monitoring_set_flows(sender, **kwargs): | ||||
|     """set flow gauges""" | ||||
|     GAUGE_FLOWS_CACHED.set(len(cache.keys("flow_*") or [])) | ||||
|  | ||||
|  | ||||
| @receiver(post_save) | ||||
| @receiver(pre_delete) | ||||
| # pylint: disable=unused-argument | ||||
|  | ||||
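Note: instead of pushing the cached-flow count on every cache write (the removed UpdatingGauge), the gauge is now a plain prometheus_client Gauge that a monitoring_set receiver refreshes. A rough sketch of the pattern; the signal is declared locally here for illustration, in authentik it is imported from authentik.root.monitoring as shown above:

    from django.dispatch import Signal, receiver
    from prometheus_client import Gauge, generate_latest

    # Stand-in for authentik.root.monitoring.monitoring_set
    monitoring_set = Signal()

    GAUGE_EXAMPLE = Gauge("example_cached_objects", "Cached objects")


    @receiver(monitoring_set)
    def refresh_example_gauge(sender, **kwargs):
        """Receivers recalculate their gauge only when metrics are about to be exported."""
        GAUGE_EXAMPLE.set(3)  # in the diff above: len(cache.keys("flow_*") or [])


    def render_metrics() -> bytes:
        """Hypothetical metrics view body: fire the signal, then export all collectors."""
        monitoring_set.send_robust(sender=None)
        return generate_latest()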
| @ -1,4 +1,6 @@ | ||||
| """authentik stage Base view""" | ||||
| from typing import TYPE_CHECKING, Optional | ||||
|  | ||||
| from django.contrib.auth.models import AnonymousUser | ||||
| from django.http import HttpRequest | ||||
| from django.http.request import QueryDict | ||||
| @ -11,15 +13,19 @@ from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.core.models import DEFAULT_AVATAR, User | ||||
| from authentik.flows.challenge import ( | ||||
|     AccessDeniedChallenge, | ||||
|     Challenge, | ||||
|     ChallengeResponse, | ||||
|     ChallengeTypes, | ||||
|     ContextualFlowInfo, | ||||
|     HttpChallengeResponse, | ||||
|     WithUserInfoChallenge, | ||||
| ) | ||||
| from authentik.flows.models import InvalidResponseAction | ||||
| from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, PLAN_CONTEXT_PENDING_USER | ||||
| from authentik.flows.views.executor import FlowExecutorView | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from authentik.flows.views.executor import FlowExecutorView | ||||
|  | ||||
| PLAN_CONTEXT_PENDING_USER_IDENTIFIER = "pending_user_identifier" | ||||
| LOGGER = get_logger() | ||||
| @ -28,11 +34,11 @@ LOGGER = get_logger() | ||||
| class StageView(View): | ||||
|     """Abstract Stage, inherits TemplateView but can be combined with FormView""" | ||||
|  | ||||
|     executor: FlowExecutorView | ||||
|     executor: "FlowExecutorView" | ||||
|  | ||||
|     request: HttpRequest = None | ||||
|  | ||||
|     def __init__(self, executor: FlowExecutorView, **kwargs): | ||||
|     def __init__(self, executor: "FlowExecutorView", **kwargs): | ||||
|         self.executor = executor | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
| @ -43,6 +49,8 @@ class StageView(View): | ||||
|         other things besides the form display. | ||||
|  | ||||
|         If no user is pending, returns request.user""" | ||||
|         if not self.executor.plan: | ||||
|             return self.request.user | ||||
|         if PLAN_CONTEXT_PENDING_USER_IDENTIFIER in self.executor.plan.context and for_display: | ||||
|             return User( | ||||
|                 username=self.executor.plan.context.get(PLAN_CONTEXT_PENDING_USER_IDENTIFIER), | ||||
| @ -108,9 +116,14 @@ class ChallengeStageView(StageView): | ||||
|  | ||||
|     def format_title(self) -> str: | ||||
|         """Allow usage of placeholder in flow title.""" | ||||
|         return self.executor.flow.title % { | ||||
|             "app": self.executor.plan.context.get(PLAN_CONTEXT_APPLICATION, "") | ||||
|         } | ||||
|         if not self.executor.plan: | ||||
|             return self.executor.flow.title | ||||
|         try: | ||||
|             return self.executor.flow.title % { | ||||
|                 "app": self.executor.plan.context.get(PLAN_CONTEXT_APPLICATION, "") | ||||
|             } | ||||
|         except ValueError: | ||||
|             return self.executor.flow.title | ||||
|  | ||||
|     def _get_challenge(self, *args, **kwargs) -> Challenge: | ||||
|         with Hub.current.start_span( | ||||
| @ -169,3 +182,27 @@ class ChallengeStageView(StageView): | ||||
|                 stage_view=self, | ||||
|             ) | ||||
|         return HttpChallengeResponse(challenge_response) | ||||
|  | ||||
|  | ||||
| class AccessDeniedChallengeView(ChallengeStageView): | ||||
|     """Used internally by FlowExecutor's stage_invalid()""" | ||||
|  | ||||
|     error_message: Optional[str] | ||||
|  | ||||
|     def __init__(self, executor: "FlowExecutorView", error_message: Optional[str] = None, **kwargs): | ||||
|         super().__init__(executor, **kwargs) | ||||
|         self.error_message = error_message | ||||
|  | ||||
|     def get_challenge(self, *args, **kwargs) -> Challenge: | ||||
|         return AccessDeniedChallenge( | ||||
|             data={ | ||||
|                 "error_message": self.error_message or "Unknown error", | ||||
|                 "type": ChallengeTypes.NATIVE.value, | ||||
|                 "component": "ak-stage-access-denied", | ||||
|             } | ||||
|         ) | ||||
|  | ||||
|     # This can never be reached since this challenge is created on demand and only the | ||||
|     # .get() method is called | ||||
|     def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:  # pragma: no cover | ||||
|         return self.executor.cancel() | ||||
|  | ||||
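Note on format_title(): flow titles may contain an optional %(app)s placeholder, and the new try/except guards against titles that are not valid %-format strings. The behaviour being guarded is plain Python string formatting:

    # Titles with the placeholder are interpolated with the pending application name.
    assert "Welcome to %(app)s!" % {"app": "Grafana"} == "Welcome to Grafana!"

    # Titles without any placeholder pass through unchanged when formatted with a mapping.
    assert "Welcome!" % {"app": "Grafana"} == "Welcome!"

    # A title with a stray trailing "%" is not a valid format string and raises ValueError,
    # which is why format_title() above falls back to the raw title.
    try:
        "Progress: 100%" % {"app": "Grafana"}
    except ValueError:
        print("falling back to the raw flow title")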
| @ -0,0 +1,51 @@ | ||||
| """Test helpers""" | ||||
| from json import loads | ||||
| from typing import Any, Optional | ||||
|  | ||||
| from django.http.response import HttpResponse | ||||
| from django.urls.base import reverse | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.flows.challenge import ChallengeTypes | ||||
| from authentik.flows.models import Flow | ||||
|  | ||||
|  | ||||
| class FlowTestCase(APITestCase): | ||||
|     """Helpers for testing flows and stages.""" | ||||
|  | ||||
|     # pylint: disable=invalid-name | ||||
|     def assertStageResponse( | ||||
|         self, | ||||
|         response: HttpResponse, | ||||
|         flow: Optional[Flow] = None, | ||||
|         user: Optional[User] = None, | ||||
|         **kwargs, | ||||
|     ) -> dict[str, Any]: | ||||
|         """Assert various attributes of a stage response""" | ||||
|         raw_response = loads(response.content.decode()) | ||||
|         self.assertIsNotNone(raw_response["component"]) | ||||
|         self.assertIsNotNone(raw_response["type"]) | ||||
|         if flow: | ||||
|             self.assertIn("flow_info", raw_response) | ||||
|             self.assertEqual(raw_response["flow_info"]["background"], flow.background_url) | ||||
|             self.assertEqual( | ||||
|                 raw_response["flow_info"]["cancel_url"], reverse("authentik_flows:cancel") | ||||
|             ) | ||||
|             # We don't check the flow title since it will most likely go | ||||
|             # through ChallengeStageView.format_title() so might not match 1:1 | ||||
|             # self.assertEqual(raw_response["flow_info"]["title"], flow.title) | ||||
|             self.assertIsNotNone(raw_response["flow_info"]["title"]) | ||||
|         if user: | ||||
|             self.assertEqual(raw_response["pending_user"], user.username) | ||||
|             self.assertEqual(raw_response["pending_user_avatar"], user.avatar) | ||||
|         for key, expected in kwargs.items(): | ||||
|             self.assertEqual(raw_response[key], expected) | ||||
|         return raw_response | ||||
|  | ||||
|     # pylint: disable=invalid-name | ||||
|     def assertStageRedirects(self, response: HttpResponse, to: str) -> dict[str, Any]: | ||||
|         """Wrapper around assertStageResponse that checks for a redirect""" | ||||
|         return self.assertStageResponse( | ||||
|             response, component="xak-flow-redirect", to=to, type=ChallengeTypes.REDIRECT.value | ||||
|         ) | ||||
|  | ||||
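A sketch of how these helpers are meant to be used from a stage or executor test; the flow and the expected component below are illustrative, not taken from an existing test:

    from django.urls import reverse

    from authentik.flows.models import Flow, FlowDesignation
    from authentik.flows.tests import FlowTestCase


    class TestExampleStage(FlowTestCase):
        """Example usage of the shared assertions"""

        def test_first_challenge(self):
            flow = Flow.objects.create(
                name="example",
                slug="example",
                title="example",
                designation=FlowDesignation.AUTHENTICATION,
            )
            response = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug})
            )
            # Checks component/type, the flow_info block (because `flow` is passed),
            # and compares any extra kwargs against top-level keys of the challenge.
            self.assertStageResponse(response, flow=flow, component="ak-stage-access-denied")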
| @ -4,16 +4,14 @@ from unittest.mock import MagicMock, PropertyMock, patch | ||||
| from django.http import HttpRequest, HttpResponse | ||||
| from django.test.client import RequestFactory | ||||
| from django.urls import reverse | ||||
| from django.utils.encoding import force_str | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.flows.challenge import ChallengeTypes | ||||
| from authentik.flows.exceptions import FlowNonApplicableException | ||||
| from authentik.flows.markers import ReevaluateMarker, StageMarker | ||||
| from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, InvalidResponseAction | ||||
| from authentik.flows.planner import FlowPlan, FlowPlanner | ||||
| from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, StageView | ||||
| from authentik.flows.tests import FlowTestCase | ||||
| from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_PLAN, FlowExecutorView | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.policies.dummy.models import DummyPolicy | ||||
| @ -37,7 +35,7 @@ def to_stage_response(request: HttpRequest, source: HttpResponse): | ||||
| TO_STAGE_RESPONSE_MOCK = MagicMock(side_effect=to_stage_response) | ||||
|  | ||||
|  | ||||
| class TestFlowExecutor(APITestCase): | ||||
| class TestFlowExecutor(FlowTestCase): | ||||
|     """Test executor""" | ||||
|  | ||||
|     def setUp(self): | ||||
| @ -90,18 +88,11 @@ class TestFlowExecutor(APITestCase): | ||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         self.assertJSONEqual( | ||||
|             force_str(response.content), | ||||
|             { | ||||
|                 "component": "ak-stage-access-denied", | ||||
|                 "error_message": FlowNonApplicableException.__doc__, | ||||
|                 "flow_info": { | ||||
|                     "background": flow.background_url, | ||||
|                     "cancel_url": reverse("authentik_flows:cancel"), | ||||
|                     "title": "", | ||||
|                 }, | ||||
|                 "type": ChallengeTypes.NATIVE.value, | ||||
|             }, | ||||
|         self.assertStageResponse( | ||||
|             response, | ||||
|             flow=flow, | ||||
|             error_message=FlowNonApplicableException.__doc__, | ||||
|             component="ak-stage-access-denied", | ||||
|         ) | ||||
|  | ||||
|     @patch( | ||||
| @ -283,14 +274,7 @@ class TestFlowExecutor(APITestCase): | ||||
|         # We do this request without the patch, so the policy results in false | ||||
|         response = self.client.post(exec_url) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         self.assertJSONEqual( | ||||
|             force_str(response.content), | ||||
|             { | ||||
|                 "component": "xak-flow-redirect", | ||||
|                 "to": reverse("authentik_core:root-redirect"), | ||||
|                 "type": ChallengeTypes.REDIRECT.value, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertStageRedirects(response, reverse("authentik_core:root-redirect")) | ||||
|  | ||||
|     def test_reevaluate_keep(self): | ||||
|         """Test planner with re-evaluate (everything is kept)""" | ||||
| @ -360,14 +344,7 @@ class TestFlowExecutor(APITestCase): | ||||
|         # We do this request without the patch, so the policy results in false | ||||
|         response = self.client.post(exec_url) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         self.assertJSONEqual( | ||||
|             force_str(response.content), | ||||
|             { | ||||
|                 "component": "xak-flow-redirect", | ||||
|                 "to": reverse("authentik_core:root-redirect"), | ||||
|                 "type": ChallengeTypes.REDIRECT.value, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertStageRedirects(response, reverse("authentik_core:root-redirect")) | ||||
|  | ||||
|     def test_reevaluate_remove_consecutive(self): | ||||
|         """Test planner with re-evaluate (consecutive stages are removed)""" | ||||
| @ -407,18 +384,7 @@ class TestFlowExecutor(APITestCase): | ||||
|             # First request, run the planner | ||||
|             response = self.client.get(exec_url) | ||||
|             self.assertEqual(response.status_code, 200) | ||||
|             self.assertJSONEqual( | ||||
|                 force_str(response.content), | ||||
|                 { | ||||
|                     "type": ChallengeTypes.NATIVE.value, | ||||
|                     "component": "ak-stage-dummy", | ||||
|                     "flow_info": { | ||||
|                         "background": flow.background_url, | ||||
|                         "cancel_url": reverse("authentik_flows:cancel"), | ||||
|                         "title": "", | ||||
|                     }, | ||||
|                 }, | ||||
|             ) | ||||
|             self.assertStageResponse(response, flow, component="ak-stage-dummy") | ||||
|  | ||||
|             plan: FlowPlan = self.client.session[SESSION_KEY_PLAN] | ||||
|  | ||||
| @ -441,31 +407,13 @@ class TestFlowExecutor(APITestCase): | ||||
|         # but it won't save it, hence we can't check the plan | ||||
|         response = self.client.get(exec_url) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         self.assertJSONEqual( | ||||
|             force_str(response.content), | ||||
|             { | ||||
|                 "type": ChallengeTypes.NATIVE.value, | ||||
|                 "component": "ak-stage-dummy", | ||||
|                 "flow_info": { | ||||
|                     "background": flow.background_url, | ||||
|                     "cancel_url": reverse("authentik_flows:cancel"), | ||||
|                     "title": "", | ||||
|                 }, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertStageResponse(response, flow, component="ak-stage-dummy") | ||||
|  | ||||
|         # fourth request, this confirms the last stage (dummy4) | ||||
|         # We do this request without the patch, so the policy results in false | ||||
|         response = self.client.post(exec_url) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         self.assertJSONEqual( | ||||
|             force_str(response.content), | ||||
|             { | ||||
|                 "component": "xak-flow-redirect", | ||||
|                 "to": reverse("authentik_core:root-redirect"), | ||||
|                 "type": ChallengeTypes.REDIRECT.value, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertStageRedirects(response, reverse("authentik_core:root-redirect")) | ||||
|  | ||||
|     def test_stageview_user_identifier(self): | ||||
|         """Test PLAN_CONTEXT_PENDING_USER_IDENTIFIER""" | ||||
| @ -532,35 +480,16 @@ class TestFlowExecutor(APITestCase): | ||||
|         # First request, run the planner | ||||
|         response = self.client.get(exec_url) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         self.assertJSONEqual( | ||||
|             force_str(response.content), | ||||
|             { | ||||
|                 "type": ChallengeTypes.NATIVE.value, | ||||
|                 "component": "ak-stage-identification", | ||||
|                 "flow_info": { | ||||
|                     "background": flow.background_url, | ||||
|                     "cancel_url": reverse("authentik_flows:cancel"), | ||||
|                     "title": "", | ||||
|                 }, | ||||
|                 "password_fields": False, | ||||
|                 "primary_action": "Log in", | ||||
|                 "sources": [], | ||||
|                 "show_source_labels": False, | ||||
|                 "user_fields": [UserFields.E_MAIL], | ||||
|             }, | ||||
|         self.assertStageResponse( | ||||
|             response, | ||||
|             flow, | ||||
|             component="ak-stage-identification", | ||||
|             password_fields=False, | ||||
|             primary_action="Log in", | ||||
|             sources=[], | ||||
|             show_source_labels=False, | ||||
|             user_fields=[UserFields.E_MAIL], | ||||
|         ) | ||||
|         response = self.client.post(exec_url, {"uid_field": "invalid-string"}, follow=True) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         self.assertJSONEqual( | ||||
|             force_str(response.content), | ||||
|             { | ||||
|                 "component": "ak-stage-access-denied", | ||||
|                 "error_message": None, | ||||
|                 "flow_info": { | ||||
|                     "background": flow.background_url, | ||||
|                     "cancel_url": reverse("authentik_flows:cancel"), | ||||
|                     "title": "", | ||||
|                 }, | ||||
|                 "type": ChallengeTypes.NATIVE.value, | ||||
|             }, | ||||
|         ) | ||||
|         self.assertStageResponse(response, flow, component="ak-stage-access-denied") | ||||
|  | ||||
| @ -1,5 +1,5 @@ | ||||
| """base model tests""" | ||||
| from typing import Callable, Type | ||||
| from typing import Callable | ||||
|  | ||||
| from django.test import TestCase | ||||
|  | ||||
| @ -12,7 +12,7 @@ class TestModels(TestCase): | ||||
|     """Generic model properties tests""" | ||||
|  | ||||
|  | ||||
| def model_tester_factory(test_model: Type[Stage]) -> Callable: | ||||
| def model_tester_factory(test_model: type[Stage]) -> Callable: | ||||
|     """Test a form""" | ||||
|  | ||||
|     def tester(self: TestModels): | ||||
|  | ||||
| @ -1,5 +1,5 @@ | ||||
| """stage view tests""" | ||||
| from typing import Callable, Type | ||||
| from typing import Callable | ||||
|  | ||||
| from django.test import RequestFactory, TestCase | ||||
|  | ||||
| @ -16,7 +16,7 @@ class TestViews(TestCase): | ||||
|         self.exec = FlowExecutorView(request=self.factory.get("/")) | ||||
|  | ||||
|  | ||||
| def view_tester_factory(view_class: Type[StageView]) -> Callable: | ||||
| def view_tester_factory(view_class: type[StageView]) -> Callable: | ||||
|     """Test a form""" | ||||
|  | ||||
|     def tester(self: TestViews): | ||||
|  | ||||
| @ -2,7 +2,7 @@ | ||||
| from contextlib import contextmanager | ||||
| from copy import deepcopy | ||||
| from json import loads | ||||
| from typing import Any, Type | ||||
| from typing import Any | ||||
|  | ||||
| from dacite import from_dict | ||||
| from dacite.exceptions import DaciteError | ||||
| @ -87,7 +87,7 @@ class FlowImporter: | ||||
|     def _validate_single(self, entry: FlowBundleEntry) -> BaseSerializer: | ||||
|         """Validate a single entry""" | ||||
|         model_app_label, model_name = entry.model.split(".") | ||||
|         model: Type[SerializerModel] = apps.get_model(model_app_label, model_name) | ||||
|         model: type[SerializerModel] = apps.get_model(model_app_label, model_name) | ||||
|         if not isinstance(model(), ALLOWED_MODELS): | ||||
|             raise EntryInvalidError(f"Model {model} not allowed") | ||||
|  | ||||
|  | ||||
| @ -10,7 +10,6 @@ from django.http import Http404, HttpRequest, HttpResponse, HttpResponseRedirect | ||||
| from django.http.request import QueryDict | ||||
| from django.shortcuts import get_object_or_404, redirect | ||||
| from django.template.response import TemplateResponse | ||||
| from django.urls.base import reverse | ||||
| from django.utils.decorators import method_decorator | ||||
| from django.views.decorators.clickjacking import xframe_options_sameorigin | ||||
| from django.views.generic import View | ||||
| @ -26,7 +25,6 @@ from structlog.stdlib import BoundLogger, get_logger | ||||
| from authentik.core.models import USER_ATTRIBUTE_DEBUG | ||||
| from authentik.events.models import Event, EventAction, cleanse_dict | ||||
| from authentik.flows.challenge import ( | ||||
|     AccessDeniedChallenge, | ||||
|     Challenge, | ||||
|     ChallengeResponse, | ||||
|     ChallengeTypes, | ||||
| @ -51,6 +49,7 @@ from authentik.flows.planner import ( | ||||
|     FlowPlan, | ||||
|     FlowPlanner, | ||||
| ) | ||||
| from authentik.flows.stage import AccessDeniedChallengeView | ||||
| from authentik.lib.sentry import SentryIgnoredException | ||||
| from authentik.lib.utils.errors import exception_to_string | ||||
| from authentik.lib.utils.reflection import all_subclasses, class_to_path | ||||
| @ -371,12 +370,6 @@ class FlowExecutorView(APIView): | ||||
|             NEXT_ARG_NAME, "authentik_core:root-redirect" | ||||
|         ) | ||||
|         self.cancel() | ||||
|         Event.new( | ||||
|             action=EventAction.FLOW_EXECUTION, | ||||
|             flow=self.flow, | ||||
|             designation=self.flow.designation, | ||||
|             successful=True, | ||||
|         ).from_http(self.request) | ||||
|         return to_stage_response(self.request, redirect_with_qs(next_param)) | ||||
|  | ||||
|     def stage_ok(self) -> HttpResponse: | ||||
| @ -412,21 +405,9 @@ class FlowExecutorView(APIView): | ||||
|         is a superuser.""" | ||||
|         self._logger.debug("f(exec): Stage invalid") | ||||
|         self.cancel() | ||||
|         response = HttpChallengeResponse( | ||||
|             AccessDeniedChallenge( | ||||
|                 { | ||||
|                     "error_message": error_message, | ||||
|                     "type": ChallengeTypes.NATIVE.value, | ||||
|                     "component": "ak-stage-access-denied", | ||||
|                     "flow_info": { | ||||
|                         "title": self.flow.title, | ||||
|                         "background": self.flow.background_url, | ||||
|                         "cancel_url": reverse("authentik_flows:cancel"), | ||||
|                     }, | ||||
|                 } | ||||
|             ) | ||||
|         ) | ||||
|         return to_stage_response(self.request, response) | ||||
|         challenge_view = AccessDeniedChallengeView(self, error_message) | ||||
|         challenge_view.request = self.request | ||||
|         return to_stage_response(self.request, challenge_view.get(self.request)) | ||||
|  | ||||
|     def cancel(self): | ||||
|         """Cancel current execution and return a redirect""" | ||||
|  | ||||
| @ -6,7 +6,7 @@ postgresql: | ||||
|   port: 5432 | ||||
|   password: 'env://POSTGRES_PASSWORD' | ||||
|   backup: | ||||
|     enabled: true | ||||
|     enabled: false | ||||
|   s3_backup: | ||||
|     access_key: "" | ||||
|     secret_key: "" | ||||
| @ -64,7 +64,7 @@ outposts: | ||||
|   # %(type)s: Outpost type; proxy, ldap, etc | ||||
|   # %(version)s: Current version; 2021.4.1 | ||||
|   # %(build_hash)s: Build hash if you're running a beta version | ||||
|   container_image_base: goauthentik.io/%(type)s:%(version)s | ||||
|   container_image_base: ghcr.io/goauthentik/%(type)s:%(version)s | ||||
|  | ||||
| cookie_domain: null | ||||
| disable_update_check: false | ||||
| @ -78,6 +78,7 @@ footer_links: | ||||
|   - name: authentik Website | ||||
|     href: https://goauthentik.io/?utm_source=authentik | ||||
|  | ||||
| default_user_change_name: true | ||||
| default_user_change_email: true | ||||
| default_user_change_username: true | ||||
|  | ||||
|  | ||||
| @ -32,6 +32,7 @@ class BaseEvaluator: | ||||
|         self._globals = { | ||||
|             "regex_match": BaseEvaluator.expr_regex_match, | ||||
|             "regex_replace": BaseEvaluator.expr_regex_replace, | ||||
|             "list_flatten": BaseEvaluator.expr_flatten, | ||||
|             "ak_is_group_member": BaseEvaluator.expr_is_group_member, | ||||
|             "ak_user_by": BaseEvaluator.expr_user_by, | ||||
|             "ak_logger": get_logger(), | ||||
| @ -40,6 +41,15 @@ class BaseEvaluator: | ||||
|         self._context = {} | ||||
|         self._filename = "BaseEvalautor" | ||||
|  | ||||
|     @staticmethod | ||||
|     def expr_flatten(value: list[Any] | Any) -> Optional[Any]: | ||||
|         """Flatten `value` if it's a list""" | ||||
|         if isinstance(value, list): | ||||
|             if len(value) < 1: | ||||
|                 return None | ||||
|             return value[0] | ||||
|         return value | ||||
|  | ||||
|     @staticmethod | ||||
|     def expr_regex_match(value: Any, regex: str) -> bool: | ||||
|         """Expression Filter to run re.search""" | ||||
|  | ||||
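Note: expr_flatten is exposed to expressions under the name list_flatten, which is handy for collapsing single-valued, LDAP-style attribute lists into a scalar. A standalone equivalent that can be run outside the evaluator:

    from typing import Any, Optional


    def list_flatten(value: Any) -> Optional[Any]:
        """Return the first element of a list (None if it is empty), pass scalars through."""
        if isinstance(value, list):
            if len(value) < 1:
                return None
            return value[0]
        return value


    assert list_flatten(["alice@example.org"]) == "alice@example.org"
    assert list_flatten([]) is None
    assert list_flatten("already-a-scalar") == "already-a-scalar"
    # Inside an authentik expression it would be called as, for example:
    #   return {"email": list_flatten(attributes.get("mail", []))}   # attribute name illustrative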
| @ -97,7 +97,7 @@ def before_send(event: dict, hint: dict) -> Optional[dict]: | ||||
|     if "exc_info" in hint: | ||||
|         _, exc_value, _ = hint["exc_info"] | ||||
|         if isinstance(exc_value, ignored_classes): | ||||
|             LOGGER.debug("dropping exception", exception=exc_value) | ||||
|             LOGGER.debug("dropping exception", exc=exc_value) | ||||
|             return None | ||||
|     if "logger" in event: | ||||
|         if event["logger"] in [ | ||||
| @ -108,9 +108,13 @@ def before_send(event: dict, hint: dict) -> Optional[dict]: | ||||
|             "multiprocessing", | ||||
|             "django_redis", | ||||
|             "django.security.DisallowedHost", | ||||
|             "django_redis.cache", | ||||
|             "celery.backends.redis", | ||||
|             "celery.worker", | ||||
|             "paramiko.transport", | ||||
|         ]: | ||||
|             return None | ||||
|     LOGGER.debug("sending event to sentry", exc=exc_value, source_logger=event.get("logger", None)) | ||||
|     if settings.DEBUG: | ||||
|     if settings.DEBUG or settings.TEST: | ||||
|         return None | ||||
|     return event | ||||
|  | ||||
| @ -13,4 +13,4 @@ class TestSentry(TestCase): | ||||
|  | ||||
|     def test_error_sent(self): | ||||
|         """Test error sent""" | ||||
|         self.assertEqual({}, before_send({}, {"exc_info": (0, ValueError(), 0)})) | ||||
|         self.assertEqual(None, before_send({}, {"exc_info": (0, ValueError(), 0)})) | ||||
|  | ||||
| @ -1,5 +1,5 @@ | ||||
| """base model tests""" | ||||
| from typing import Callable, Type | ||||
| from typing import Callable | ||||
|  | ||||
| from django.test import TestCase | ||||
| from rest_framework.serializers import BaseSerializer | ||||
| @ -13,7 +13,7 @@ class TestModels(TestCase): | ||||
|     """Generic model properties tests""" | ||||
|  | ||||
|  | ||||
| def model_tester_factory(test_model: Type[Stage]) -> Callable: | ||||
| def model_tester_factory(test_model: type[Stage]) -> Callable: | ||||
|     """Test a form""" | ||||
|  | ||||
|     def tester(self: TestModels): | ||||
|  | ||||
| @ -1,5 +1,4 @@ | ||||
| """http helpers""" | ||||
| from os import environ | ||||
| from typing import Any, Optional | ||||
|  | ||||
| from django.http import HttpRequest | ||||
| @ -7,7 +6,7 @@ from requests.sessions import Session | ||||
| from sentry_sdk.hub import Hub | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik import ENV_GIT_HASH_KEY, __version__ | ||||
| from authentik import get_full_version | ||||
|  | ||||
| OUTPOST_REMOTE_IP_HEADER = "HTTP_X_AUTHENTIK_REMOTE_IP" | ||||
| OUTPOST_TOKEN_HEADER = "HTTP_X_AUTHENTIK_OUTPOST_TOKEN"  # nosec | ||||
| @ -75,8 +74,7 @@ def get_client_ip(request: Optional[HttpRequest]) -> str: | ||||
|  | ||||
| def authentik_user_agent() -> str: | ||||
|     """Get a common user agent""" | ||||
|     build = environ.get(ENV_GIT_HASH_KEY, "tagged") | ||||
|     return f"authentik@{__version__} (build={build})" | ||||
|     return f"authentik@{get_full_version()}" | ||||
|  | ||||
|  | ||||
| def get_http_session() -> Session: | ||||
|  | ||||
| @ -2,7 +2,6 @@ | ||||
| import os | ||||
| from importlib import import_module | ||||
| from pathlib import Path | ||||
| from typing import Union | ||||
|  | ||||
| from django.conf import settings | ||||
| from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME | ||||
| @ -30,7 +29,7 @@ def class_to_path(cls: type) -> str: | ||||
|     return f"{cls.__module__}.{cls.__name__}" | ||||
|  | ||||
|  | ||||
| def path_to_class(path: Union[str, None]) -> Union[type, None]: | ||||
| def path_to_class(path: str | None) -> type | None: | ||||
|     """Import module and return class""" | ||||
|     if not path: | ||||
|         return None | ||||
| @ -59,4 +58,6 @@ def get_env() -> str: | ||||
|         return "compose" | ||||
|     if CONFIG.y_bool("debug"): | ||||
|         return "dev" | ||||
|     if "AK_APPLIANCE" in os.environ: | ||||
|         return os.environ["AK_APPLIANCE"] | ||||
|     return "custom" | ||||
|  | ||||
| @ -34,7 +34,7 @@ def timedelta_from_string(expr: str) -> datetime.timedelta: | ||||
|         key, value = duration_pair.split("=") | ||||
|         if key.lower() not in ALLOWED_KEYS: | ||||
|             continue | ||||
|         kwargs[key.lower()] = float(value) | ||||
|         kwargs[key.lower()] = float(value.strip()) | ||||
|     if len(kwargs) < 1: | ||||
|         raise ValueError("No valid keys to pass to timedelta") | ||||
|     return datetime.timedelta(**kwargs) | ||||
|  | ||||
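Note: the added .strip() makes the value part of a duration pair tolerant of surrounding whitespace. A simplified, self-contained version of the parser for illustration only; the ";" pair separator and the allowed keys are assumptions based on the surrounding module:

    import datetime

    ALLOWED_KEYS = ("days", "seconds", "minutes", "hours", "weeks")  # assumed subset


    def timedelta_from_string(expr: str) -> datetime.timedelta:
        """Simplified re-implementation of the helper above."""
        kwargs = {}
        for duration_pair in expr.split(";"):  # pair separator assumed
            key, value = duration_pair.split("=")
            if key.lower() not in ALLOWED_KEYS:
                continue
            kwargs[key.lower()] = float(value.strip())  # strip() tolerates "minutes= 30 "
        if len(kwargs) < 1:
            raise ValueError("No valid keys to pass to timedelta")
        return datetime.timedelta(**kwargs)


    assert timedelta_from_string("hours=1;minutes= 30") == datetime.timedelta(hours=1, minutes=30)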
| @ -1,5 +1,5 @@ | ||||
| """Managed objects manager""" | ||||
| from typing import Callable, Optional, Type | ||||
| from typing import Callable, Optional | ||||
|  | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| @ -11,11 +11,11 @@ LOGGER = get_logger() | ||||
| class EnsureOp: | ||||
|     """Ensure operation, executed as part of an ObjectManager run""" | ||||
|  | ||||
|     _obj: Type[ManagedModel] | ||||
|     _obj: type[ManagedModel] | ||||
|     _managed_uid: str | ||||
|     _kwargs: dict | ||||
|  | ||||
|     def __init__(self, obj: Type[ManagedModel], managed_uid: str, **kwargs) -> None: | ||||
|     def __init__(self, obj: type[ManagedModel], managed_uid: str, **kwargs) -> None: | ||||
|         self._obj = obj | ||||
|         self._managed_uid = managed_uid | ||||
|         self._kwargs = kwargs | ||||
| @ -32,7 +32,7 @@ class EnsureExists(EnsureOp): | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         obj: Type[ManagedModel], | ||||
|         obj: type[ManagedModel], | ||||
|         managed_uid: str, | ||||
|         created_callback: Optional[Callable] = None, | ||||
|         **kwargs, | ||||
|  | ||||
| @ -12,6 +12,7 @@ from rest_framework.response import Response | ||||
| from rest_framework.serializers import JSONField, ModelSerializer, ValidationError | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik import get_build_hash | ||||
| from authentik.core.api.providers import ProviderSerializer | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.core.api.utils import PassiveSerializer, is_dict | ||||
| @ -98,8 +99,12 @@ class OutpostHealthSerializer(PassiveSerializer): | ||||
|     last_seen = DateTimeField(read_only=True) | ||||
|     version = CharField(read_only=True) | ||||
|     version_should = CharField(read_only=True) | ||||
|  | ||||
|     version_outdated = BooleanField(read_only=True) | ||||
|  | ||||
|     build_hash = CharField(read_only=True, required=False) | ||||
|     build_hash_should = CharField(read_only=True, required=False) | ||||
|  | ||||
|  | ||||
| class OutpostFilter(FilterSet): | ||||
|     """Filter for Outposts""" | ||||
| @ -116,6 +121,7 @@ class OutpostFilter(FilterSet): | ||||
|             "providers": ["isnull"], | ||||
|             "name": ["iexact", "icontains"], | ||||
|             "service_connection__name": ["iexact", "icontains"], | ||||
|             "managed": ["iexact", "icontains"], | ||||
|         } | ||||
|  | ||||
|  | ||||
| @ -145,6 +151,8 @@ class OutpostViewSet(UsedByMixin, ModelViewSet): | ||||
|                     "version": state.version, | ||||
|                     "version_should": state.version_should, | ||||
|                     "version_outdated": state.version_outdated, | ||||
|                     "build_hash": state.build_hash, | ||||
|                     "build_hash_should": get_build_hash(), | ||||
|                 } | ||||
|             ) | ||||
|         return Response(OutpostHealthSerializer(states, many=True).data) | ||||
|  | ||||
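Note: the health endpoint now also reports the outpost's build hash next to the expected one, so the UI can flag outposts built from a different commit even when the version string matches. The shape of one serialized health entry looks roughly like this (values are placeholders):

    # Shape of one OutpostHealthSerializer entry; all values illustrative.
    example_health = {
        "last_seen": "2021-11-01T12:00:00Z",
        "version": "2021.10.2",
        "version_should": "2021.10.3",
        "version_outdated": True,
        "build_hash": "abc1234",         # reported by the running outpost
        "build_hash_should": "def5678",  # get_build_hash() on the server
    }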
| @ -55,6 +55,10 @@ class OutpostConsumer(AuthJsonConsumer): | ||||
|  | ||||
|     first_msg = False | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         super().__init__(*args, **kwargs) | ||||
|         self.logger = get_logger() | ||||
|  | ||||
|     def connect(self): | ||||
|         super().connect() | ||||
|         uuid = self.scope["url_route"]["kwargs"]["pk"] | ||||
| @ -65,7 +69,7 @@ class OutpostConsumer(AuthJsonConsumer): | ||||
|         ) | ||||
|         if not outpost: | ||||
|             raise DenyConnection() | ||||
|         self.logger = get_logger().bind(outpost=outpost) | ||||
|         self.logger = self.logger.bind(outpost=outpost) | ||||
|         try: | ||||
|             self.accept() | ||||
|         except RuntimeError as exc: | ||||
|  | ||||
| @ -1,15 +1,18 @@ | ||||
| """Base Controller""" | ||||
| from dataclasses import dataclass | ||||
| from os import environ | ||||
| from typing import Optional | ||||
|  | ||||
| from structlog.stdlib import get_logger | ||||
| from structlog.testing import capture_logs | ||||
|  | ||||
| from authentik import ENV_GIT_HASH_KEY, __version__ | ||||
| from authentik import __version__, get_build_hash | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.sentry import SentryIgnoredException | ||||
| from authentik.outposts.models import Outpost, OutpostServiceConnection | ||||
| from authentik.outposts.models import ( | ||||
|     Outpost, | ||||
|     OutpostServiceConnection, | ||||
|     OutpostServiceConnectionState, | ||||
| ) | ||||
|  | ||||
| FIELD_MANAGER = "goauthentik.io" | ||||
|  | ||||
| @ -28,11 +31,25 @@ class DeploymentPort: | ||||
|     inner_port: Optional[int] = None | ||||
|  | ||||
|  | ||||
| class BaseClient: | ||||
|     """Base class for custom clients""" | ||||
|  | ||||
|     def fetch_state(self) -> OutpostServiceConnectionState: | ||||
|         """Get state, version info""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def __enter__(self): | ||||
|         return self | ||||
|  | ||||
|     def __exit__(self, exc_type, exc_value, traceback): | ||||
|         """Cleanup after usage""" | ||||
|  | ||||
|  | ||||
| class BaseController: | ||||
|     """Base Outpost deployment controller""" | ||||
|  | ||||
|     deployment_ports: list[DeploymentPort] | ||||
|  | ||||
|     client: BaseClient | ||||
|     outpost: Outpost | ||||
|     connection: OutpostServiceConnection | ||||
|  | ||||
| @ -63,6 +80,14 @@ class BaseController: | ||||
|             self.down() | ||||
|         return [x["event"] for x in logs] | ||||
|  | ||||
|     def __enter__(self): | ||||
|         return self | ||||
|  | ||||
|     def __exit__(self, exc_type, exc_value, traceback): | ||||
|         """Cleanup after usage""" | ||||
|         if hasattr(self, "client"): | ||||
|             self.client.__exit__(exc_type, exc_value, traceback) | ||||
|  | ||||
|     def get_static_deployment(self) -> str: | ||||
|         """Return a static deployment configuration""" | ||||
|         raise NotImplementedError | ||||
| @ -76,5 +101,5 @@ class BaseController: | ||||
|         return image_name_template % { | ||||
|             "type": self.outpost.type, | ||||
|             "version": __version__, | ||||
|             "build_hash": environ.get(ENV_GIT_HASH_KEY, ""), | ||||
|             "build_hash": get_build_hash(), | ||||
|         } | ||||
|  | ||||
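Note: BaseClient and BaseController are now context managers, so anything the client wrote to disk (TLS certificates, ssh config entries) is cleaned up when the controller is done. A hedged sketch of the intended call pattern; the wrapper function is hypothetical and up_with_logs() is assumed from the log-capturing helper partially visible above:

    from authentik.outposts.controllers.base import BaseController


    def deploy_outpost(controller: BaseController) -> list[str]:
        """Run a deployment and let __exit__ clean up any client-side temp files."""
        with controller:
            # BaseController.__exit__ forwards to client.__exit__, which removes
            # temporary certificate / ssh-config files written on connect.
            return controller.up_with_logs()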
| @ -1,17 +1,79 @@ | ||||
| """Docker controller""" | ||||
| from time import sleep | ||||
| from typing import Optional | ||||
| from urllib.parse import urlparse | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.utils.text import slugify | ||||
| from docker import DockerClient | ||||
| from docker import DockerClient as UpstreamDockerClient | ||||
| from docker.errors import DockerException, NotFound | ||||
| from docker.models.containers import Container | ||||
| from docker.utils.utils import kwargs_from_env | ||||
| from paramiko.ssh_exception import SSHException | ||||
| from structlog.stdlib import get_logger | ||||
| from yaml import safe_dump | ||||
|  | ||||
| from authentik import __version__ | ||||
| from authentik.outposts.controllers.base import BaseController, ControllerException | ||||
| from authentik.outposts.controllers.base import BaseClient, BaseController, ControllerException | ||||
| from authentik.outposts.docker_ssh import DockerInlineSSH | ||||
| from authentik.outposts.docker_tls import DockerInlineTLS | ||||
| from authentik.outposts.managed import MANAGED_OUTPOST | ||||
| from authentik.outposts.models import DockerServiceConnection, Outpost, ServiceConnectionInvalid | ||||
| from authentik.outposts.models import ( | ||||
|     DockerServiceConnection, | ||||
|     Outpost, | ||||
|     OutpostServiceConnectionState, | ||||
|     ServiceConnectionInvalid, | ||||
| ) | ||||
|  | ||||
|  | ||||
| class DockerClient(UpstreamDockerClient, BaseClient): | ||||
|     """Custom docker client, which can handle TLS and SSH from a database.""" | ||||
|  | ||||
|     tls: Optional[DockerInlineTLS] | ||||
|     ssh: Optional[DockerInlineSSH] | ||||
|  | ||||
|     def __init__(self, connection: DockerServiceConnection): | ||||
|         self.tls = None | ||||
|         self.ssh = None | ||||
|         if connection.local: | ||||
|             # Same result as DockerClient.from_env | ||||
|             super().__init__(**kwargs_from_env()) | ||||
|         else: | ||||
|             parsed_url = urlparse(connection.url) | ||||
|             tls_config = False | ||||
|             if parsed_url.scheme == "ssh": | ||||
|                 self.ssh = DockerInlineSSH(parsed_url.hostname, connection.tls_authentication) | ||||
|                 self.ssh.write() | ||||
|             else: | ||||
|                 self.tls = DockerInlineTLS( | ||||
|                     verification_kp=connection.tls_verification, | ||||
|                     authentication_kp=connection.tls_authentication, | ||||
|                 ) | ||||
|                 tls_config = self.tls.write() | ||||
|             try: | ||||
|                 super().__init__( | ||||
|                     base_url=connection.url, | ||||
|                     tls=tls_config, | ||||
|                 ) | ||||
|             except SSHException as exc: | ||||
|                 raise ServiceConnectionInvalid from exc | ||||
|         self.logger = get_logger() | ||||
|         # Ensure the client actually works | ||||
|         self.containers.list() | ||||
|  | ||||
|     def fetch_state(self) -> OutpostServiceConnectionState: | ||||
|         try: | ||||
|             return OutpostServiceConnectionState(version=self.info()["ServerVersion"], healthy=True) | ||||
|         except (ServiceConnectionInvalid, DockerException): | ||||
|             return OutpostServiceConnectionState(version="", healthy=False) | ||||
|  | ||||
|     def __exit__(self, exc_type, exc_value, traceback): | ||||
|         if self.tls: | ||||
|             self.logger.debug("Cleaning up TLS") | ||||
|             self.tls.cleanup() | ||||
|         if self.ssh: | ||||
|             self.logger.debug("Cleaning up SSH") | ||||
|             self.ssh.cleanup() | ||||
|  | ||||
|  | ||||
| class DockerController(BaseController): | ||||
| @ -27,8 +89,9 @@ class DockerController(BaseController): | ||||
|         if outpost.managed == MANAGED_OUTPOST: | ||||
|             return | ||||
|         try: | ||||
|             self.client = connection.client() | ||||
|         except ServiceConnectionInvalid as exc: | ||||
|             self.client = DockerClient(connection) | ||||
|         except DockerException as exc: | ||||
|             self.logger.warning(exc) | ||||
|             raise ControllerException from exc | ||||
|  | ||||
|     @property | ||||
| @ -43,9 +106,12 @@ class DockerController(BaseController): | ||||
|         ).lower() | ||||
|  | ||||
|     def _get_labels(self) -> dict[str, str]: | ||||
|         return { | ||||
|         labels = { | ||||
|             "io.goauthentik.outpost-uuid": self.outpost.pk.hex, | ||||
|         } | ||||
|         if self.outpost.config.docker_labels: | ||||
|             labels.update(self.outpost.config.docker_labels) | ||||
|         return labels | ||||
|  | ||||
|     def _get_env(self) -> dict[str, str]: | ||||
|         return { | ||||
| @ -110,7 +176,7 @@ class DockerController(BaseController): | ||||
|         image = self.get_container_image() | ||||
|         try: | ||||
|             self.client.images.pull(image) | ||||
|         except DockerException: | ||||
|         except DockerException:  # pragma: no cover | ||||
|             image = f"goauthentik.io/{self.outpost.type}:latest" | ||||
|             self.client.images.pull(image) | ||||
|         return image | ||||
| @ -144,7 +210,7 @@ class DockerController(BaseController): | ||||
|                 True, | ||||
|             ) | ||||
|  | ||||
|     def _migrate_container_name(self): | ||||
|     def _migrate_container_name(self):  # pragma: no cover | ||||
|         """Migrate 2021.9 to 2021.10+""" | ||||
|         old_name = f"authentik-proxy-{self.outpost.uuid.hex}" | ||||
|         try: | ||||
| @ -169,7 +235,7 @@ class DockerController(BaseController): | ||||
|             # Check if the container is out of date, delete it and retry | ||||
|             if len(container.image.tags) > 0: | ||||
|                 should_image = self.try_pull_image() | ||||
|                 if should_image not in container.image.tags: | ||||
|                 if should_image not in container.image.tags:  # pragma: no cover | ||||
|                     self.logger.info( | ||||
|                         "Container has mismatched image, re-creating...", | ||||
|                         has=container.image.tags, | ||||
|  | ||||
| @ -20,6 +20,11 @@ if TYPE_CHECKING: | ||||
| T = TypeVar("T", V1Pod, V1Deployment) | ||||
|  | ||||
|  | ||||
| def get_version() -> str: | ||||
|     """Wrapper for __version__ to make testing easier""" | ||||
|     return __version__ | ||||
|  | ||||
|  | ||||
| class KubernetesObjectReconciler(Generic[T]): | ||||
|     """Base Kubernetes Reconciler, handles the basic logic.""" | ||||
|  | ||||
| @ -146,13 +151,13 @@ class KubernetesObjectReconciler(Generic[T]): | ||||
|         return V1ObjectMeta( | ||||
|             namespace=self.namespace, | ||||
|             labels={ | ||||
|                 "app.kubernetes.io/name": f"authentik-{self.controller.outpost.type.lower()}", | ||||
|                 "app.kubernetes.io/instance": slugify(self.controller.outpost.name), | ||||
|                 "app.kubernetes.io/version": __version__, | ||||
|                 "app.kubernetes.io/managed-by": "goauthentik.io", | ||||
|                 "goauthentik.io/outpost-uuid": self.controller.outpost.uuid.hex, | ||||
|                 "goauthentik.io/outpost-type": str(self.controller.outpost.type), | ||||
|                 "app.kubernetes.io/name": f"authentik-{self.controller.outpost.type.lower()}", | ||||
|                 "app.kubernetes.io/version": get_version(), | ||||
|                 "goauthentik.io/outpost-name": slugify(self.controller.outpost.name), | ||||
|                 "goauthentik.io/outpost-type": str(self.controller.outpost.type), | ||||
|                 "goauthentik.io/outpost-uuid": self.controller.outpost.uuid.hex, | ||||
|             }, | ||||
|             **kwargs, | ||||
|         ) | ||||
|  | ||||
| @ -18,6 +18,7 @@ from kubernetes.client import ( | ||||
|     V1SecretKeySelector, | ||||
| ) | ||||
|  | ||||
| from authentik import __version__, get_full_version | ||||
| from authentik.outposts.controllers.base import FIELD_MANAGER | ||||
| from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler | ||||
| from authentik.outposts.controllers.k8s.triggers import NeedsUpdate | ||||
| @ -52,15 +53,18 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]): | ||||
|             raise NeedsUpdate() | ||||
|         super().reconcile(current, reference) | ||||
|  | ||||
|     def get_pod_meta(self) -> dict[str, str]: | ||||
|     def get_pod_meta(self, **kwargs) -> dict[str, str]: | ||||
|         """Get common object metadata""" | ||||
|         return { | ||||
|             "app.kubernetes.io/name": "authentik-outpost", | ||||
|             "app.kubernetes.io/managed-by": "goauthentik.io", | ||||
|             "goauthentik.io/outpost-uuid": self.controller.outpost.uuid.hex, | ||||
|             "goauthentik.io/outpost-name": slugify(self.controller.outpost.name), | ||||
|             "goauthentik.io/outpost-type": str(self.controller.outpost.type), | ||||
|         } | ||||
|         kwargs.update( | ||||
|             { | ||||
|                 "app.kubernetes.io/name": f"authentik-outpost-{self.outpost.type}", | ||||
|                 "app.kubernetes.io/managed-by": "goauthentik.io", | ||||
|                 "goauthentik.io/outpost-uuid": self.controller.outpost.uuid.hex, | ||||
|                 "goauthentik.io/outpost-name": slugify(self.controller.outpost.name), | ||||
|                 "goauthentik.io/outpost-type": str(self.controller.outpost.type), | ||||
|             } | ||||
|         ) | ||||
|         return kwargs | ||||
|  | ||||
|     def get_reference_object(self) -> V1Deployment: | ||||
|         """Get deployment object for outpost""" | ||||
| @ -77,13 +81,24 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]): | ||||
|         meta = self.get_object_meta(name=self.name) | ||||
|         image_name = self.controller.get_container_image() | ||||
|         image_pull_secrets = self.outpost.config.kubernetes_image_pull_secrets | ||||
|         version = get_full_version() | ||||
|         return V1Deployment( | ||||
|             metadata=meta, | ||||
|             spec=V1DeploymentSpec( | ||||
|                 replicas=self.outpost.config.kubernetes_replicas, | ||||
|                 selector=V1LabelSelector(match_labels=self.get_pod_meta()), | ||||
|                 template=V1PodTemplateSpec( | ||||
|                     metadata=V1ObjectMeta(labels=self.get_pod_meta()), | ||||
|                     metadata=V1ObjectMeta( | ||||
|                         labels=self.get_pod_meta( | ||||
|                             **{ | ||||
|                                 # Support istio-specific labels, but also use the standard k8s | ||||
|                                 # recommendations | ||||
|                                 "app.kubernetes.io/version": version, | ||||
|                                 "app": "authentik-outpost", | ||||
|                                 "version": version, | ||||
|                             } | ||||
|                         ) | ||||
|                     ), | ||||
|                     spec=V1PodSpec( | ||||
|                         image_pull_secrets=[ | ||||
|                             V1ObjectReference(name=secret) for secret in image_pull_secrets | ||||
|  | ||||
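Note: get_pod_meta() now merges caller-supplied labels with the base set, so the pod template carries both the app.kubernetes.io/* labels and the plain app/version labels that Istio's telemetry conventions expect. The merged labels end up looking roughly like this (all values illustrative):

    # Approximate pod-template labels for a proxy outpost; values are examples only.
    pod_labels = {
        # passed in by get_reference_object() above
        "app.kubernetes.io/version": "2021.10.3",
        "app": "authentik-outpost",
        "version": "2021.10.3",
        # added by get_pod_meta()
        "app.kubernetes.io/name": "authentik-outpost-proxy",
        "app.kubernetes.io/managed-by": "goauthentik.io",
        "goauthentik.io/outpost-uuid": "2a1c9b3fd3594dbf92dfc45f27e6b9f4",
        "goauthentik.io/outpost-name": "my-outpost",
        "goauthentik.io/outpost-type": "proxy",
    }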
| @ -6,6 +6,7 @@ from kubernetes.client import CoreV1Api, V1Service, V1ServicePort, V1ServiceSpec | ||||
| from authentik.outposts.controllers.base import FIELD_MANAGER | ||||
| from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler | ||||
| from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler | ||||
| from authentik.outposts.controllers.k8s.triggers import NeedsUpdate | ||||
| from authentik.outposts.controllers.k8s.utils import compare_ports | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
| @ -25,6 +26,8 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]): | ||||
|         # after an authentik update. However the ports might have also changed during | ||||
|         # the update, so this causes the service to be re-created with higher | ||||
|         # priority than being updated. | ||||
|         if current.spec.selector != reference.spec.selector: | ||||
|             raise NeedsUpdate() | ||||
|         super().reconcile(current, reference) | ||||
|  | ||||
|     def get_reference_object(self) -> V1Service: | ||||
|  | ||||
| @ -2,6 +2,7 @@ | ||||
| from pathlib import Path | ||||
|  | ||||
| from kubernetes.client.models.v1_container_port import V1ContainerPort | ||||
| from kubernetes.client.models.v1_service_port import V1ServicePort | ||||
| from kubernetes.config.incluster_config import SERVICE_TOKEN_FILENAME | ||||
|  | ||||
| from authentik.outposts.controllers.k8s.triggers import NeedsRecreate | ||||
| @ -16,10 +17,31 @@ def get_namespace() -> str: | ||||
|     return "default" | ||||
|  | ||||
|  | ||||
| def compare_ports(current: list[V1ContainerPort], reference: list[V1ContainerPort]): | ||||
| def compare_port( | ||||
|     current: V1ServicePort | V1ContainerPort, reference: V1ServicePort | V1ContainerPort | ||||
| ) -> bool: | ||||
|     """Compare a single port""" | ||||
|     if current.name != reference.name: | ||||
|         return False | ||||
|     if current.protocol != reference.protocol: | ||||
|         return False | ||||
|     if isinstance(current, V1ServicePort) and isinstance(reference, V1ServicePort): | ||||
|         # We only care about the target port | ||||
|         if current.target_port != reference.target_port: | ||||
|             return False | ||||
|     if isinstance(current, V1ContainerPort) and isinstance(reference, V1ContainerPort): | ||||
|         # We only care about the container port | ||||
|         if current.container_port != reference.container_port: | ||||
|             return False | ||||
|     return True | ||||
|  | ||||
|  | ||||
| def compare_ports( | ||||
|     current: list[V1ServicePort | V1ContainerPort], reference: list[V1ServicePort | V1ContainerPort] | ||||
| ): | ||||
|     """Compare ports of a list""" | ||||
|     if len(current) != len(reference): | ||||
|         raise NeedsRecreate() | ||||
|     for port in reference: | ||||
|         if port not in current: | ||||
|         if not any(compare_port(port, current_port) for current_port in current): | ||||
|             raise NeedsRecreate() | ||||
|  | ||||
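Note: compare_ports previously relied on object equality, which also compared fields such as node_port that legitimately differ between the live object and the reference; it now compares only name, protocol and the target/container port. A small usage sketch:

    from kubernetes.client.models.v1_service_port import V1ServicePort

    from authentik.outposts.controllers.k8s.triggers import NeedsRecreate
    from authentik.outposts.controllers.k8s.utils import compare_ports

    current = [
        V1ServicePort(name="http", port=9000, protocol="TCP", target_port=9000, node_port=30123)
    ]
    reference = [V1ServicePort(name="http", port=9000, protocol="TCP", target_port=9000)]

    # No exception: the node_port difference is ignored, only target ports are compared.
    compare_ports(current, reference)

    try:
        compare_ports(current, [V1ServicePort(name="https", port=9443, protocol="TCP", target_port=9443)])
    except NeedsRecreate:
        print("ports differ, service must be re-created")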
| @ -1,34 +1,67 @@ | ||||
| """Kubernetes deployment controller""" | ||||
| from io import StringIO | ||||
| from typing import Type | ||||
|  | ||||
| from kubernetes.client import VersionApi, VersionInfo | ||||
| from kubernetes.client.api_client import ApiClient | ||||
| from kubernetes.client.configuration import Configuration | ||||
| from kubernetes.client.exceptions import OpenApiException | ||||
| from kubernetes.config.config_exception import ConfigException | ||||
| from kubernetes.config.incluster_config import load_incluster_config | ||||
| from kubernetes.config.kube_config import load_kube_config_from_dict | ||||
| from structlog.testing import capture_logs | ||||
| from urllib3.exceptions import HTTPError | ||||
| from yaml import dump_all | ||||
|  | ||||
| from authentik.outposts.controllers.base import BaseController, ControllerException | ||||
| from authentik.outposts.controllers.base import BaseClient, BaseController, ControllerException | ||||
| from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler | ||||
| from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler | ||||
| from authentik.outposts.controllers.k8s.secret import SecretReconciler | ||||
| from authentik.outposts.controllers.k8s.service import ServiceReconciler | ||||
| from authentik.outposts.controllers.k8s.service_monitor import PrometheusServiceMonitorReconciler | ||||
| from authentik.outposts.models import KubernetesServiceConnection, Outpost, ServiceConnectionInvalid | ||||
| from authentik.outposts.models import ( | ||||
|     KubernetesServiceConnection, | ||||
|     Outpost, | ||||
|     OutpostServiceConnectionState, | ||||
|     ServiceConnectionInvalid, | ||||
| ) | ||||
|  | ||||
|  | ||||
| class KubernetesClient(ApiClient, BaseClient): | ||||
|     """Custom kubernetes client based on service connection""" | ||||
|  | ||||
|     def __init__(self, connection: KubernetesServiceConnection): | ||||
|         config = Configuration() | ||||
|         try: | ||||
|             if connection.local: | ||||
|                 load_incluster_config(client_configuration=config) | ||||
|             else: | ||||
|                 load_kube_config_from_dict(connection.kubeconfig, client_configuration=config) | ||||
|             super().__init__(config) | ||||
|         except ConfigException as exc: | ||||
|             raise ServiceConnectionInvalid from exc | ||||
|  | ||||
|     def fetch_state(self) -> OutpostServiceConnectionState: | ||||
|         """Get version info""" | ||||
|         try: | ||||
|             api_instance = VersionApi(self) | ||||
|             version: VersionInfo = api_instance.get_code() | ||||
|             return OutpostServiceConnectionState(version=version.git_version, healthy=True) | ||||
|         except (OpenApiException, HTTPError, ServiceConnectionInvalid): | ||||
|             return OutpostServiceConnectionState(version="", healthy=False) | ||||
|  | ||||
|  | ||||
| class KubernetesController(BaseController): | ||||
|     """Manage deployment of outpost in kubernetes""" | ||||
|  | ||||
|     reconcilers: dict[str, Type[KubernetesObjectReconciler]] | ||||
|     reconcilers: dict[str, type[KubernetesObjectReconciler]] | ||||
|     reconcile_order: list[str] | ||||
|  | ||||
|     client: ApiClient | ||||
|     client: KubernetesClient | ||||
|     connection: KubernetesServiceConnection | ||||
|  | ||||
|     def __init__(self, outpost: Outpost, connection: KubernetesServiceConnection) -> None: | ||||
|         super().__init__(outpost, connection) | ||||
|         self.client = connection.client() | ||||
|         self.client = KubernetesClient(connection) | ||||
|         self.reconcilers = { | ||||
|             "secret": SecretReconciler, | ||||
|             "deployment": DeploymentReconciler, | ||||
|  | ||||
authentik/outposts/docker_ssh.py (new file, 82 lines added)
| @ -0,0 +1,82 @@ | ||||
| """Docker SSH helper""" | ||||
| import os | ||||
| from pathlib import Path | ||||
| from tempfile import gettempdir | ||||
|  | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
|  | ||||
| HEADER = "### Managed by authentik" | ||||
| FOOTER = "### End Managed by authentik" | ||||
|  | ||||
|  | ||||
| def opener(path, flags): | ||||
|     """File opener to create files as 700 perms""" | ||||
|     return os.open(path, flags, 0o700) | ||||
|  | ||||
|  | ||||
| class DockerInlineSSH: | ||||
|     """Create paramiko ssh config from CertificateKeyPair""" | ||||
|  | ||||
|     host: str | ||||
|     keypair: CertificateKeyPair | ||||
|  | ||||
|     key_path: str | ||||
|     config_path: Path | ||||
|     header: str | ||||
|  | ||||
|     def __init__(self, host: str, keypair: CertificateKeyPair) -> None: | ||||
|         self.host = host | ||||
|         self.keypair = keypair | ||||
|         self.config_path = Path("~/.ssh/config").expanduser() | ||||
|         self.header = f"{HEADER} - {self.host}\n" | ||||
|  | ||||
|     def write_config(self, key_path: str) -> bool: | ||||
|         """Update the local user's ssh config file""" | ||||
|         with open(self.config_path, "a+", encoding="utf-8") as ssh_config: | ||||
|             if self.header in ssh_config.readlines(): | ||||
|                 return False | ||||
|             ssh_config.writelines( | ||||
|                 [ | ||||
|                     self.header, | ||||
|                     f"Host {self.host}\n", | ||||
|                     f"    IdentityFile {key_path}\n", | ||||
|                     f"{FOOTER}\n", | ||||
|                     "\n", | ||||
|                 ] | ||||
|             ) | ||||
|         return True | ||||
|  | ||||
|     def write_key(self): | ||||
|         """Write keypair's private key to a temporary file""" | ||||
|         path = Path(gettempdir(), f"{self.keypair.pk}_private.pem") | ||||
|         with open(path, "w", encoding="utf8", opener=opener) as _file: | ||||
|             _file.write(self.keypair.key_data) | ||||
|         return str(path) | ||||
|  | ||||
|     def write(self): | ||||
|         """Write keyfile and update ssh config""" | ||||
|         self.key_path = self.write_key() | ||||
|         was_written = self.write_config(self.key_path) | ||||
|         if not was_written: | ||||
|             self.cleanup() | ||||
|  | ||||
|     def cleanup(self): | ||||
|         """Cleanup when we're done""" | ||||
|         try: | ||||
|             os.unlink(self.key_path) | ||||
|             with open(self.config_path, "r+", encoding="utf-8") as ssh_config: | ||||
|                 start = 0 | ||||
|                 end = 0 | ||||
|                 lines = ssh_config.readlines() | ||||
|                 for idx, line in enumerate(lines): | ||||
|                     if line == self.header: | ||||
|                         start = idx | ||||
|                     if start != 0 and line == f"{FOOTER}\n": | ||||
|                         end = idx | ||||
|             with open(self.config_path, "w+", encoding="utf-8") as ssh_config: | ||||
|                 lines = lines[:start] + lines[end + 2 :] | ||||
|                 ssh_config.writelines(lines) | ||||
|         except OSError: | ||||
|             # If we fail deleting a file it doesn't matter that much | ||||
|             # since we're just in a container | ||||
|             pass | ||||
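Since the hunk above adds the whole DockerInlineSSH helper, a brief usage sketch may help. This is only an illustration: the hostname and keypair lookup below are placeholders, not values taken from the diff.

    from authentik.crypto.models import CertificateKeyPair
    from authentik.outposts.docker_ssh import DockerInlineSSH

    keypair = CertificateKeyPair.objects.first()          # placeholder lookup
    ssh = DockerInlineSSH("docker.example.com", keypair)  # hypothetical host
    ssh.write()        # key goes to a temp file; a managed Host block is appended to ~/.ssh/config
    try:
        ...            # talk to the Docker daemon over ssh:// here
    finally:
        ssh.cleanup()  # removes the key file and strips the managed block again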
| @ -1,4 +1,5 @@ | ||||
| """Create Docker TLSConfig from CertificateKeyPair""" | ||||
| from os import unlink | ||||
| from pathlib import Path | ||||
| from tempfile import gettempdir | ||||
| from typing import Optional | ||||
| @ -14,6 +15,8 @@ class DockerInlineTLS: | ||||
|     verification_kp: Optional[CertificateKeyPair] | ||||
|     authentication_kp: Optional[CertificateKeyPair] | ||||
|  | ||||
|     _paths: list[str] | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         verification_kp: Optional[CertificateKeyPair], | ||||
| @ -21,14 +24,21 @@ class DockerInlineTLS: | ||||
|     ) -> None: | ||||
|         self.verification_kp = verification_kp | ||||
|         self.authentication_kp = authentication_kp | ||||
|         self._paths = [] | ||||
|  | ||||
|     def write_file(self, name: str, contents: str) -> str: | ||||
|         """Wrapper for mkstemp that uses fdopen""" | ||||
|         path = Path(gettempdir(), name) | ||||
|         with open(path, "w", encoding="utf8") as _file: | ||||
|             _file.write(contents) | ||||
|         self._paths.append(str(path)) | ||||
|         return str(path) | ||||
|  | ||||
|     def cleanup(self): | ||||
|         """Clean up certificates when we're done""" | ||||
|         for path in self._paths: | ||||
|             unlink(path) | ||||
|  | ||||
|     def write(self) -> TLSConfig: | ||||
|         """Create TLSConfig with Certificate Key pairs""" | ||||
|         # So yes, this is quite ugly. But sadly, there is no clean way to pass | ||||
|  | ||||
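The change above makes DockerInlineTLS keep track of every temporary file it writes so they can be removed afterwards. A rough sketch of the intended lifecycle, assuming a DockerServiceConnection named connection with tls_verification/tls_authentication set (the DockerClient call mirrors the code removed from models.py below):

    from docker.client import DockerClient
    from authentik.outposts.docker_tls import DockerInlineTLS

    tls = DockerInlineTLS(
        verification_kp=connection.tls_verification,
        authentication_kp=connection.tls_authentication,
    )
    client = DockerClient(base_url=connection.url, tls=tls.write())  # certs written to temp files
    try:
        client.containers.list()
    finally:
        tls.cleanup()  # unlinks every path recorded by write_file()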
| @ -1,8 +1,7 @@ | ||||
| """Outpost models""" | ||||
| from dataclasses import asdict, dataclass, field | ||||
| from datetime import datetime | ||||
| from os import environ | ||||
| from typing import Iterable, Optional, Union | ||||
| from typing import Iterable, Optional | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from dacite import from_dict | ||||
| @ -11,23 +10,13 @@ from django.core.cache import cache | ||||
| from django.db import IntegrityError, models, transaction | ||||
| from django.db.models.base import Model | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from docker.client import DockerClient | ||||
| from docker.errors import DockerException | ||||
| from guardian.models import UserObjectPermission | ||||
| from guardian.shortcuts import assign_perm | ||||
| from kubernetes.client import VersionApi, VersionInfo | ||||
| from kubernetes.client.api_client import ApiClient | ||||
| from kubernetes.client.configuration import Configuration | ||||
| from kubernetes.client.exceptions import OpenApiException | ||||
| from kubernetes.config.config_exception import ConfigException | ||||
| from kubernetes.config.incluster_config import load_incluster_config | ||||
| from kubernetes.config.kube_config import load_kube_config_from_dict | ||||
| from model_utils.managers import InheritanceManager | ||||
| from packaging.version import LegacyVersion, Version, parse | ||||
| from structlog.stdlib import get_logger | ||||
| from urllib3.exceptions import HTTPError | ||||
|  | ||||
| from authentik import ENV_GIT_HASH_KEY, __version__ | ||||
| from authentik import __version__, get_build_hash | ||||
| from authentik.core.models import ( | ||||
|     USER_ATTRIBUTE_CAN_OVERRIDE_IP, | ||||
|     USER_ATTRIBUTE_SA, | ||||
| @ -44,7 +33,7 @@ from authentik.lib.sentry import SentryIgnoredException | ||||
| from authentik.lib.utils.errors import exception_to_string | ||||
| from authentik.managed.models import ManagedModel | ||||
| from authentik.outposts.controllers.k8s.utils import get_namespace | ||||
| from authentik.outposts.docker_tls import DockerInlineTLS | ||||
| from authentik.tenants.models import Tenant | ||||
|  | ||||
| OUR_VERSION = parse(__version__) | ||||
| OUTPOST_HELLO_INTERVAL = 10 | ||||
| @ -71,6 +60,7 @@ class OutpostConfig: | ||||
|  | ||||
|     docker_network: Optional[str] = field(default=None) | ||||
|     docker_map_ports: bool = field(default=True) | ||||
|     docker_labels: Optional[dict[str, str]] = field(default=None) | ||||
|  | ||||
|     container_image: Optional[str] = field(default=None) | ||||
|  | ||||
| @ -86,7 +76,7 @@ class OutpostConfig: | ||||
| class OutpostModel(Model): | ||||
|     """Base model for providers that need more objects than just themselves""" | ||||
|  | ||||
|     def get_required_objects(self) -> Iterable[Union[models.Model, str]]: | ||||
|     def get_required_objects(self) -> Iterable[models.Model | str]: | ||||
|         """Return a list of all required objects""" | ||||
|         return [self] | ||||
|  | ||||
| @ -149,10 +139,6 @@ class OutpostServiceConnection(models.Model): | ||||
|             return OutpostServiceConnectionState("", False) | ||||
|         return state | ||||
|  | ||||
|     def fetch_state(self) -> OutpostServiceConnectionState: | ||||
|         """Fetch current Service Connection state""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     @property | ||||
|     def component(self) -> str: | ||||
|         """Return component used to edit this object""" | ||||
| @ -210,35 +196,6 @@ class DockerServiceConnection(OutpostServiceConnection): | ||||
|     def __str__(self) -> str: | ||||
|         return f"Docker Service-Connection {self.name}" | ||||
|  | ||||
|     def client(self) -> DockerClient: | ||||
|         """Get DockerClient""" | ||||
|         try: | ||||
|             client = None | ||||
|             if self.local: | ||||
|                 client = DockerClient.from_env() | ||||
|             else: | ||||
|                 client = DockerClient( | ||||
|                     base_url=self.url, | ||||
|                     tls=DockerInlineTLS( | ||||
|                         verification_kp=self.tls_verification, | ||||
|                         authentication_kp=self.tls_authentication, | ||||
|                     ).write(), | ||||
|                 ) | ||||
|             client.containers.list() | ||||
|         except DockerException as exc: | ||||
|             LOGGER.warning(exc) | ||||
|             raise ServiceConnectionInvalid from exc | ||||
|         return client | ||||
|  | ||||
|     def fetch_state(self) -> OutpostServiceConnectionState: | ||||
|         try: | ||||
|             client = self.client() | ||||
|             return OutpostServiceConnectionState( | ||||
|                 version=client.info()["ServerVersion"], healthy=True | ||||
|             ) | ||||
|         except ServiceConnectionInvalid: | ||||
|             return OutpostServiceConnectionState(version="", healthy=False) | ||||
|  | ||||
|     class Meta: | ||||
|  | ||||
|         verbose_name = _("Docker Service-Connection") | ||||
| @ -265,27 +222,6 @@ class KubernetesServiceConnection(OutpostServiceConnection): | ||||
|     def __str__(self) -> str: | ||||
|         return f"Kubernetes Service-Connection {self.name}" | ||||
|  | ||||
|     def fetch_state(self) -> OutpostServiceConnectionState: | ||||
|         try: | ||||
|             client = self.client() | ||||
|             api_instance = VersionApi(client) | ||||
|             version: VersionInfo = api_instance.get_code() | ||||
|             return OutpostServiceConnectionState(version=version.git_version, healthy=True) | ||||
|         except (OpenApiException, HTTPError, ServiceConnectionInvalid): | ||||
|             return OutpostServiceConnectionState(version="", healthy=False) | ||||
|  | ||||
|     def client(self) -> ApiClient: | ||||
|         """Get Kubernetes client configured from kubeconfig""" | ||||
|         config = Configuration() | ||||
|         try: | ||||
|             if self.local: | ||||
|                 load_incluster_config(client_configuration=config) | ||||
|             else: | ||||
|                 load_kube_config_from_dict(self.kubeconfig, client_configuration=config) | ||||
|             return ApiClient(config) | ||||
|         except ConfigException as exc: | ||||
|             raise ServiceConnectionInvalid from exc | ||||
|  | ||||
|     class Meta: | ||||
|  | ||||
|         verbose_name = _("Kubernetes Service-Connection") | ||||
| @ -385,7 +321,8 @@ class Outpost(ManagedModel): | ||||
|                     user.user_permissions.add(permission.first()) | ||||
|         LOGGER.debug( | ||||
|             "Updated service account's permissions", | ||||
|             perms=UserObjectPermission.objects.filter(user=user), | ||||
|             obj_perms=UserObjectPermission.objects.filter(user=user), | ||||
|             perms=user.user_permissions.all(), | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
| @ -438,9 +375,9 @@ class Outpost(ManagedModel): | ||||
|             Token.objects.filter(identifier=self.token_identifier).delete() | ||||
|             return self.token | ||||
|  | ||||
|     def get_required_objects(self) -> Iterable[Union[models.Model, str]]: | ||||
|     def get_required_objects(self) -> Iterable[models.Model | str]: | ||||
|         """Get an iterator of all objects the user needs read access to""" | ||||
|         objects: list[Union[models.Model, str]] = [ | ||||
|         objects: list[models.Model | str] = [ | ||||
|             self, | ||||
|             "authentik_events.add_event", | ||||
|         ] | ||||
| @ -449,6 +386,10 @@ class Outpost(ManagedModel): | ||||
|                 objects.extend(provider.get_required_objects()) | ||||
|             else: | ||||
|                 objects.append(provider) | ||||
|         if self.managed: | ||||
|             for tenant in Tenant.objects.filter(web_certificate__isnull=False): | ||||
|                 objects.append(tenant) | ||||
|                 objects.append(tenant.web_certificate) | ||||
|         return objects | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
| @ -463,7 +404,7 @@ class OutpostState: | ||||
|     channel_ids: list[str] = field(default_factory=list) | ||||
|     last_seen: Optional[datetime] = field(default=None) | ||||
|     version: Optional[str] = field(default=None) | ||||
|     version_should: Union[Version, LegacyVersion] = field(default=OUR_VERSION) | ||||
|     version_should: Version | LegacyVersion = field(default=OUR_VERSION) | ||||
|     build_hash: str = field(default="") | ||||
|  | ||||
|     _outpost: Optional[Outpost] = field(default=None) | ||||
| @ -473,7 +414,7 @@ class OutpostState: | ||||
|         """Check if outpost version matches our version""" | ||||
|         if not self.version: | ||||
|             return False | ||||
|         if self.build_hash != environ.get(ENV_GIT_HASH_KEY, ""): | ||||
|         if self.build_hash != get_build_hash(): | ||||
|             return False | ||||
|         return parse(self.version) < OUR_VERSION | ||||
|  | ||||
| @ -481,6 +422,8 @@ class OutpostState: | ||||
|     def for_outpost(outpost: Outpost) -> list["OutpostState"]: | ||||
|         """Get all states for an outpost""" | ||||
|         keys = cache.keys(f"{outpost.state_cache_prefix}_*") | ||||
|         if not keys: | ||||
|             return [] | ||||
|         states = [] | ||||
|         for key in keys: | ||||
|             instance_uid = key.replace(f"{outpost.state_cache_prefix}_", "") | ||||
|  | ||||
| @ -10,6 +10,7 @@ from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.lib.utils.reflection import class_to_path | ||||
| from authentik.outposts.models import Outpost, OutpostServiceConnection | ||||
| from authentik.outposts.tasks import CACHE_KEY_OUTPOST_DOWN, outpost_controller, outpost_post_save | ||||
| from authentik.tenants.models import Tenant | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| UPDATE_TRIGGERING_MODELS = ( | ||||
| @ -17,6 +18,7 @@ UPDATE_TRIGGERING_MODELS = ( | ||||
|     OutpostServiceConnection, | ||||
|     Provider, | ||||
|     CertificateKeyPair, | ||||
|     Tenant, | ||||
| ) | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -25,6 +25,8 @@ from authentik.events.monitored_tasks import ( | ||||
| ) | ||||
| from authentik.lib.utils.reflection import path_to_class | ||||
| from authentik.outposts.controllers.base import BaseController, ControllerException | ||||
| from authentik.outposts.controllers.docker import DockerClient | ||||
| from authentik.outposts.controllers.kubernetes import KubernetesClient | ||||
| from authentik.outposts.models import ( | ||||
|     DockerServiceConnection, | ||||
|     KubernetesServiceConnection, | ||||
| @ -45,21 +47,21 @@ LOGGER = get_logger() | ||||
| CACHE_KEY_OUTPOST_DOWN = "outpost_teardown_%s" | ||||
|  | ||||
|  | ||||
| def controller_for_outpost(outpost: Outpost) -> Optional[BaseController]: | ||||
| def controller_for_outpost(outpost: Outpost) -> Optional[type[BaseController]]: | ||||
|     """Get a controller for the outpost, when a service connection is defined""" | ||||
|     if not outpost.service_connection: | ||||
|         return None | ||||
|     service_connection = outpost.service_connection | ||||
|     if outpost.type == OutpostType.PROXY: | ||||
|         if isinstance(service_connection, DockerServiceConnection): | ||||
|             return ProxyDockerController(outpost, service_connection) | ||||
|             return ProxyDockerController | ||||
|         if isinstance(service_connection, KubernetesServiceConnection): | ||||
|             return ProxyKubernetesController(outpost, service_connection) | ||||
|             return ProxyKubernetesController | ||||
|     if outpost.type == OutpostType.LDAP: | ||||
|         if isinstance(service_connection, DockerServiceConnection): | ||||
|             return LDAPDockerController(outpost, service_connection) | ||||
|             return LDAPDockerController | ||||
|         if isinstance(service_connection, KubernetesServiceConnection): | ||||
|             return LDAPKubernetesController(outpost, service_connection) | ||||
|             return LDAPKubernetesController | ||||
|     return None | ||||
|  | ||||
|  | ||||
| @ -71,7 +73,16 @@ def outpost_service_connection_state(connection_pk: Any): | ||||
|     ) | ||||
|     if not connection: | ||||
|         return | ||||
|     state = connection.fetch_state() | ||||
|     if isinstance(connection, DockerServiceConnection): | ||||
|         cls = DockerClient | ||||
|     if isinstance(connection, KubernetesServiceConnection): | ||||
|         cls = KubernetesClient | ||||
|     try: | ||||
|         with cls(connection) as client: | ||||
|             state = client.fetch_state() | ||||
|     except ServiceConnectionInvalid as exc: | ||||
|         LOGGER.warning("Failed to get client status", exc=exc) | ||||
|         return | ||||
|     cache.set(connection.state_key, state, timeout=None) | ||||
|  | ||||
|  | ||||
| @ -114,14 +125,15 @@ def outpost_controller( | ||||
|         return | ||||
|     self.set_uid(slugify(outpost.name)) | ||||
|     try: | ||||
|         controller = controller_for_outpost(outpost) | ||||
|         if not controller: | ||||
|         controller_type = controller_for_outpost(outpost) | ||||
|         if not controller_type: | ||||
|             return | ||||
|         logs = getattr(controller, f"{action}_with_logs")() | ||||
|         LOGGER.debug("---------------Outpost Controller logs starting----------------") | ||||
|         for log in logs: | ||||
|             LOGGER.debug(log) | ||||
|         LOGGER.debug("-----------------Outpost Controller logs end-------------------") | ||||
|         with controller_type(outpost, outpost.service_connection) as controller: | ||||
|             logs = getattr(controller, f"{action}_with_logs")() | ||||
|             LOGGER.debug("---------------Outpost Controller logs starting----------------") | ||||
|             for log in logs: | ||||
|                 LOGGER.debug(log) | ||||
|             LOGGER.debug("-----------------Outpost Controller logs end-------------------") | ||||
|     except (ControllerException, ServiceConnectionInvalid) as exc: | ||||
|         self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc)) | ||||
|     else: | ||||
|  | ||||
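To summarize the tasks.py changes: controller_for_outpost now returns the controller class instead of an instance, outpost_controller instantiates it inside a with block so the underlying client is released on exit, and the state task does the same with the new DockerClient/KubernetesClient wrappers. A condensed sketch of the pattern (the context-manager protocol on the controller classes is implied by this diff rather than shown here):

    controller_type = controller_for_outpost(outpost)  # a BaseController subclass, or None
    if controller_type:
        with controller_type(outpost, outpost.service_connection) as controller:
            for log in controller.up_with_logs():      # same call the task makes via getattr
                LOGGER.debug(log)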
authentik/outposts/tests/test_controller_docker.py (new file, 124 lines)
							| @ -0,0 +1,124 @@ | ||||
| """Docker controller tests""" | ||||
| from django.test import TestCase | ||||
| from docker.models.containers import Container | ||||
|  | ||||
| from authentik.managed.manager import ObjectManager | ||||
| from authentik.outposts.controllers.base import ControllerException | ||||
| from authentik.outposts.controllers.docker import DockerController | ||||
| from authentik.outposts.managed import MANAGED_OUTPOST | ||||
| from authentik.outposts.models import DockerServiceConnection, Outpost, OutpostType | ||||
| from authentik.providers.proxy.controllers.docker import ProxyDockerController | ||||
|  | ||||
|  | ||||
| class DockerControllerTests(TestCase): | ||||
|     """Docker controller tests""" | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
|         self.outpost = Outpost.objects.create( | ||||
|             name="test", | ||||
|             type=OutpostType.PROXY, | ||||
|         ) | ||||
|         self.integration = DockerServiceConnection(name="test") | ||||
|         ObjectManager().run() | ||||
|  | ||||
|     def test_init_managed(self): | ||||
|         """Docker controller shouldn't do anything for managed outpost""" | ||||
|         controller = DockerController( | ||||
|             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), self.integration | ||||
|         ) | ||||
|         self.assertIsNone(controller.up()) | ||||
|         self.assertIsNone(controller.down()) | ||||
|  | ||||
|     def test_init_invalid(self): | ||||
|         """Ensure init fails with invalid client""" | ||||
|         with self.assertRaises(ControllerException): | ||||
|             DockerController(self.outpost, self.integration) | ||||
|  | ||||
|     def test_env_valid(self): | ||||
|         """Test environment check""" | ||||
|         controller = DockerController( | ||||
|             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), self.integration | ||||
|         ) | ||||
|         env = [f"{key}={value}" for key, value in controller._get_env().items()] | ||||
|         container = Container(attrs={"Config": {"Env": env}}) | ||||
|         self.assertFalse(controller._comp_env(container)) | ||||
|  | ||||
|     def test_env_invalid(self): | ||||
|         """Test environment check""" | ||||
|         controller = DockerController( | ||||
|             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), self.integration | ||||
|         ) | ||||
|         container = Container(attrs={"Config": {"Env": []}}) | ||||
|         self.assertTrue(controller._comp_env(container)) | ||||
|  | ||||
|     def test_label_valid(self): | ||||
|         """Test label check""" | ||||
|         controller = DockerController( | ||||
|             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), self.integration | ||||
|         ) | ||||
|         container = Container(attrs={"Config": {"Labels": controller._get_labels()}}) | ||||
|         self.assertFalse(controller._comp_labels(container)) | ||||
|  | ||||
|     def test_label_invalid(self): | ||||
|         """Test label check""" | ||||
|         controller = DockerController( | ||||
|             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), self.integration | ||||
|         ) | ||||
|         container = Container(attrs={"Config": {"Labels": {}}}) | ||||
|         self.assertTrue(controller._comp_labels(container)) | ||||
|         container = Container(attrs={"Config": {"Labels": {"io.goauthentik.outpost-uuid": "foo"}}}) | ||||
|         self.assertTrue(controller._comp_labels(container)) | ||||
|  | ||||
|     def test_port_valid(self): | ||||
|         """Test port check""" | ||||
|         controller = ProxyDockerController( | ||||
|             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), self.integration | ||||
|         ) | ||||
|         container = Container( | ||||
|             attrs={ | ||||
|                 "NetworkSettings": { | ||||
|                     "Ports": { | ||||
|                         "9000/tcp": [{"HostIp": "", "HostPort": "9000"}], | ||||
|                         "9443/tcp": [{"HostIp": "", "HostPort": "9443"}], | ||||
|                     } | ||||
|                 }, | ||||
|                 "State": "", | ||||
|             } | ||||
|         ) | ||||
|         with self.settings(TEST=False): | ||||
|             self.assertFalse(controller._comp_ports(container)) | ||||
|             container.attrs["State"] = "running" | ||||
|             self.assertFalse(controller._comp_ports(container)) | ||||
|  | ||||
|     def test_port_invalid(self): | ||||
|         """Test port check""" | ||||
|         controller = ProxyDockerController( | ||||
|             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), self.integration | ||||
|         ) | ||||
|         container_no_ports = Container( | ||||
|             attrs={"NetworkSettings": {"Ports": None}, "State": "running"} | ||||
|         ) | ||||
|         container_missing_port = Container( | ||||
|             attrs={ | ||||
|                 "NetworkSettings": { | ||||
|                     "Ports": { | ||||
|                         "9443/tcp": [{"HostIp": "", "HostPort": "9443"}], | ||||
|                     } | ||||
|                 }, | ||||
|                 "State": "running", | ||||
|             } | ||||
|         ) | ||||
|         container_mismatched_host = Container( | ||||
|             attrs={ | ||||
|                 "NetworkSettings": { | ||||
|                     "Ports": { | ||||
|                         "9443/tcp": [{"HostIp": "", "HostPort": "123"}], | ||||
|                     } | ||||
|                 }, | ||||
|                 "State": "running", | ||||
|             } | ||||
|         ) | ||||
|         with self.settings(TEST=False): | ||||
|             self.assertFalse(controller._comp_ports(container_no_ports)) | ||||
|             self.assertTrue(controller._comp_ports(container_missing_port)) | ||||
|             self.assertTrue(controller._comp_ports(container_mismatched_host)) | ||||
| @ -5,7 +5,7 @@ from typing import Iterator, Optional | ||||
|  | ||||
| from django.core.cache import cache | ||||
| from django.http import HttpRequest | ||||
| from prometheus_client import Histogram | ||||
| from prometheus_client import Gauge, Histogram | ||||
| from sentry_sdk.hub import Hub | ||||
| from sentry_sdk.tracing import Span | ||||
| from structlog.stdlib import BoundLogger, get_logger | ||||
| @ -14,13 +14,11 @@ from authentik.core.models import User | ||||
| from authentik.policies.models import Policy, PolicyBinding, PolicyBindingModel, PolicyEngineMode | ||||
| from authentik.policies.process import PolicyProcess, cache_key | ||||
| from authentik.policies.types import PolicyRequest, PolicyResult | ||||
| from authentik.root.monitoring import UpdatingGauge | ||||
|  | ||||
| CURRENT_PROCESS = current_process() | ||||
| GAUGE_POLICIES_CACHED = UpdatingGauge( | ||||
| GAUGE_POLICIES_CACHED = Gauge( | ||||
|     "authentik_policies_cached", | ||||
|     "Cached Policies", | ||||
|     update_func=lambda: len(cache.keys("policy_*") or []), | ||||
| ) | ||||
| HIST_POLICIES_BUILD_TIME = Histogram( | ||||
|     "authentik_policies_build_time", | ||||
|  | ||||
| @ -45,7 +45,7 @@ class HaveIBeenPwendPolicy(Policy): | ||||
|                 fields=request.context.keys(), | ||||
|             ) | ||||
|             return PolicyResult(False, _("Password not set in context")) | ||||
|         password = request.context[self.password_field] | ||||
|         password = str(request.context[self.password_field]) | ||||
|  | ||||
|         pw_hash = sha1(password.encode("utf-8")).hexdigest()  # nosec | ||||
|         url = f"https://api.pwnedpasswords.com/range/{pw_hash[:5]}" | ||||
|  | ||||
| @ -13,6 +13,7 @@ class PasswordPolicySerializer(PolicySerializer): | ||||
|         model = PasswordPolicy | ||||
|         fields = PolicySerializer.Meta.fields + [ | ||||
|             "password_field", | ||||
|             "amount_digits", | ||||
|             "amount_uppercase", | ||||
|             "amount_lowercase", | ||||
|             "amount_symbols", | ||||
|  | ||||
| @ -0,0 +1,38 @@ | ||||
| # Generated by Django 4.0 on 2021-12-18 14:54 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_policies_password", "0002_passwordpolicy_password_field"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="passwordpolicy", | ||||
|             name="amount_digits", | ||||
|             field=models.PositiveIntegerField(default=0), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="passwordpolicy", | ||||
|             name="amount_lowercase", | ||||
|             field=models.PositiveIntegerField(default=0), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="passwordpolicy", | ||||
|             name="amount_symbols", | ||||
|             field=models.PositiveIntegerField(default=0), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="passwordpolicy", | ||||
|             name="amount_uppercase", | ||||
|             field=models.PositiveIntegerField(default=0), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="passwordpolicy", | ||||
|             name="length_min", | ||||
|             field=models.PositiveIntegerField(default=0), | ||||
|         ), | ||||
|     ] | ||||
| @ -13,6 +13,7 @@ from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT | ||||
| LOGGER = get_logger() | ||||
| RE_LOWER = re.compile("[a-z]") | ||||
| RE_UPPER = re.compile("[A-Z]") | ||||
| RE_DIGITS = re.compile("[0-9]") | ||||
|  | ||||
|  | ||||
| class PasswordPolicy(Policy): | ||||
| @ -23,10 +24,11 @@ class PasswordPolicy(Policy): | ||||
|         help_text=_("Field key to check, field keys defined in Prompt stages are available."), | ||||
|     ) | ||||
|  | ||||
|     amount_uppercase = models.IntegerField(default=0) | ||||
|     amount_lowercase = models.IntegerField(default=0) | ||||
|     amount_symbols = models.IntegerField(default=0) | ||||
|     length_min = models.IntegerField(default=0) | ||||
|     amount_digits = models.PositiveIntegerField(default=0) | ||||
|     amount_uppercase = models.PositiveIntegerField(default=0) | ||||
|     amount_lowercase = models.PositiveIntegerField(default=0) | ||||
|     amount_symbols = models.PositiveIntegerField(default=0) | ||||
|     length_min = models.PositiveIntegerField(default=0) | ||||
|     symbol_charset = models.TextField(default=r"!\"#$%&'()*+,-./:;<=>?@[\]^_`{|}~ ") | ||||
|     error_message = models.TextField() | ||||
|  | ||||
| @ -40,6 +42,7 @@ class PasswordPolicy(Policy): | ||||
|     def component(self) -> str: | ||||
|         return "ak-policy-password-form" | ||||
|  | ||||
|     # pylint: disable=too-many-return-statements | ||||
|     def passes(self, request: PolicyRequest) -> PolicyResult: | ||||
|         if ( | ||||
|             self.password_field not in request.context | ||||
| @ -62,6 +65,9 @@ class PasswordPolicy(Policy): | ||||
|             LOGGER.debug("password failed", reason="length") | ||||
|             return PolicyResult(False, self.error_message) | ||||
|  | ||||
|         if self.amount_digits > 0 and len(RE_DIGITS.findall(password)) < self.amount_digits: | ||||
|             LOGGER.debug("password failed", reason="amount_digits") | ||||
|             return PolicyResult(False, self.error_message) | ||||
|         if self.amount_lowercase > 0 and len(RE_LOWER.findall(password)) < self.amount_lowercase: | ||||
|             LOGGER.debug("password failed", reason="amount_lowercase") | ||||
|             return PolicyResult(False, self.error_message) | ||||
|  | ||||
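With the new amount_digits field, a password must contain at least that many characters matching RE_DIGITS before the remaining checks are evaluated. A hedged example mirroring the tests further down; every other threshold is left at its default of 0, and the default password_field of "password" is assumed:

    from guardian.utils import get_anonymous_user  # django-guardian helper, as used in the tests
    from authentik.policies.password.models import PasswordPolicy
    from authentik.policies.types import PolicyRequest

    policy = PasswordPolicy(amount_digits=2, error_message="add more digits")
    request = PolicyRequest(get_anonymous_user())
    request.context["password"] = "only1digit"
    assert policy.passes(request).passing is False  # RE_DIGITS only finds one digit
    request.context["password"] = "now2digits2"
    assert policy.passes(request).passing is True   # other thresholds default to 0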
| @ -1,16 +1,14 @@ | ||||
| """Password flow tests""" | ||||
| from django.urls.base import reverse | ||||
| from django.utils.encoding import force_str | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.flows.challenge import ChallengeTypes | ||||
| from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding | ||||
| from authentik.flows.tests import FlowTestCase | ||||
| from authentik.policies.password.models import PasswordPolicy | ||||
| from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage | ||||
|  | ||||
|  | ||||
| class TestPasswordPolicyFlow(APITestCase): | ||||
| class TestPasswordPolicyFlow(FlowTestCase): | ||||
|     """Test Password Policy""" | ||||
|  | ||||
|     def setUp(self) -> None: | ||||
| @ -53,29 +51,22 @@ class TestPasswordPolicyFlow(APITestCase): | ||||
|             {"password": "akadmin"}, | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         self.assertJSONEqual( | ||||
|             force_str(response.content), | ||||
|             { | ||||
|                 "component": "ak-stage-prompt", | ||||
|                 "fields": [ | ||||
|                     { | ||||
|                         "field_key": "password", | ||||
|                         "label": "PASSWORD_LABEL", | ||||
|                         "order": 0, | ||||
|                         "placeholder": "PASSWORD_PLACEHOLDER", | ||||
|                         "required": True, | ||||
|                         "type": "password", | ||||
|                         "sub_text": "", | ||||
|                     } | ||||
|                 ], | ||||
|                 "flow_info": { | ||||
|                     "background": self.flow.background_url, | ||||
|                     "cancel_url": reverse("authentik_flows:cancel"), | ||||
|                     "title": "", | ||||
|                 }, | ||||
|                 "response_errors": { | ||||
|                     "non_field_errors": [{"code": "invalid", "string": self.policy.error_message}] | ||||
|                 }, | ||||
|                 "type": ChallengeTypes.NATIVE.value, | ||||
|         self.assertStageResponse( | ||||
|             response, | ||||
|             self.flow, | ||||
|             component="ak-stage-prompt", | ||||
|             fields=[ | ||||
|                 { | ||||
|                     "field_key": "password", | ||||
|                     "label": "PASSWORD_LABEL", | ||||
|                     "order": 0, | ||||
|                     "placeholder": "PASSWORD_PLACEHOLDER", | ||||
|                     "required": True, | ||||
|                     "type": "password", | ||||
|                     "sub_text": "", | ||||
|                 } | ||||
|             ], | ||||
|             response_errors={ | ||||
|                 "non_field_errors": [{"code": "invalid", "string": self.policy.error_message}] | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
| @ -13,6 +13,7 @@ class TestPasswordPolicy(TestCase): | ||||
|     def setUp(self) -> None: | ||||
|         self.policy = PasswordPolicy.objects.create( | ||||
|             name="test_false", | ||||
|             amount_digits=1, | ||||
|             amount_uppercase=1, | ||||
|             amount_lowercase=2, | ||||
|             amount_symbols=3, | ||||
| @ -38,7 +39,7 @@ class TestPasswordPolicy(TestCase): | ||||
|     def test_failed_lowercase(self): | ||||
|         """not enough lowercase""" | ||||
|         request = PolicyRequest(get_anonymous_user()) | ||||
|         request.context["password"] = "TTTTTTTTTTTTTTTTTTTTTTTe"  # nosec | ||||
|         request.context["password"] = "1TTTTTTTTTTTTTTTTTTTTTTe"  # nosec | ||||
|         result: PolicyResult = self.policy.passes(request) | ||||
|         self.assertFalse(result.passing) | ||||
|         self.assertEqual(result.messages, ("test message",)) | ||||
| @ -46,15 +47,23 @@ class TestPasswordPolicy(TestCase): | ||||
|     def test_failed_uppercase(self): | ||||
|         """not enough uppercase""" | ||||
|         request = PolicyRequest(get_anonymous_user()) | ||||
|         request.context["password"] = "tttttttttttttttttttttttE"  # nosec | ||||
|         request.context["password"] = "1tttttttttttttttttttttE"  # nosec | ||||
|         result: PolicyResult = self.policy.passes(request) | ||||
|         self.assertFalse(result.passing) | ||||
|         self.assertEqual(result.messages, ("test message",)) | ||||
|  | ||||
|     def test_failed_symbols(self): | ||||
|         """not enough uppercase""" | ||||
|         """not enough symbols""" | ||||
|         request = PolicyRequest(get_anonymous_user()) | ||||
|         request.context["password"] = "TETETETETETETETETETETETETe!!!"  # nosec | ||||
|         request.context["password"] = "1ETETETETETETETETETETETETe!!!"  # nosec | ||||
|         result: PolicyResult = self.policy.passes(request) | ||||
|         self.assertFalse(result.passing) | ||||
|         self.assertEqual(result.messages, ("test message",)) | ||||
|  | ||||
|     def test_failed_digits(self): | ||||
|         """not enough digits""" | ||||
|         request = PolicyRequest(get_anonymous_user()) | ||||
|         request.context["password"] = "TETETETETETETETETETETE1e!!!"  # nosec | ||||
|         result: PolicyResult = self.policy.passes(request) | ||||
|         self.assertFalse(result.passing) | ||||
|         self.assertEqual(result.messages, ("test message",)) | ||||
| @ -62,7 +71,7 @@ class TestPasswordPolicy(TestCase): | ||||
|     def test_true(self): | ||||
|         """Positive password case""" | ||||
|         request = PolicyRequest(get_anonymous_user()) | ||||
|         request.context["password"] = generate_key() + "ee!!!"  # nosec | ||||
|         request.context["password"] = generate_key() + "1ee!!!"  # nosec | ||||
|         result: PolicyResult = self.policy.passes(request) | ||||
|         self.assertTrue(result.passing) | ||||
|         self.assertEqual(result.messages, tuple()) | ||||
|  | ||||
| @ -1,11 +1,11 @@ | ||||
| """Source API Views""" | ||||
| """Reputation policy API Views""" | ||||
| from rest_framework import mixins | ||||
| from rest_framework.serializers import ModelSerializer | ||||
| from rest_framework.viewsets import GenericViewSet, ModelViewSet | ||||
|  | ||||
| from authentik.core.api.used_by import UsedByMixin | ||||
| from authentik.policies.api.policies import PolicySerializer | ||||
| from authentik.policies.reputation.models import IPReputation, ReputationPolicy, UserReputation | ||||
| from authentik.policies.reputation.models import Reputation, ReputationPolicy | ||||
|  | ||||
|  | ||||
| class ReputationPolicySerializer(PolicySerializer): | ||||
| @ -29,59 +29,32 @@ class ReputationPolicyViewSet(UsedByMixin, ModelViewSet): | ||||
|     ordering = ["name"] | ||||
|  | ||||
|  | ||||
| class IPReputationSerializer(ModelSerializer): | ||||
|     """IPReputation Serializer""" | ||||
| class ReputationSerializer(ModelSerializer): | ||||
|     """Reputation Serializer""" | ||||
|  | ||||
|     class Meta: | ||||
|         model = IPReputation | ||||
|         model = Reputation | ||||
|         fields = [ | ||||
|             "pk", | ||||
|             "identifier", | ||||
|             "ip", | ||||
|             "ip_geo_data", | ||||
|             "score", | ||||
|             "updated", | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class IPReputationViewSet( | ||||
| class ReputationViewSet( | ||||
|     mixins.RetrieveModelMixin, | ||||
|     mixins.DestroyModelMixin, | ||||
|     UsedByMixin, | ||||
|     mixins.ListModelMixin, | ||||
|     GenericViewSet, | ||||
| ): | ||||
|     """IPReputation Viewset""" | ||||
|     """Reputation Viewset""" | ||||
|  | ||||
|     queryset = IPReputation.objects.all() | ||||
|     serializer_class = IPReputationSerializer | ||||
|     search_fields = ["ip", "score"] | ||||
|     filterset_fields = ["ip", "score"] | ||||
|     queryset = Reputation.objects.all() | ||||
|     serializer_class = ReputationSerializer | ||||
|     search_fields = ["identifier", "ip", "score"] | ||||
|     filterset_fields = ["identifier", "ip", "score"] | ||||
|     ordering = ["ip"] | ||||
|  | ||||
|  | ||||
| class UserReputationSerializer(ModelSerializer): | ||||
|     """UserReputation Serializer""" | ||||
|  | ||||
|     class Meta: | ||||
|         model = UserReputation | ||||
|         fields = [ | ||||
|             "pk", | ||||
|             "username", | ||||
|             "score", | ||||
|             "updated", | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class UserReputationViewSet( | ||||
|     mixins.RetrieveModelMixin, | ||||
|     mixins.DestroyModelMixin, | ||||
|     UsedByMixin, | ||||
|     mixins.ListModelMixin, | ||||
|     GenericViewSet, | ||||
| ): | ||||
|     """UserReputation Viewset""" | ||||
|  | ||||
|     queryset = UserReputation.objects.all() | ||||
|     serializer_class = UserReputationSerializer | ||||
|     search_fields = ["username", "score"] | ||||
|     filterset_fields = ["username", "score"] | ||||
|     ordering = ["username"] | ||||
|  | ||||
| @ -13,3 +13,4 @@ class AuthentikPolicyReputationConfig(AppConfig): | ||||
|  | ||||
|     def ready(self): | ||||
|         import_module("authentik.policies.reputation.signals") | ||||
|         import_module("authentik.policies.reputation.tasks") | ||||
|  | ||||
| @ -0,0 +1,40 @@ | ||||
| # Generated by Django 4.0.1 on 2022-01-05 18:56 | ||||
|  | ||||
| import uuid | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_policies_reputation", "0002_auto_20210529_2046"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.CreateModel( | ||||
|             name="Reputation", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "reputation_uuid", | ||||
|                     models.UUIDField( | ||||
|                         default=uuid.uuid4, primary_key=True, serialize=False, unique=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("identifier", models.TextField()), | ||||
|                 ("ip", models.GenericIPAddressField()), | ||||
|                 ("ip_geo_data", models.JSONField(default=dict)), | ||||
|                 ("score", models.BigIntegerField(default=0)), | ||||
|                 ("updated", models.DateTimeField(auto_now_add=True)), | ||||
|             ], | ||||
|             options={ | ||||
|                 "unique_together": {("identifier", "ip")}, | ||||
|             }, | ||||
|         ), | ||||
|         migrations.DeleteModel( | ||||
|             name="IPReputation", | ||||
|         ), | ||||
|         migrations.DeleteModel( | ||||
|             name="UserReputation", | ||||
|         ), | ||||
|     ] | ||||
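The migration above replaces the separate IPReputation and UserReputation tables with a single Reputation model keyed on (identifier, ip), matching the reworked serializer and viewset earlier in this diff. A small query sketch against the new model; the username and IP below are placeholders:

    from authentik.policies.reputation.models import Reputation

    # every row recorded for one username (stored in `identifier`)
    Reputation.objects.filter(identifier="akadmin").values("ip", "score")
    # every row for one source IP, regardless of username
    Reputation.objects.filter(ip="192.0.2.1").values("identifier", "score")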
Some files were not shown because too many files have changed in this diff.