Compare commits

422 Commits

version/20 ... version-20
| SHA1 | Author | Date | |
|---|---|---|---|
| c15e4b24a1 | |||
| b6f518ffe6 | |||
| 4e476fd4e9 | |||
| 03503363e5 | |||
| 22d6621b02 | |||
| 0023df64c8 | |||
| 59a259e43a | |||
| c6f39f5eb4 | |||
| e3c0aad48a | |||
| 91dd33cee6 | |||
| 5a2c367e89 | |||
| 3b05c9cb1a | |||
| 6e53f1689d | |||
| e3be0f2550 | |||
| 294f2243c1 | |||
| 7b1373e8d6 | |||
| e70b486f20 | |||
| b90174f153 | |||
| 7d7acd8494 | |||
| 4d9d7c5efb | |||
| d614b3608d | |||
| beb2715fa7 | |||
| 5769ff45b5 | |||
| 9d6f79558f | |||
| 41d5bff9d3 | |||
| ec84ba9b6d | |||
| 042a62f99e | |||
| 907f02cfee | |||
| 53fe412bf9 | |||
| ef9e177fe9 | |||
| 28e675596b | |||
| 9b7f57cc75 | |||
| 935a8f4d58 | |||
| 01fcbb325b | |||
| 7d3d17acb9 | |||
| e434321f7c | |||
| ebd476be14 | |||
| 31ba543c62 | |||
| a101d48b5a | |||
| 4c166dcf52 | |||
| 47b1f025e1 | |||
| 8f44c792ac | |||
| e57b6f2347 | |||
| 275d0dfd03 | |||
| f18cbace7a | |||
| 212220554f | |||
| a596392bc3 | |||
| 3e22740eac | |||
| d18a691f63 | |||
| 3cd5e68bc1 | |||
| c741c13132 | |||
| 924f6f104a | |||
| 454594025b | |||
| e72097292c | |||
| ab17a12184 | |||
| 776f3f69a5 | |||
| 8560c7150a | |||
| 301386fb4a | |||
| 68e8b6990b | |||
| 4f800c4758 | |||
| 90c31c2214 | |||
| 50e3d317b2 | |||
| 3eed7bb010 | |||
| 0ef8edc9f1 | |||
| a6373ebb33 | |||
| bf8ce55eea | |||
| 61b4fcb5f3 | |||
| 81275e3bd1 | |||
| 7988bf7748 | |||
| 00d8eec360 | |||
| 82150c8e84 | |||
| 1dbd749a74 | |||
| a96479f16c | |||
| 5d5fb1f37e | |||
| b6f4d6a5eb | |||
| 8ab5c04c2c | |||
| 386944117e | |||
| 9154b9b85d | |||
| fc19372709 | |||
| e5d9c6537c | |||
| bf5cbac314 | |||
| 5cca637a3d | |||
| 5bfb8b454b | |||
| 4d96437972 | |||
| d03b0b8152 | |||
| c249b55ff5 | |||
| 1e1876b34c | |||
| a27493ad1b | |||
| 95b1ab820e | |||
| 5cf9f0002b | |||
| fc7a452b0c | |||
| 25ee0e4b45 | |||
| 46f12e62e8 | |||
| 4245dea25a | |||
| 908db3df81 | |||
| ef4f9aa437 | |||
| 902dd83c67 | |||
| 1c4b78b5f4 | |||
| d854d819d1 | |||
| f246da6b73 | |||
| 4a56b5e827 | |||
| 53b10e64f8 | |||
| 27e4c7027c | |||
| 410d1b97cd | |||
| f93f7e635b | |||
| 74eba04735 | |||
| 01bdaffe36 | |||
| f6b556713a | |||
| abe38bb16a | |||
| f2b8d45999 | |||
| 3f61dff1cb | |||
| b19da6d774 | |||
| 7c55616e29 | |||
| 952a7f07c1 | |||
| 6510b97c1e | |||
| 19b707a0fb | |||
| 320a600349 | |||
| 10110deae5 | |||
| 884c546f32 | |||
| abec906677 | |||
| 22d1dd801c | |||
| 03891cbe09 | |||
| 3c5157dfd4 | |||
| d241e8d51d | |||
| 7ba15884ed | |||
| 47356915b1 | |||
| 2520c92b78 | |||
| e7e0e6d213 | |||
| ca0250e19f | |||
| cf4c7c1bcb | |||
| 670af8789a | |||
| 5c5634830f | |||
| b6b0edb7ad | |||
| 45440abc80 | |||
| 9c42b75567 | |||
| e9a477c1eb | |||
| fa60655a5d | |||
| 5d729b4878 | |||
| 8692f7233f | |||
| 457e17fec3 | |||
| 87e99625e6 | |||
| 6f32eeea43 | |||
| dfcf8b2d40 | |||
| 846006f2e3 | |||
| f557b2129f | |||
| 6dc2003e34 | |||
| 0149c89003 | |||
| f458cae954 | |||
| f01d117ce6 | |||
| 2bde43e5dc | |||
| 84cc0b5490 | |||
| 2f3026084e | |||
| 89696edbee | |||
| c1f0833c09 | |||
| c77f804b77 | |||
| 8e83209631 | |||
| 2e48e0cc2f | |||
| e72f0ab160 | |||
| a3c681cc44 | |||
| 5b3a9e29fb | |||
| 15803dc67d | |||
| ff37e064c9 | |||
| ef8e922e2a | |||
| 34b11524f1 | |||
| 9e2492be5c | |||
| b3ba083ff0 | |||
| 22a8603892 | |||
| d83d058a4b | |||
| ec3fd4a3ab | |||
| 0764668b14 | |||
| 16b6c17305 | |||
| e60509697a | |||
| 85364af9e9 | |||
| cf4b4030aa | |||
| 74dc025869 | |||
| cabdc53553 | |||
| 29e9f399bd | |||
| dad43017a0 | |||
| 7fb939f97b | |||
| 88859b1c26 | |||
| c78236a2a2 | |||
| ba55538a34 | |||
| f742c73e24 | |||
| ca314c262c | |||
| b932b6c963 | |||
| 3c048a1921 | |||
| 8a60a7e26f | |||
| f10b57ba0b | |||
| e53114a645 | |||
| 2e50532518 | |||
| 1936ddfecb | |||
| 4afef46cb8 | |||
| 92b4244e81 | |||
| dfbf7027bc | |||
| eca2ef20d0 | |||
| cac5c7b3ea | |||
| 37ee555c8e | |||
| f910da0f8a | |||
| fc9d270992 | |||
| dcbc3d788a | |||
| 4658018a90 | |||
| 577b7ee515 | |||
| 621773c1ea | |||
| 3da526f20e | |||
| 052e465041 | |||
| c843f18743 | |||
| 80d0b14bb8 | |||
| 68637cf7cf | |||
| 82acba26af | |||
| ff8a812823 | |||
| 7f5fed2aea | |||
| a5c30fd9c7 | |||
| ef23a0da52 | |||
| ba527e7141 | |||
| 8edc254ab5 | |||
| 42627d21b0 | |||
| 2479b157d0 | |||
| 602573f83f | |||
| 20c33fa011 | |||
| 8599d9efe0 | |||
| 8e6fcfe350 | |||
| 558aa45201 | |||
| e9910732bc | |||
| 246dd4b062 | |||
| 4425f8d183 | |||
| c410bb8c36 | |||
| 44f62a4773 | |||
| b6ff04694f | |||
| d4ce0e8e41 | |||
| 362d72da8c | |||
| 88d0f8d8a8 | |||
| 61097b9400 | |||
| 7a73ddfb60 | |||
| d66f13c249 | |||
| 8cc3cb6a42 | |||
| 4c5537ddfe | |||
| a95779157d | |||
| 70256727fd | |||
| ac6afb2b82 | |||
| 2ea7bd86e8 | |||
| 95bce9c9e7 | |||
| 71a22c2a34 | |||
| f3eb85877d | |||
| 273f5211a0 | |||
| db06428ab9 | |||
| 109d8e48d4 | |||
| 2ca115285c | |||
| f5459645a5 | |||
| 14c159500d | |||
| 03da87991f | |||
| e38ee9c580 | |||
| 3bf53b2db1 | |||
| f33190caa5 | |||
| 741822424a | |||
| 0ca6fbb224 | |||
| f72b652b24 | |||
| 0a2c1eb419 | |||
| eb9593a847 | |||
| 7c71c52791 | |||
| 59493c02c4 | |||
| 83089b47d3 | |||
| 103e723d8c | |||
| 7d6e88061f | |||
| f8aab40e3e | |||
| 5123bc1316 | |||
| 30e8408e85 | |||
| bb34474101 | |||
| a105760123 | |||
| f410a77010 | |||
| 6ff8fdcc49 | |||
| 50ca3dc772 | |||
| 2a09fc0ae2 | |||
| fbb6756488 | |||
| f45fb2eac0 | |||
| 7b8cde17e6 | |||
| 186634fc67 | |||
| c84b1b7997 | |||
| 6e83467481 | |||
| 72db17f23b | |||
| ee4e176039 | |||
| e18e681c2b | |||
| 10fe67e08d | |||
| fc1db83be7 | |||
| 3740e65906 | |||
| 30386cd899 | |||
| 64a10e9a46 | |||
| 77d6242cce | |||
| 9a86dcaec3 | |||
| 0b00768b84 | |||
| d162c79373 | |||
| 05db352a0f | |||
| 5bf3d7fe02 | |||
| 1ae1cbebf4 | |||
| 8c16dfc478 | |||
| c6a3286e4c | |||
| 44cfd7e5b0 | |||
| 210d4c5058 | |||
| 6b39d616b1 | |||
| 32ace1bece | |||
| 54f893b84f | |||
| b5685ec072 | |||
| 5854833240 | |||
| 4b2437a6f1 | |||
| 2981ac7b10 | |||
| 59a51c859a | |||
| 47bab6c182 | |||
| 4e6714fffe | |||
| aa6b595545 | |||
| 0131b1f6cc | |||
| 9f53c359dd | |||
| 28e4dba3e8 | |||
| 2afd46e1df | |||
| f5991b19be | |||
| 5cc75cb25c | |||
| 68c1df2d39 | |||
| c83724f45c | |||
| 5f91c150df | |||
| 0bfe999442 | |||
| 58440b16c4 | |||
| 57757a2ff5 | |||
| 2993f506a7 | |||
| e4841d54a1 | |||
| 4f05dcec89 | |||
| ede6bcd31e | |||
| 728c8e994d | |||
| 5290b64415 | |||
| fec6de1ba2 | |||
| 69678dcfa6 | |||
| 4911a243ff | |||
| 70316b37da | |||
| 307cb94e3b | |||
| ace53a8fa5 | |||
| 0544dc3f83 | |||
| 708ff300a3 | |||
| 4e63f0f215 | |||
| 141481df3a | |||
| 29241cc287 | |||
| e81e97d404 | |||
| a5182e5c24 | |||
| cf5ff6e160 | |||
| f2b3a2ec91 | |||
| 69780c67a9 | |||
| ac9cf590bc | |||
| cb6edcb198 | |||
| 8eecc28c3c | |||
| 10b16bc36a | |||
| 2fe88cfea9 | |||
| caab396b56 | |||
| 5f0f4284a2 | |||
| c11be2284d | |||
| aa321196d7 | |||
| ff03db61a8 | |||
| f3b3ce6572 | |||
| 09b02e1aec | |||
| 451a9aaf01 | |||
| eaee7cb562 | |||
| a010c91a52 | |||
| 709194330f | |||
| 5914bbf173 | |||
| 5e9166f859 | |||
| 35b8ef6592 | |||
| 772a939f17 | |||
| 24971801cf | |||
| 43aebe8cb2 | |||
| 19cfc87c84 | |||
| f920f183c8 | |||
| 97f979c81e | |||
| e61411d396 | |||
| c4f985f542 | |||
| 302dee7ab2 | |||
| 83c12ad483 | |||
| 4224fd5c6f | |||
| 597ce1eb42 | |||
| 5ef385f0bb | |||
| cda4be3d47 | |||
| 8cdf22fc94 | |||
| 6efc7578ef | |||
| 4e2457560d | |||
| 2ddf122d27 | |||
| a24651437a | |||
| 30bb7acb17 | |||
| 7859145138 | |||
| 8a8aafec81 | |||
| deebdf2bcc | |||
| 4982c4abcb | |||
| 1486f90077 | |||
| f4988bc45e | |||
| 8abc9cc031 | |||
| 534689895c | |||
| 8a0dd6be24 | |||
| 65d2eed82d | |||
| e450e7b107 | |||
| 552ddda909 | |||
| bafeff7306 | |||
| 6791436302 | |||
| 7eda794070 | |||
| e3129c1067 | |||
| ff481ba6e7 | |||
| a106bad2db | |||
| 3a1c311d02 | |||
| 6465333f4f | |||
| b761659227 | |||
| 9321c355f8 | |||
| 86c8e79ea1 | |||
| 8916b1f8ab | |||
| 41fcf2aba6 | |||
| 87e72b08a9 | |||
| b2fcd42e3c | |||
| fc1b47a80f | |||
| af14e3502e | |||
| a2faa5ceb5 | |||
| 63a19a1381 | |||
| b472dcb7e7 | |||
| 6303909031 | |||
| 4bdc06865b | |||
| 2ee48cd039 | |||
| 893d5f452b | |||
| 340a9bc8ee | |||
| cb3d9f83f1 | |||
| 4ba55aa8e9 | |||
| bab6f501ec | |||
| 7327939684 | |||
| @@ -1,5 +1,5 @@ | |||||||
| [bumpversion] | [bumpversion] | ||||||
| current_version = 2021.12.1-rc3 | current_version = 2021.12.5 | ||||||
| tag = True | tag = True | ||||||
| commit = True | commit = True | ||||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*) | parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*) | ||||||
| @@ -17,7 +17,7 @@ values = | |||||||
| 	beta | 	beta | ||||||
| 	stable | 	stable | ||||||
|  |  | ||||||
| [bumpversion:file:website/docs/installation/docker-compose.md] | [bumpversion:file:pyproject.toml] | ||||||
|  |  | ||||||
| [bumpversion:file:docker-compose.yml] | [bumpversion:file:docker-compose.yml] | ||||||
|  |  | ||||||
| @@ -30,7 +30,3 @@ values = | |||||||
| [bumpversion:file:internal/constants/constants.go] | [bumpversion:file:internal/constants/constants.go] | ||||||
|  |  | ||||||
| [bumpversion:file:web/src/constants.ts] | [bumpversion:file:web/src/constants.ts] | ||||||
|  |  | ||||||
| [bumpversion:file:website/docs/outposts/manual-deploy-docker-compose.md] |  | ||||||
|  |  | ||||||
| [bumpversion:file:website/docs/outposts/manual-deploy-kubernetes.md] |  | ||||||
|  | |||||||
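The `[bumpversion]` hunk above bumps `current_version` from 2021.12.1-rc3 to 2021.12.5 and points one of the managed files at `pyproject.toml` instead of the docker-compose installation doc. Purely as an illustration (not part of the repository), the `parse` pattern shown in the diff accepts both the old release-candidate scheme and the new stable version; a minimal Python check:

```python
import re

# Pattern copied verbatim from the [bumpversion] parse line above.
PARSE = r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)"

for version in ("2021.12.1-rc3", "2021.12.5"):
    groups = re.fullmatch(PARSE, version).groupdict()
    # "2021.12.1-rc3" yields release="rc3"; "2021.12.5" yields an empty release.
    print(version, groups)
```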
							
								
								
									
.github/stale.yml (1 line changed, vendored)
| @@ -7,6 +7,7 @@ exemptLabels: | |||||||
|   - pinned |   - pinned | ||||||
|   - security |   - security | ||||||
|   - pr_wanted |   - pr_wanted | ||||||
|  |   - enhancement/confirmed | ||||||
| # Comment to post when marking an issue as stale. Set to `false` to disable | # Comment to post when marking an issue as stale. Set to `false` to disable | ||||||
| markComment: > | markComment: > | ||||||
|   This issue has been automatically marked as stale because it has not had |   This issue has been automatically marked as stale because it has not had | ||||||
|  | |||||||
							
								
								
									
.github/workflows/ci-main.yml (158 lines changed, vendored)
| @@ -33,40 +33,36 @@ jobs: | |||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v2 |       - uses: actions/checkout@v2 | ||||||
|       - uses: actions/setup-python@v2 |       - uses: actions/setup-python@v2 | ||||||
|         with: |  | ||||||
|           python-version: '3.9' |  | ||||||
|       - uses: actions/setup-node@v2 |       - uses: actions/setup-node@v2 | ||||||
|         with: |         with: | ||||||
|           node-version: '16' |           node-version: '16' | ||||||
|       - id: cache-pipenv |       - id: cache-poetry | ||||||
|         uses: actions/cache@v2.1.7 |         uses: actions/cache@v2.1.7 | ||||||
|         with: |         with: | ||||||
|           path: ~/.local/share/virtualenvs |           path: ~/.cache/pypoetry/virtualenvs | ||||||
|           key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }} |           key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }} | ||||||
|       - name: prepare |       - name: prepare | ||||||
|         env: |         env: | ||||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} |           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||||
|         run: scripts/ci_prepare.sh |         run: scripts/ci_prepare.sh | ||||||
|       - name: run pylint |       - name: run job | ||||||
|         run: pipenv run make ci-${{ matrix.job }} |         run: poetry run make ci-${{ matrix.job }} | ||||||
|   test-migrations: |   test-migrations: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v2 |       - uses: actions/checkout@v2 | ||||||
|       - uses: actions/setup-python@v2 |       - uses: actions/setup-python@v2 | ||||||
|         with: |       - id: cache-poetry | ||||||
|           python-version: '3.9' |  | ||||||
|       - id: cache-pipenv |  | ||||||
|         uses: actions/cache@v2.1.7 |         uses: actions/cache@v2.1.7 | ||||||
|         with: |         with: | ||||||
|           path: ~/.local/share/virtualenvs |           path: ~/.cache/pypoetry/virtualenvs | ||||||
|           key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }} |           key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }} | ||||||
|       - name: prepare |       - name: prepare | ||||||
|         env: |         env: | ||||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} |           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||||
|         run: scripts/ci_prepare.sh |         run: scripts/ci_prepare.sh | ||||||
|       - name: run migrations |       - name: run migrations | ||||||
|         run: pipenv run python -m lifecycle.migrate |         run: poetry run python -m lifecycle.migrate | ||||||
|   test-migrations-from-stable: |   test-migrations-from-stable: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
| @@ -74,71 +70,79 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           fetch-depth: 0 |           fetch-depth: 0 | ||||||
|       - uses: actions/setup-python@v2 |       - uses: actions/setup-python@v2 | ||||||
|         with: |  | ||||||
|           python-version: '3.9' |  | ||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
|         id: ev |         id: ev | ||||||
|         run: | |         run: | | ||||||
|           python ./scripts/gh_env.py |           python ./scripts/gh_env.py | ||||||
|       - id: cache-pipenv |           sudo pip install -U pipenv | ||||||
|  |       - id: cache-poetry | ||||||
|         uses: actions/cache@v2.1.7 |         uses: actions/cache@v2.1.7 | ||||||
|         with: |         with: | ||||||
|           path: ~/.local/share/virtualenvs |           path: ~/.cache/pypoetry/virtualenvs | ||||||
|           key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }} |           key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }} | ||||||
|       - name: checkout stable |       - name: checkout stable | ||||||
|         run: | |         run: | | ||||||
|           # Copy current, latest config to local |           # Copy current, latest config to local | ||||||
|           cp authentik/lib/default.yml local.env.yml |           cp authentik/lib/default.yml local.env.yml | ||||||
|           cp -R .github .. |           cp -R .github .. | ||||||
|           cp -R scripts .. |           cp -R scripts .. | ||||||
|  |           cp -R poetry.lock pyproject.toml .. | ||||||
|           git checkout $(git describe --abbrev=0 --match 'version/*') |           git checkout $(git describe --abbrev=0 --match 'version/*') | ||||||
|           rm -rf .github/ scripts/ |           rm -rf .github/ scripts/ | ||||||
|           mv ../.github ../scripts . |           mv ../.github ../scripts ../poetry.lock ../pyproject.toml . | ||||||
|       - name: prepare |       - name: prepare | ||||||
|         env: |         env: | ||||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} |           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||||
|         run: | |         run: | | ||||||
|           scripts/ci_prepare.sh |           scripts/ci_prepare.sh | ||||||
|           # Sync anyways since stable will have different dependencies |           # Sync anyways since stable will have different dependencies | ||||||
|           pipenv sync --dev |           # TODO: Remove after next stable release | ||||||
|  |           if [[ -f "Pipfile.lock" ]]; then | ||||||
|  |             pipenv install --dev | ||||||
|  |           fi | ||||||
|  |           poetry install | ||||||
|       - name: run migrations to stable |       - name: run migrations to stable | ||||||
|         run: pipenv run python -m lifecycle.migrate |         run: poetry run python -m lifecycle.migrate | ||||||
|       - name: checkout current code |       - name: checkout current code | ||||||
|         run: | |         run: | | ||||||
|           set -x |           set -x | ||||||
|           git fetch |           git fetch | ||||||
|           git reset --hard HEAD |           git reset --hard HEAD | ||||||
|           git checkout $GITHUB_HEAD_REF |           # TODO: Remove after next stable release | ||||||
|           pipenv sync --dev |           rm -f poetry.lock | ||||||
|  |           git checkout $GITHUB_SHA | ||||||
|  |           # TODO: Remove after next stable release | ||||||
|  |           if [[ -f "Pipfile.lock" ]]; then | ||||||
|  |             pipenv install --dev | ||||||
|  |           fi | ||||||
|  |           poetry install | ||||||
|       - name: prepare |       - name: prepare | ||||||
|         env: |         env: | ||||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} |           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||||
|         run: scripts/ci_prepare.sh |         run: scripts/ci_prepare.sh | ||||||
|       - name: migrate to latest |       - name: migrate to latest | ||||||
|         run: pipenv run python -m lifecycle.migrate |         run: poetry run python -m lifecycle.migrate | ||||||
|   test-unittest: |   test-unittest: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v2 |       - uses: actions/checkout@v2 | ||||||
|       - uses: actions/setup-python@v2 |       - uses: actions/setup-python@v2 | ||||||
|         with: |       - id: cache-poetry | ||||||
|           python-version: '3.9' |  | ||||||
|       - id: cache-pipenv |  | ||||||
|         uses: actions/cache@v2.1.7 |         uses: actions/cache@v2.1.7 | ||||||
|         with: |         with: | ||||||
|           path: ~/.local/share/virtualenvs |           path: ~/.cache/pypoetry/virtualenvs | ||||||
|           key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }} |           key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }} | ||||||
|       - name: prepare |       - name: prepare | ||||||
|         env: |         env: | ||||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} |           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||||
|         run: scripts/ci_prepare.sh |         run: scripts/ci_prepare.sh | ||||||
|       - uses: testspace-com/setup-testspace@v1 |       - uses: testspace-com/setup-testspace@v1 | ||||||
|         with: |         with: | ||||||
|           domain: ${{github.repository_owner}} |           domain: ${{github.repository_owner}} | ||||||
|       - name: run unittest |       - name: run unittest | ||||||
|         run: | |         run: | | ||||||
|           pipenv run make test |           poetry run make test | ||||||
|           pipenv run coverage xml |           poetry run coverage xml | ||||||
|       - name: run testspace |       - name: run testspace | ||||||
|         if: ${{ always() }} |         if: ${{ always() }} | ||||||
|         run: | |         run: | | ||||||
| @@ -150,16 +154,14 @@ jobs: | |||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v2 |       - uses: actions/checkout@v2 | ||||||
|       - uses: actions/setup-python@v2 |       - uses: actions/setup-python@v2 | ||||||
|         with: |       - id: cache-poetry | ||||||
|           python-version: '3.9' |  | ||||||
|       - id: cache-pipenv |  | ||||||
|         uses: actions/cache@v2.1.7 |         uses: actions/cache@v2.1.7 | ||||||
|         with: |         with: | ||||||
|           path: ~/.local/share/virtualenvs |           path: ~/.cache/pypoetry/virtualenvs | ||||||
|           key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }} |           key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }} | ||||||
|       - name: prepare |       - name: prepare | ||||||
|         env: |         env: | ||||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} |           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||||
|         run: scripts/ci_prepare.sh |         run: scripts/ci_prepare.sh | ||||||
|       - uses: testspace-com/setup-testspace@v1 |       - uses: testspace-com/setup-testspace@v1 | ||||||
|         with: |         with: | ||||||
| @@ -168,21 +170,19 @@ jobs: | |||||||
|         uses: helm/kind-action@v1.2.0 |         uses: helm/kind-action@v1.2.0 | ||||||
|       - name: run integration |       - name: run integration | ||||||
|         run: | |         run: | | ||||||
|           pipenv run make test-integration |           poetry run make test-integration | ||||||
|           pipenv run coverage xml |           poetry run coverage xml | ||||||
|       - name: run testspace |       - name: run testspace | ||||||
|         if: ${{ always() }} |         if: ${{ always() }} | ||||||
|         run: | |         run: | | ||||||
|           testspace [integration]unittest.xml --link=codecov |           testspace [integration]unittest.xml --link=codecov | ||||||
|       - if: ${{ always() }} |       - if: ${{ always() }} | ||||||
|         uses: codecov/codecov-action@v2 |         uses: codecov/codecov-action@v2 | ||||||
|   test-e2e: |   test-e2e-provider: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v2 |       - uses: actions/checkout@v2 | ||||||
|       - uses: actions/setup-python@v2 |       - uses: actions/setup-python@v2 | ||||||
|         with: |  | ||||||
|           python-version: '3.9' |  | ||||||
|       - uses: actions/setup-node@v2 |       - uses: actions/setup-node@v2 | ||||||
|         with: |         with: | ||||||
|           node-version: '16' |           node-version: '16' | ||||||
| @@ -191,14 +191,14 @@ jobs: | |||||||
|       - uses: testspace-com/setup-testspace@v1 |       - uses: testspace-com/setup-testspace@v1 | ||||||
|         with: |         with: | ||||||
|           domain: ${{github.repository_owner}} |           domain: ${{github.repository_owner}} | ||||||
|       - id: cache-pipenv |       - id: cache-poetry | ||||||
|         uses: actions/cache@v2.1.7 |         uses: actions/cache@v2.1.7 | ||||||
|         with: |         with: | ||||||
|           path: ~/.local/share/virtualenvs |           path: ~/.cache/pypoetry/virtualenvs | ||||||
|           key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }} |           key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }} | ||||||
|       - name: prepare |       - name: prepare | ||||||
|         env: |         env: | ||||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} |           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||||
|         run: | |         run: | | ||||||
|           scripts/ci_prepare.sh |           scripts/ci_prepare.sh | ||||||
|           docker-compose -f tests/e2e/docker-compose.yml up -d |           docker-compose -f tests/e2e/docker-compose.yml up -d | ||||||
| @@ -215,12 +215,57 @@ jobs: | |||||||
|           npm run build |           npm run build | ||||||
|       - name: run e2e |       - name: run e2e | ||||||
|         run: | |         run: | | ||||||
|           pipenv run make test-e2e |           poetry run make test-e2e-provider | ||||||
|           pipenv run coverage xml |           poetry run coverage xml | ||||||
|       - name: run testspace |       - name: run testspace | ||||||
|         if: ${{ always() }} |         if: ${{ always() }} | ||||||
|         run: | |         run: | | ||||||
|           testspace [e2e]unittest.xml --link=codecov |           testspace [e2e-provider]unittest.xml --link=codecov | ||||||
|  |       - if: ${{ always() }} | ||||||
|  |         uses: codecov/codecov-action@v2 | ||||||
|  |   test-e2e-rest: | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     steps: | ||||||
|  |       - uses: actions/checkout@v2 | ||||||
|  |       - uses: actions/setup-python@v2 | ||||||
|  |       - uses: actions/setup-node@v2 | ||||||
|  |         with: | ||||||
|  |           node-version: '16' | ||||||
|  |           cache: 'npm' | ||||||
|  |           cache-dependency-path: web/package-lock.json | ||||||
|  |       - uses: testspace-com/setup-testspace@v1 | ||||||
|  |         with: | ||||||
|  |           domain: ${{github.repository_owner}} | ||||||
|  |       - id: cache-poetry | ||||||
|  |         uses: actions/cache@v2.1.7 | ||||||
|  |         with: | ||||||
|  |           path: ~/.cache/pypoetry/virtualenvs | ||||||
|  |           key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }} | ||||||
|  |       - name: prepare | ||||||
|  |         env: | ||||||
|  |           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||||
|  |         run: | | ||||||
|  |           scripts/ci_prepare.sh | ||||||
|  |           docker-compose -f tests/e2e/docker-compose.yml up -d | ||||||
|  |       - id: cache-web | ||||||
|  |         uses: actions/cache@v2.1.7 | ||||||
|  |         with: | ||||||
|  |           path: web/dist | ||||||
|  |           key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/**') }} | ||||||
|  |       - name: prepare web ui | ||||||
|  |         if: steps.cache-web.outputs.cache-hit != 'true' | ||||||
|  |         run: | | ||||||
|  |           cd web | ||||||
|  |           npm i | ||||||
|  |           npm run build | ||||||
|  |       - name: run e2e | ||||||
|  |         run: | | ||||||
|  |           poetry run make test-e2e-rest | ||||||
|  |           poetry run coverage xml | ||||||
|  |       - name: run testspace | ||||||
|  |         if: ${{ always() }} | ||||||
|  |         run: | | ||||||
|  |           testspace [e2e-rest]unittest.xml --link=codecov | ||||||
|       - if: ${{ always() }} |       - if: ${{ always() }} | ||||||
|         uses: codecov/codecov-action@v2 |         uses: codecov/codecov-action@v2 | ||||||
|   ci-core-mark: |   ci-core-mark: | ||||||
| @@ -230,7 +275,8 @@ jobs: | |||||||
|       - test-migrations-from-stable |       - test-migrations-from-stable | ||||||
|       - test-unittest |       - test-unittest | ||||||
|       - test-integration |       - test-integration | ||||||
|       - test-e2e |       - test-e2e-rest | ||||||
|  |       - test-e2e-provider | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - run: echo mark |       - run: echo mark | ||||||
| @@ -252,7 +298,7 @@ jobs: | |||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
|         id: ev |         id: ev | ||||||
|         env: |         env: | ||||||
|           DOCKER_USERNAME: ${{ secrets.HARBOR_USERNAME }} |           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||||
|         run: | |         run: | | ||||||
|           python ./scripts/gh_env.py |           python ./scripts/gh_env.py | ||||||
|       - name: Login to Container Registry |       - name: Login to Container Registry | ||||||
|  | |||||||
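Throughout `ci-main.yml`, the pipenv virtualenv cache is replaced by a Poetry cache keyed on `${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}`, so the cache is invalidated whenever `poetry.lock` changes, and the cache-hit flag is handed to `scripts/ci_prepare.sh` through `INSTALL`. A rough Python sketch of the content-derived key idea (illustrative only; GitHub's `hashFiles()` uses its own hashing scheme and this is not a re-implementation of it):

```python
import hashlib
from pathlib import Path

def poetry_cache_key(lock_file: str = "poetry.lock", runner_os: str = "Linux") -> str:
    """Derive a cache key from the lock file's content, mirroring the idea behind
    the workflow's hashFiles()-based key (not GitHub's actual algorithm)."""
    digest = hashlib.sha256(Path(lock_file).read_bytes()).hexdigest()
    return f"{runner_os}-poetry-cache-v3-{digest}"

# Any change to poetry.lock yields a new key, so the next run misses the cache
# and the prepare step (which receives the cache-hit flag as INSTALL) can
# reinstall dependencies before `poetry run make ci-<job>` executes.
```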
							
								
								
									
.github/workflows/ci-outpost.yml (42 lines changed, vendored)
| @@ -17,7 +17,7 @@ jobs: | |||||||
|       - uses: actions/checkout@v2 |       - uses: actions/checkout@v2 | ||||||
|       - uses: actions/setup-go@v2 |       - uses: actions/setup-go@v2 | ||||||
|         with: |         with: | ||||||
|           go-version: '^1.16.3' |           go-version: "^1.17" | ||||||
|       - name: Run linter |       - name: Run linter | ||||||
|         run: | |         run: | | ||||||
|           # Create folder structure for go embeds |           # Create folder structure for go embeds | ||||||
| @@ -58,7 +58,7 @@ jobs: | |||||||
|       - name: prepare variables |       - name: prepare variables | ||||||
|         id: ev |         id: ev | ||||||
|         env: |         env: | ||||||
|           DOCKER_USERNAME: ${{ secrets.HARBOR_USERNAME }} |           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} | ||||||
|         run: | |         run: | | ||||||
|           python ./scripts/gh_env.py |           python ./scripts/gh_env.py | ||||||
|       - name: Login to Container Registry |       - name: Login to Container Registry | ||||||
| @@ -80,3 +80,41 @@ jobs: | |||||||
|           build-args: | |           build-args: | | ||||||
|             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} |             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} | ||||||
|           platforms: ${{ matrix.arch }} |           platforms: ${{ matrix.arch }} | ||||||
|  |   build-outpost-binary: | ||||||
|  |     timeout-minutes: 120 | ||||||
|  |     needs: | ||||||
|  |       - ci-outpost-mark | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     strategy: | ||||||
|  |       fail-fast: false | ||||||
|  |       matrix: | ||||||
|  |         type: | ||||||
|  |           - proxy | ||||||
|  |           - ldap | ||||||
|  |         goos: [linux] | ||||||
|  |         goarch: [amd64, arm64] | ||||||
|  |     steps: | ||||||
|  |       - uses: actions/checkout@v2 | ||||||
|  |       - uses: actions/setup-go@v2 | ||||||
|  |         with: | ||||||
|  |           go-version: "^1.17" | ||||||
|  |       - uses: actions/setup-node@v2 | ||||||
|  |         with: | ||||||
|  |           node-version: '16' | ||||||
|  |           cache: 'npm' | ||||||
|  |           cache-dependency-path: web/package-lock.json | ||||||
|  |       - name: Build web | ||||||
|  |         run: | | ||||||
|  |           cd web | ||||||
|  |           npm install | ||||||
|  |           npm run build-proxy | ||||||
|  |       - name: Build outpost | ||||||
|  |         run: | | ||||||
|  |           set -x | ||||||
|  |           export GOOS=${{ matrix.goos }} | ||||||
|  |           export GOARCH=${{ matrix.goarch }} | ||||||
|  |           go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }} | ||||||
|  |       - uses: actions/upload-artifact@v2 | ||||||
|  |         with: | ||||||
|  |           name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} | ||||||
|  |           path: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} | ||||||
|  | |||||||
							
								
								
									
.github/workflows/release-publish.yml (73 lines changed, vendored)
| @@ -30,14 +30,14 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           push: ${{ github.event_name == 'release' }} |           push: ${{ github.event_name == 'release' }} | ||||||
|           tags: | |           tags: | | ||||||
|             beryju/authentik:2021.12.1-rc3, |             beryju/authentik:2021.12.5, | ||||||
|             beryju/authentik:latest, |             beryju/authentik:latest, | ||||||
|             ghcr.io/goauthentik/server:2021.12.1-rc3, |             ghcr.io/goauthentik/server:2021.12.5, | ||||||
|             ghcr.io/goauthentik/server:latest |             ghcr.io/goauthentik/server:latest | ||||||
|           platforms: linux/amd64,linux/arm64 |           platforms: linux/amd64,linux/arm64 | ||||||
|           context: . |           context: . | ||||||
|       - name: Building Docker Image (stable) |       - name: Building Docker Image (stable) | ||||||
|         if: ${{ github.event_name == 'release' && !contains('2021.12.1-rc3', 'rc') }} |         if: ${{ github.event_name == 'release' && !contains('2021.12.5', 'rc') }} | ||||||
|         run: | |         run: | | ||||||
|           docker pull beryju/authentik:latest |           docker pull beryju/authentik:latest | ||||||
|           docker tag beryju/authentik:latest beryju/authentik:stable |           docker tag beryju/authentik:latest beryju/authentik:stable | ||||||
| @@ -57,7 +57,7 @@ jobs: | |||||||
|       - uses: actions/checkout@v2 |       - uses: actions/checkout@v2 | ||||||
|       - uses: actions/setup-go@v2 |       - uses: actions/setup-go@v2 | ||||||
|         with: |         with: | ||||||
|           go-version: "^1.15" |           go-version: "^1.17" | ||||||
|       - name: Set up QEMU |       - name: Set up QEMU | ||||||
|         uses: docker/setup-qemu-action@v1.2.0 |         uses: docker/setup-qemu-action@v1.2.0 | ||||||
|       - name: Set up Docker Buildx |       - name: Set up Docker Buildx | ||||||
| @@ -78,14 +78,14 @@ jobs: | |||||||
|         with: |         with: | ||||||
|           push: ${{ github.event_name == 'release' }} |           push: ${{ github.event_name == 'release' }} | ||||||
|           tags: | |           tags: | | ||||||
|             beryju/authentik-${{ matrix.type }}:2021.12.1-rc3, |             beryju/authentik-${{ matrix.type }}:2021.12.5, | ||||||
|             beryju/authentik-${{ matrix.type }}:latest, |             beryju/authentik-${{ matrix.type }}:latest, | ||||||
|             ghcr.io/goauthentik/${{ matrix.type }}:2021.12.1-rc3, |             ghcr.io/goauthentik/${{ matrix.type }}:2021.12.5, | ||||||
|             ghcr.io/goauthentik/${{ matrix.type }}:latest |             ghcr.io/goauthentik/${{ matrix.type }}:latest | ||||||
|           file: ${{ matrix.type }}.Dockerfile |           file: ${{ matrix.type }}.Dockerfile | ||||||
|           platforms: linux/amd64,linux/arm64 |           platforms: linux/amd64,linux/arm64 | ||||||
|       - name: Building Docker Image (stable) |       - name: Building Docker Image (stable) | ||||||
|         if: ${{ github.event_name == 'release' && !contains('2021.12.1-rc3', 'rc') }} |         if: ${{ github.event_name == 'release' && !contains('2021.12.5', 'rc') }} | ||||||
|         run: | |         run: | | ||||||
|           docker pull beryju/authentik-${{ matrix.type }}:latest |           docker pull beryju/authentik-${{ matrix.type }}:latest | ||||||
|           docker tag beryju/authentik-${{ matrix.type }}:latest beryju/authentik-${{ matrix.type }}:stable |           docker tag beryju/authentik-${{ matrix.type }}:latest beryju/authentik-${{ matrix.type }}:stable | ||||||
| @@ -93,10 +93,50 @@ jobs: | |||||||
|           docker pull ghcr.io/goauthentik/${{ matrix.type }}:latest |           docker pull ghcr.io/goauthentik/${{ matrix.type }}:latest | ||||||
|           docker tag ghcr.io/goauthentik/${{ matrix.type }}:latest ghcr.io/goauthentik/${{ matrix.type }}:stable |           docker tag ghcr.io/goauthentik/${{ matrix.type }}:latest ghcr.io/goauthentik/${{ matrix.type }}:stable | ||||||
|           docker push ghcr.io/goauthentik/${{ matrix.type }}:stable |           docker push ghcr.io/goauthentik/${{ matrix.type }}:stable | ||||||
|  |   build-outpost-binary: | ||||||
|  |     timeout-minutes: 120 | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     strategy: | ||||||
|  |       fail-fast: false | ||||||
|  |       matrix: | ||||||
|  |         type: | ||||||
|  |           - proxy | ||||||
|  |           - ldap | ||||||
|  |         goos: [linux, darwin] | ||||||
|  |         goarch: [amd64, arm64] | ||||||
|  |     steps: | ||||||
|  |       - uses: actions/checkout@v2 | ||||||
|  |       - uses: actions/setup-go@v2 | ||||||
|  |         with: | ||||||
|  |           go-version: "^1.17" | ||||||
|  |       - uses: actions/setup-node@v2 | ||||||
|  |         with: | ||||||
|  |           node-version: '16' | ||||||
|  |           cache: 'npm' | ||||||
|  |           cache-dependency-path: web/package-lock.json | ||||||
|  |       - name: Build web | ||||||
|  |         run: | | ||||||
|  |           cd web | ||||||
|  |           npm install | ||||||
|  |           npm run build-proxy | ||||||
|  |       - name: Build outpost | ||||||
|  |         run: | | ||||||
|  |           set -x | ||||||
|  |           export GOOS=${{ matrix.goos }} | ||||||
|  |           export GOARCH=${{ matrix.goarch }} | ||||||
|  |           go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }} | ||||||
|  |       - name: Upload binaries to release | ||||||
|  |         uses: svenstaro/upload-release-action@v2 | ||||||
|  |         with: | ||||||
|  |           repo_token: ${{ secrets.GITHUB_TOKEN }} | ||||||
|  |           file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} | ||||||
|  |           asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} | ||||||
|  |           tag: ${{ github.ref }} | ||||||
|   test-release: |   test-release: | ||||||
|     needs: |     needs: | ||||||
|       - build-server |       - build-server | ||||||
|       - build-outpost |       - build-outpost | ||||||
|  |       - build-outpost-binary | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v2 |       - uses: actions/checkout@v2 | ||||||
| @@ -110,20 +150,17 @@ jobs: | |||||||
|           docker-compose run -u root server test |           docker-compose run -u root server test | ||||||
|   sentry-release: |   sentry-release: | ||||||
|     needs: |     needs: | ||||||
|       - test-release |       - build-server | ||||||
|  |       - build-outpost | ||||||
|  |       - build-outpost-binary | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v2 |       - uses: actions/checkout@v2 | ||||||
|       - name: Setup Node.js environment |       - name: Get static files from docker image | ||||||
|         uses: actions/setup-node@v2 |  | ||||||
|         with: |  | ||||||
|           node-version: '16' |  | ||||||
|       - name: Build web api client and web ui |  | ||||||
|         run: | |         run: | | ||||||
|           export NODE_ENV=production |           docker pull ghcr.io/goauthentik/server:latest | ||||||
|           cd web |           container=$(docker container create ghcr.io/goauthentik/server:latest) | ||||||
|           npm i |           docker cp ${container}:web/ . | ||||||
|           npm run build |  | ||||||
|       - name: Create a Sentry.io release |       - name: Create a Sentry.io release | ||||||
|         uses: getsentry/action-release@v1 |         uses: getsentry/action-release@v1 | ||||||
|         if: ${{ github.event_name == 'release' }} |         if: ${{ github.event_name == 'release' }} | ||||||
| @@ -133,7 +170,7 @@ jobs: | |||||||
|           SENTRY_PROJECT: authentik |           SENTRY_PROJECT: authentik | ||||||
|           SENTRY_URL: https://sentry.beryju.org |           SENTRY_URL: https://sentry.beryju.org | ||||||
|         with: |         with: | ||||||
|           version: authentik@2021.12.1-rc3 |           version: authentik@2021.12.5 | ||||||
|           environment: beryjuorg-prod |           environment: beryjuorg-prod | ||||||
|           sourcemaps: './web/dist' |           sourcemaps: './web/dist' | ||||||
|           url_prefix: '~/static/dist' |           url_prefix: '~/static/dist' | ||||||
|  | |||||||
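The new `build-outpost-binary` job in `release-publish.yml` compiles the proxy and ldap outposts for every goos/goarch combination in the matrix and uploads each binary as a release asset named `authentik-outpost-<type>_<goos>_<goarch>`. A small illustrative Python enumeration of the asset names this matrix produces (the naming pattern is taken from the workflow above; nothing here ships with the repository):

```python
from itertools import product

types = ["proxy", "ldap"]
goos = ["linux", "darwin"]
goarch = ["amd64", "arm64"]

# One release asset per matrix combination: 2 x 2 x 2 = 8 binaries.
for outpost_type, operating_system, arch in product(types, goos, goarch):
    print(f"authentik-outpost-{outpost_type}_{operating_system}_{arch}")
```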
							
								
								
									
.github/workflows/translation-compile.yml (12 lines changed, vendored)
| @@ -22,22 +22,20 @@ jobs: | |||||||
|     steps: |     steps: | ||||||
|       - uses: actions/checkout@v2 |       - uses: actions/checkout@v2 | ||||||
|       - uses: actions/setup-python@v2 |       - uses: actions/setup-python@v2 | ||||||
|         with: |       - id: cache-poetry | ||||||
|           python-version: '3.9' |  | ||||||
|       - id: cache-pipenv |  | ||||||
|         uses: actions/cache@v2.1.7 |         uses: actions/cache@v2.1.7 | ||||||
|         with: |         with: | ||||||
|           path: ~/.local/share/virtualenvs |           path: ~/.cache/pypoetry/virtualenvs | ||||||
|           key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }} |           key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }} | ||||||
|       - name: prepare |       - name: prepare | ||||||
|         env: |         env: | ||||||
|           INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }} |           INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }} | ||||||
|         run: | |         run: | | ||||||
|           sudo apt-get update |           sudo apt-get update | ||||||
|           sudo apt-get install -y gettext |           sudo apt-get install -y gettext | ||||||
|           scripts/ci_prepare.sh |           scripts/ci_prepare.sh | ||||||
|       - name: run compile |       - name: run compile | ||||||
|         run: pipenv run ./manage.py compilemessages |         run: poetry run ./manage.py compilemessages | ||||||
|       - name: Create Pull Request |       - name: Create Pull Request | ||||||
|         uses: peter-evans/create-pull-request@v3 |         uses: peter-evans/create-pull-request@v3 | ||||||
|         id: cpr |         id: cpr | ||||||
|  | |||||||
							
								
								
									
.python-version (1 line changed, new file)
| @@ -0,0 +1 @@ | |||||||
|  | 3.9.7 | ||||||
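The new `.python-version` file pins 3.9.7, which pyenv-style tooling picks up to select the local interpreter. Purely as an illustration (the repository does not ship such a check), a short Python snippet, assuming it is run from the repository root, that warns when the active interpreter differs from the pinned one:

```python
import platform
from pathlib import Path

pinned = Path(".python-version").read_text().strip()  # "3.9.7" per the diff above
running = platform.python_version()

if running != pinned:
    # Hypothetical helper check, not part of authentik itself.
    print(f"warning: running Python {running}, but .python-version pins {pinned}")
```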
							
								
								
									
.vscode/settings.json (3 lines changed, vendored)
| @@ -11,7 +11,8 @@ | |||||||
|         "saml", |         "saml", | ||||||
|         "totp", |         "totp", | ||||||
|         "webauthn", |         "webauthn", | ||||||
|         "traefik" |         "traefik", | ||||||
|  |         "passwordless" | ||||||
|     ], |     ], | ||||||
|     "python.linting.pylintEnabled": true, |     "python.linting.pylintEnabled": true, | ||||||
|     "todo-tree.tree.showCountsInTree": true, |     "todo-tree.tree.showCountsInTree": true, | ||||||
|  | |||||||
							
								
								
									
Dockerfile (41 lines changed)
| @@ -1,16 +1,4 @@ | |||||||
| # Stage 1: Lock python dependencies | # Stage 1: Build website | ||||||
| FROM docker.io/python:3.10.1-slim-bullseye as locker |  | ||||||
|  |  | ||||||
| COPY ./Pipfile /app/ |  | ||||||
| COPY ./Pipfile.lock /app/ |  | ||||||
|  |  | ||||||
| WORKDIR /app/ |  | ||||||
|  |  | ||||||
| RUN pip install pipenv && \ |  | ||||||
|     pipenv lock -r > requirements.txt && \ |  | ||||||
|     pipenv lock -r --dev-only > requirements-dev.txt |  | ||||||
|  |  | ||||||
| # Stage 2: Build website |  | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/node:16 as website-builder | FROM --platform=${BUILDPLATFORM} docker.io/node:16 as website-builder | ||||||
|  |  | ||||||
| COPY ./website /work/website/ | COPY ./website /work/website/ | ||||||
| @@ -18,7 +6,7 @@ COPY ./website /work/website/ | |||||||
| ENV NODE_ENV=production | ENV NODE_ENV=production | ||||||
| RUN cd /work/website && npm i && npm run build-docs-only | RUN cd /work/website && npm i && npm run build-docs-only | ||||||
|  |  | ||||||
| # Stage 3: Build webui | # Stage 2: Build webui | ||||||
| FROM --platform=${BUILDPLATFORM} docker.io/node:16 as web-builder | FROM --platform=${BUILDPLATFORM} docker.io/node:16 as web-builder | ||||||
|  |  | ||||||
| COPY ./web /work/web/ | COPY ./web /work/web/ | ||||||
| @@ -27,8 +15,8 @@ COPY ./website /work/website/ | |||||||
| ENV NODE_ENV=production | ENV NODE_ENV=production | ||||||
| RUN cd /work/web && npm i && npm run build | RUN cd /work/web && npm i && npm run build | ||||||
|  |  | ||||||
| # Stage 4: Build go proxy | # Stage 3: Build go proxy | ||||||
| FROM docker.io/golang:1.17.4-bullseye AS builder | FROM docker.io/golang:1.17.5-bullseye AS builder | ||||||
|  |  | ||||||
| WORKDIR /work | WORKDIR /work | ||||||
|  |  | ||||||
| @@ -43,29 +31,38 @@ COPY ./go.sum /work/go.sum | |||||||
|  |  | ||||||
| RUN go build -o /work/authentik ./cmd/server/main.go | RUN go build -o /work/authentik ./cmd/server/main.go | ||||||
|  |  | ||||||
| # Stage 5: Run | # Stage 4: Run | ||||||
| FROM docker.io/python:3.10.1-slim-bullseye | FROM docker.io/python:3.10.1-slim-bullseye | ||||||
|  |  | ||||||
|  | LABEL org.opencontainers.image.url https://goauthentik.io | ||||||
|  | LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info. | ||||||
|  | LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik | ||||||
|  |  | ||||||
| WORKDIR / | WORKDIR / | ||||||
| COPY --from=locker /app/requirements.txt / |  | ||||||
| COPY --from=locker /app/requirements-dev.txt / |  | ||||||
|  |  | ||||||
| ARG GIT_BUILD_HASH | ARG GIT_BUILD_HASH | ||||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||||
|  |  | ||||||
|  | COPY ./pyproject.toml / | ||||||
|  | COPY ./poetry.lock / | ||||||
|  |  | ||||||
| RUN apt-get update && \ | RUN apt-get update && \ | ||||||
|     apt-get install -y --no-install-recommends \ |     apt-get install -y --no-install-recommends \ | ||||||
|         curl ca-certificates gnupg git runit libpq-dev \ |         curl ca-certificates gnupg git runit libpq-dev \ | ||||||
|         postgresql-client build-essential libxmlsec1-dev \ |         postgresql-client build-essential libxmlsec1-dev \ | ||||||
|         pkg-config libmaxminddb0 && \ |         pkg-config libmaxminddb0 && \ | ||||||
|     pip install -r /requirements.txt --no-cache-dir && \ |     pip install poetry && \ | ||||||
|  |     poetry config virtualenvs.create false && \ | ||||||
|  |     poetry install --no-dev && \ | ||||||
|  |     rm -rf ~/.cache/pypoetry && \ | ||||||
|     apt-get remove --purge -y build-essential git && \ |     apt-get remove --purge -y build-essential git && \ | ||||||
|     apt-get autoremove --purge -y && \ |     apt-get autoremove --purge -y && \ | ||||||
|     apt-get clean && \ |     apt-get clean && \ | ||||||
|     rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \ |     rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \ | ||||||
|     adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \ |     adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \ | ||||||
|     mkdir /backups /certs && \ |     mkdir -p /backups /certs /media && \ | ||||||
|     chown authentik:authentik /backups /certs |     mkdir -p /authentik/.ssh && \ | ||||||
|  |     chown authentik:authentik /backups /certs /media /authentik/.ssh | ||||||
|  |  | ||||||
| COPY ./authentik/ /authentik | COPY ./authentik/ /authentik | ||||||
| COPY ./pyproject.toml / | COPY ./pyproject.toml / | ||||||
|  | |||||||
							
								
								
									
Makefile (26 lines changed)
| @@ -4,13 +4,16 @@ UID = $(shell id -u) | |||||||
| GID = $(shell id -g) | GID = $(shell id -g) | ||||||
| NPM_VERSION = $(shell python -m scripts.npm_version) | NPM_VERSION = $(shell python -m scripts.npm_version) | ||||||
|  |  | ||||||
| all: lint-fix lint test gen | all: lint-fix lint test gen web | ||||||
|  |  | ||||||
| test-integration: | test-integration: | ||||||
| 	coverage run manage.py test tests/integration | 	coverage run manage.py test tests/integration | ||||||
|  |  | ||||||
| test-e2e: | test-e2e-provider: | ||||||
| 	coverage run manage.py test tests/e2e | 	coverage run manage.py test tests/e2e/test_provider* | ||||||
|  |  | ||||||
|  | test-e2e-rest: | ||||||
|  | 	coverage run manage.py test tests/e2e/test_flows* tests/e2e/test_source* | ||||||
|  |  | ||||||
| test: | test: | ||||||
| 	coverage run manage.py test authentik | 	coverage run manage.py test authentik | ||||||
| @@ -32,6 +35,7 @@ lint-fix: | |||||||
| lint: | lint: | ||||||
| 	bandit -r authentik tests lifecycle -x node_modules | 	bandit -r authentik tests lifecycle -x node_modules | ||||||
| 	pylint authentik tests lifecycle | 	pylint authentik tests lifecycle | ||||||
|  | 	golangci-lint run -v | ||||||
|  |  | ||||||
| i18n-extract: i18n-extract-core web-extract | i18n-extract: i18n-extract-core web-extract | ||||||
|  |  | ||||||
| @@ -102,20 +106,24 @@ web-extract: | |||||||
| # These targets are use by GitHub actions to allow usage of matrix | # These targets are use by GitHub actions to allow usage of matrix | ||||||
| # which makes the YAML File a lot smaller | # which makes the YAML File a lot smaller | ||||||
|  |  | ||||||
| ci-pylint: | ci--meta-debug: | ||||||
|  | 	python -V | ||||||
|  | 	node --version | ||||||
|  |  | ||||||
|  | ci-pylint: ci--meta-debug | ||||||
| 	pylint authentik tests lifecycle | 	pylint authentik tests lifecycle | ||||||
|  |  | ||||||
| ci-black: | ci-black: ci--meta-debug | ||||||
| 	black --check authentik tests lifecycle | 	black --check authentik tests lifecycle | ||||||
|  |  | ||||||
| ci-isort: | ci-isort: ci--meta-debug | ||||||
| 	isort --check authentik tests lifecycle | 	isort --check authentik tests lifecycle | ||||||
|  |  | ||||||
| ci-bandit: | ci-bandit: ci--meta-debug | ||||||
| 	bandit -r authentik tests lifecycle | 	bandit -r authentik tests lifecycle | ||||||
|  |  | ||||||
| ci-pyright: | ci-pyright: ci--meta-debug | ||||||
| 	pyright e2e lifecycle | 	pyright e2e lifecycle | ||||||
|  |  | ||||||
| ci-pending-migrations: | ci-pending-migrations: ci--meta-debug | ||||||
| 	./manage.py makemigrations --check | 	./manage.py makemigrations --check | ||||||
|  | |||||||
							
								
								
									
Pipfile (67 lines changed)
| @@ -1,67 +0,0 @@ | |||||||
| [[source]] |  | ||||||
| name = "pypi" |  | ||||||
| url = "https://pypi.org/simple" |  | ||||||
| verify_ssl = true |  | ||||||
|  |  | ||||||
| [packages] |  | ||||||
| boto3 = "*" |  | ||||||
| celery = "*" |  | ||||||
| channels = "*" |  | ||||||
| channels-redis = "*" |  | ||||||
| codespell = "*" |  | ||||||
| colorama = "*" |  | ||||||
| dacite = "*" |  | ||||||
| deepmerge = "*" |  | ||||||
| defusedxml = "*" |  | ||||||
| django = "*" |  | ||||||
| django-dbbackup = { git = 'https://github.com/django-dbbackup/django-dbbackup.git', ref = '9d1909c30a3271c8c9c8450add30d6e0b996e145' } |  | ||||||
| django-filter = "*" |  | ||||||
| django-guardian = "*" |  | ||||||
| django-model-utils = "*" |  | ||||||
| django-otp = "*" |  | ||||||
| django-prometheus = "*" |  | ||||||
| django-redis = "*" |  | ||||||
| django-storages = "*" |  | ||||||
| djangorestframework = "*" |  | ||||||
| djangorestframework-guardian = "*" |  | ||||||
| docker = "*" |  | ||||||
| drf-spectacular = "*" |  | ||||||
| duo-client = "*" |  | ||||||
| facebook-sdk = "*" |  | ||||||
| geoip2 = "*" |  | ||||||
| gunicorn = "*" |  | ||||||
| kubernetes = "==v19.15.0" |  | ||||||
| ldap3 = "*" |  | ||||||
| lxml = "*" |  | ||||||
| packaging = "*" |  | ||||||
| psycopg2-binary = "*" |  | ||||||
| pycryptodome = "*" |  | ||||||
| pyjwt = "*" |  | ||||||
| pyyaml = "*" |  | ||||||
| requests-oauthlib = "*" |  | ||||||
| sentry-sdk = "*" |  | ||||||
| service_identity = "*" |  | ||||||
| structlog = "*" |  | ||||||
| swagger-spec-validator = "*" |  | ||||||
| twisted = "==21.7.0" |  | ||||||
| ua-parser = "*" |  | ||||||
| urllib3 = {extras = ["secure"],version = "*"} |  | ||||||
| uvicorn = {extras = ["standard"],version = "*"} |  | ||||||
| webauthn = "*" |  | ||||||
| xmlsec = "*" |  | ||||||
| flower = "*" |  | ||||||
|  |  | ||||||
| [dev-packages] |  | ||||||
| bandit = "*" |  | ||||||
| black = "==21.11b1" |  | ||||||
| bump2version = "*" |  | ||||||
| colorama = "*" |  | ||||||
| coverage = {extras = ["toml"],version = "*"} |  | ||||||
| pylint = "*" |  | ||||||
| pylint-django = "*" |  | ||||||
| pytest = "*" |  | ||||||
| pytest-django = "*" |  | ||||||
| pytest-randomly = "*" |  | ||||||
| requests-mock = "*" |  | ||||||
| selenium = "*" |  | ||||||
| importlib-metadata = "*" |  | ||||||
							
								
								
									
Pipfile.lock (2515 lines changed, generated): file diff suppressed because it is too large.
							
								
								
									
README.md (20 lines changed)
| @@ -38,3 +38,23 @@ See [Development Documentation](https://goauthentik.io/developer-docs/?utm_sourc | |||||||
| ## Security | ## Security | ||||||
|  |  | ||||||
| See [SECURITY.md](SECURITY.md) | See [SECURITY.md](SECURITY.md) | ||||||
|  |  | ||||||
|  | ## Sponsors | ||||||
|  |  | ||||||
|  | This project is proudly sponsored by: | ||||||
|  |  | ||||||
|  | <p> | ||||||
|  |     <a href="https://www.digitalocean.com/?utm_medium=opensource&utm_source=goauthentik.io"> | ||||||
|  |         <img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" width="201px"> | ||||||
|  |     </a> | ||||||
|  | </p> | ||||||
|  |  | ||||||
|  | DigitalOcean provides development and testing resources for authentik. | ||||||
|  |  | ||||||
|  | <p> | ||||||
|  |     <a href="https://www.netlify.com"> | ||||||
|  |         <img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" /> | ||||||
|  |     </a> | ||||||
|  | </p> | ||||||
|  |  | ||||||
|  | Netlify hosts the [goauthentik.io](goauthentik.io) site. | ||||||
|  | |||||||
| @@ -6,8 +6,8 @@ | |||||||
|  |  | ||||||
| | Version    | Supported          | | | Version    | Supported          | | ||||||
| | ---------- | ------------------ | | | ---------- | ------------------ | | ||||||
| | 2021.9.x   | :white_check_mark: | |  | ||||||
| | 2021.10.x  | :white_check_mark: | | | 2021.10.x  | :white_check_mark: | | ||||||
|  | | 2021.12.x  | :white_check_mark: | | ||||||
|  |  | ||||||
| ## Reporting a Vulnerability | ## Reporting a Vulnerability | ||||||
|  |  | ||||||
|  | |||||||
| @@ -1,3 +1,3 @@ | |||||||
| """authentik""" | """authentik""" | ||||||
| __version__ = "2021.12.1-rc3" | __version__ = "2021.12.5" | ||||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||||
|  | |||||||
| @ -1,13 +1,6 @@ | |||||||
| """authentik administration metrics""" | """authentik administration metrics""" | ||||||
| import time |  | ||||||
| from collections import Counter |  | ||||||
| from datetime import timedelta |  | ||||||
|  |  | ||||||
| from django.db.models import Count, ExpressionWrapper, F |  | ||||||
| from django.db.models.fields import DurationField |  | ||||||
| from django.db.models.functions import ExtractHour |  | ||||||
| from django.utils.timezone import now |  | ||||||
| from drf_spectacular.utils import extend_schema, extend_schema_field | from drf_spectacular.utils import extend_schema, extend_schema_field | ||||||
|  | from guardian.shortcuts import get_objects_for_user | ||||||
| from rest_framework.fields import IntegerField, SerializerMethodField | from rest_framework.fields import IntegerField, SerializerMethodField | ||||||
| from rest_framework.permissions import IsAdminUser | from rest_framework.permissions import IsAdminUser | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
| @ -15,31 +8,7 @@ from rest_framework.response import Response | |||||||
| from rest_framework.views import APIView | from rest_framework.views import APIView | ||||||
|  |  | ||||||
| from authentik.core.api.utils import PassiveSerializer | from authentik.core.api.utils import PassiveSerializer | ||||||
| from authentik.events.models import Event, EventAction | from authentik.events.models import EventAction | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_events_per_1h(**filter_kwargs) -> list[dict[str, int]]: |  | ||||||
|     """Get event count by hour in the last day, fill with zeros""" |  | ||||||
|     date_from = now() - timedelta(days=1) |  | ||||||
|     result = ( |  | ||||||
|         Event.objects.filter(created__gte=date_from, **filter_kwargs) |  | ||||||
|         .annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField())) |  | ||||||
|         .annotate(age_hours=ExtractHour("age")) |  | ||||||
|         .values("age_hours") |  | ||||||
|         .annotate(count=Count("pk")) |  | ||||||
|         .order_by("age_hours") |  | ||||||
|     ) |  | ||||||
|     data = Counter({int(d["age_hours"]): d["count"] for d in result}) |  | ||||||
|     results = [] |  | ||||||
|     _now = now() |  | ||||||
|     for hour in range(0, -24, -1): |  | ||||||
|         results.append( |  | ||||||
|             { |  | ||||||
|                 "x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000, |  | ||||||
|                 "y_cord": data[hour * -1], |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
|     return results |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class CoordinateSerializer(PassiveSerializer): | class CoordinateSerializer(PassiveSerializer): | ||||||
| @ -58,12 +27,22 @@ class LoginMetricsSerializer(PassiveSerializer): | |||||||
|     @extend_schema_field(CoordinateSerializer(many=True)) |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|     def get_logins_per_1h(self, _): |     def get_logins_per_1h(self, _): | ||||||
|         """Get successful logins per hour for the last 24 hours""" |         """Get successful logins per hour for the last 24 hours""" | ||||||
|         return get_events_per_1h(action=EventAction.LOGIN) |         user = self.context["user"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(user, "authentik_events.view_event") | ||||||
|  |             .filter(action=EventAction.LOGIN) | ||||||
|  |             .get_events_per_hour() | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     @extend_schema_field(CoordinateSerializer(many=True)) |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|     def get_logins_failed_per_1h(self, _): |     def get_logins_failed_per_1h(self, _): | ||||||
|         """Get failed logins per hour for the last 24 hours""" |         """Get failed logins per hour for the last 24 hours""" | ||||||
|         return get_events_per_1h(action=EventAction.LOGIN_FAILED) |         user = self.context["user"] | ||||||
|  |         return ( | ||||||
|  |             get_objects_for_user(user, "authentik_events.view_event") | ||||||
|  |             .filter(action=EventAction.LOGIN_FAILED) | ||||||
|  |             .get_events_per_hour() | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class AdministrationMetricsViewSet(APIView): | class AdministrationMetricsViewSet(APIView): | ||||||
| @ -75,4 +54,5 @@ class AdministrationMetricsViewSet(APIView): | |||||||
|     def get(self, request: Request) -> Response: |     def get(self, request: Request) -> Response: | ||||||
|         """Login Metrics per 1h""" |         """Login Metrics per 1h""" | ||||||
|         serializer = LoginMetricsSerializer(True) |         serializer = LoginMetricsSerializer(True) | ||||||
|  |         serializer.context["user"] = request.user | ||||||
|         return Response(serializer.data) |         return Response(serializer.data) | ||||||
|  | |||||||
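The metrics refactor above drops the module-level `get_events_per_1h` helper and instead filters events through guardian's `get_objects_for_user` before calling `get_events_per_hour()` on the resulting queryset, so counts only include events the requesting user is allowed to view. The sketch below shows roughly what such a queryset method could look like, reusing the aggregation logic of the removed helper; the `EventQuerySet` class name and its attachment to the `Event` model are assumptions, not taken from this diff.

```python
# Minimal sketch, assuming a custom queryset is attached to Event
# (e.g. objects = EventManager.from_queryset(EventQuerySet)()); names are illustrative.
import time
from collections import Counter
from datetime import timedelta

from django.db.models import Count, ExpressionWrapper, F, QuerySet
from django.db.models.fields import DurationField
from django.db.models.functions import ExtractHour
from django.utils.timezone import now


class EventQuerySet(QuerySet):
    """Event queryset with hour-bucketed aggregation (illustrative)."""

    def get_events_per_hour(self) -> list[dict[str, int]]:
        """Get event count by hour in the last day, fill with zeros."""
        date_from = now() - timedelta(days=1)
        result = (
            self.filter(created__gte=date_from)
            .annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField()))
            .annotate(age_hours=ExtractHour("age"))
            .values("age_hours")
            .annotate(count=Count("pk"))
            .order_by("age_hours")
        )
        data = Counter({int(d["age_hours"]): d["count"] for d in result})
        results = []
        _now = now()
        for hour in range(0, -24, -1):
            results.append(
                {
                    "x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000,
                    "y_cord": data[hour * -1],
                }
            )
        return results
```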
| @ -95,7 +95,7 @@ class TaskViewSet(ViewSet): | |||||||
|                 _("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}), |                 _("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}), | ||||||
|             ) |             ) | ||||||
|             return Response(status=204) |             return Response(status=204) | ||||||
|         except ImportError:  # pragma: no cover |         except (ImportError, AttributeError):  # pragma: no cover | ||||||
|             # if we get an import error, the module path has probably changed |             # if we get an import or attribute error, the module path has probably changed | ||||||
|             task.delete() |             task.delete() | ||||||
|             return Response(status=500) |             return Response(status=500) | ||||||
|  | |||||||
| @ -1,7 +1,7 @@ | |||||||
| """API Authentication""" | """API Authentication""" | ||||||
| from base64 import b64decode | from base64 import b64decode | ||||||
| from binascii import Error | from binascii import Error | ||||||
| from typing import Any, Optional, Union | from typing import Any, Optional | ||||||
|  |  | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
| from rest_framework.authentication import BaseAuthentication, get_authorization_header | from rest_framework.authentication import BaseAuthentication, get_authorization_header | ||||||
| @ -69,7 +69,7 @@ def token_secret_key(value: str) -> Optional[User]: | |||||||
| class TokenAuthentication(BaseAuthentication): | class TokenAuthentication(BaseAuthentication): | ||||||
|     """Token-based authentication using HTTP Bearer authentication""" |     """Token-based authentication using HTTP Bearer authentication""" | ||||||
|  |  | ||||||
|     def authenticate(self, request: Request) -> Union[tuple[User, Any], None]: |     def authenticate(self, request: Request) -> tuple[User, Any] | None: | ||||||
|         """Token-based authentication using HTTP Bearer authentication""" |         """Token-based authentication using HTTP Bearer authentication""" | ||||||
|         auth = get_authorization_header(request) |         auth = get_authorization_header(request) | ||||||
|  |  | ||||||
|  | |||||||
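The annotation change from `Union[tuple[User, Any], None]` to `tuple[User, Any] | None` uses PEP 604 union syntax, which only evaluates at runtime on Python 3.10+; on older interpreters it parses in annotations when evaluation is deferred. A minimal, self-contained illustration (the function below is hypothetical and not part of authentik):

```python
from __future__ import annotations  # defers evaluation, so `X | None` parses on Python < 3.10 too

from typing import Any


def authenticate_token(token: str) -> tuple[str, Any] | None:
    """Return a (subject, extra) pair on success, or None when no token is given."""
    if not token:
        return None
    return token, None


print(authenticate_token(""))        # None
print(authenticate_token("abc123"))  # ('abc123', None)
```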
| @ -46,11 +46,7 @@ from authentik.policies.expiry.api import PasswordExpiryPolicyViewSet | |||||||
| from authentik.policies.expression.api import ExpressionPolicyViewSet | from authentik.policies.expression.api import ExpressionPolicyViewSet | ||||||
| from authentik.policies.hibp.api import HaveIBeenPwendPolicyViewSet | from authentik.policies.hibp.api import HaveIBeenPwendPolicyViewSet | ||||||
| from authentik.policies.password.api import PasswordPolicyViewSet | from authentik.policies.password.api import PasswordPolicyViewSet | ||||||
| from authentik.policies.reputation.api import ( | from authentik.policies.reputation.api import ReputationPolicyViewSet, ReputationViewSet | ||||||
|     IPReputationViewSet, |  | ||||||
|     ReputationPolicyViewSet, |  | ||||||
|     UserReputationViewSet, |  | ||||||
| ) |  | ||||||
| from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet | from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet | ||||||
| from authentik.providers.oauth2.api.provider import OAuth2ProviderViewSet | from authentik.providers.oauth2.api.provider import OAuth2ProviderViewSet | ||||||
| from authentik.providers.oauth2.api.scope import ScopeMappingViewSet | from authentik.providers.oauth2.api.scope import ScopeMappingViewSet | ||||||
| @ -151,8 +147,7 @@ router.register("policies/event_matcher", EventMatcherPolicyViewSet) | |||||||
| router.register("policies/haveibeenpwned", HaveIBeenPwendPolicyViewSet) | router.register("policies/haveibeenpwned", HaveIBeenPwendPolicyViewSet) | ||||||
| router.register("policies/password_expiry", PasswordExpiryPolicyViewSet) | router.register("policies/password_expiry", PasswordExpiryPolicyViewSet) | ||||||
| router.register("policies/password", PasswordPolicyViewSet) | router.register("policies/password", PasswordPolicyViewSet) | ||||||
| router.register("policies/reputation/users", UserReputationViewSet) | router.register("policies/reputation/scores", ReputationViewSet) | ||||||
| router.register("policies/reputation/ips", IPReputationViewSet) |  | ||||||
| router.register("policies/reputation", ReputationPolicyViewSet) | router.register("policies/reputation", ReputationPolicyViewSet) | ||||||
|  |  | ||||||
| router.register("providers/all", ProviderViewSet) | router.register("providers/all", ProviderViewSet) | ||||||
|  | |||||||
| @ -5,6 +5,7 @@ from django.http.response import HttpResponseBadRequest | |||||||
| from django.shortcuts import get_object_or_404 | from django.shortcuts import get_object_or_404 | ||||||
| from drf_spectacular.types import OpenApiTypes | from drf_spectacular.types import OpenApiTypes | ||||||
| from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema | from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema | ||||||
|  | from guardian.shortcuts import get_objects_for_user | ||||||
| from rest_framework.decorators import action | from rest_framework.decorators import action | ||||||
| from rest_framework.fields import ReadOnlyField | from rest_framework.fields import ReadOnlyField | ||||||
| from rest_framework.parsers import MultiPartParser | from rest_framework.parsers import MultiPartParser | ||||||
| @ -15,7 +16,7 @@ from rest_framework.viewsets import ModelViewSet | |||||||
| from rest_framework_guardian.filters import ObjectPermissionsFilter | from rest_framework_guardian.filters import ObjectPermissionsFilter | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik.admin.api.metrics import CoordinateSerializer, get_events_per_1h | from authentik.admin.api.metrics import CoordinateSerializer | ||||||
| from authentik.api.decorators import permission_required | from authentik.api.decorators import permission_required | ||||||
| from authentik.core.api.providers import ProviderSerializer | from authentik.core.api.providers import ProviderSerializer | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| @ -239,8 +240,10 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet): | |||||||
|         """Metrics for application logins""" |         """Metrics for application logins""" | ||||||
|         app = self.get_object() |         app = self.get_object() | ||||||
|         return Response( |         return Response( | ||||||
|             get_events_per_1h( |             get_objects_for_user(request.user, "authentik_events.view_event") | ||||||
|  |             .filter( | ||||||
|                 action=EventAction.AUTHORIZE_APPLICATION, |                 action=EventAction.AUTHORIZE_APPLICATION, | ||||||
|                 context__authorized_application__pk=app.pk.hex, |                 context__authorized_application__pk=app.pk.hex, | ||||||
|             ) |             ) | ||||||
|  |             .get_events_per_hour() | ||||||
|         ) |         ) | ||||||
|  | |||||||
| @ -1,9 +1,11 @@ | |||||||
| """Groups API Viewset""" | """Groups API Viewset""" | ||||||
|  | from json import loads | ||||||
|  |  | ||||||
| from django.db.models.query import QuerySet | from django.db.models.query import QuerySet | ||||||
| from django_filters.filters import ModelMultipleChoiceFilter | from django_filters.filters import CharFilter, ModelMultipleChoiceFilter | ||||||
| from django_filters.filterset import FilterSet | from django_filters.filterset import FilterSet | ||||||
| from rest_framework.fields import CharField, JSONField | from rest_framework.fields import CharField, JSONField | ||||||
| from rest_framework.serializers import ListSerializer, ModelSerializer | from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError | ||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
| from rest_framework_guardian.filters import ObjectPermissionsFilter | from rest_framework_guardian.filters import ObjectPermissionsFilter | ||||||
|  |  | ||||||
| @ -62,6 +64,13 @@ class GroupSerializer(ModelSerializer): | |||||||
| class GroupFilter(FilterSet): | class GroupFilter(FilterSet): | ||||||
|     """Filter for groups""" |     """Filter for groups""" | ||||||
|  |  | ||||||
|  |     attributes = CharFilter( | ||||||
|  |         field_name="attributes", | ||||||
|  |         lookup_expr="", | ||||||
|  |         label="Attributes", | ||||||
|  |         method="filter_attributes", | ||||||
|  |     ) | ||||||
|  |  | ||||||
|     members_by_username = ModelMultipleChoiceFilter( |     members_by_username = ModelMultipleChoiceFilter( | ||||||
|         field_name="users__username", |         field_name="users__username", | ||||||
|         to_field_name="username", |         to_field_name="username", | ||||||
| @ -72,10 +81,28 @@ class GroupFilter(FilterSet): | |||||||
|         queryset=User.objects.all(), |         queryset=User.objects.all(), | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |     # pylint: disable=unused-argument | ||||||
|  |     def filter_attributes(self, queryset, name, value): | ||||||
|  |         """Filter attributes by query args""" | ||||||
|  |         try: | ||||||
|  |             value = loads(value) | ||||||
|  |         except ValueError: | ||||||
|  |             raise ValidationError(detail="filter: failed to parse JSON") | ||||||
|  |         if not isinstance(value, dict): | ||||||
|  |             raise ValidationError(detail="filter: value must be key:value mapping") | ||||||
|  |         qs = {} | ||||||
|  |         for key, _value in value.items(): | ||||||
|  |             qs[f"attributes__{key}"] = _value | ||||||
|  |         try: | ||||||
|  |             _ = len(queryset.filter(**qs)) | ||||||
|  |             return queryset.filter(**qs) | ||||||
|  |         except ValueError: | ||||||
|  |             return queryset | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|  |  | ||||||
|         model = Group |         model = Group | ||||||
|         fields = ["name", "is_superuser", "members_by_pk", "members_by_username"] |         fields = ["name", "is_superuser", "members_by_pk", "attributes", "members_by_username"] | ||||||
|  |  | ||||||
|  |  | ||||||
| class GroupViewSet(UsedByMixin, ModelViewSet): | class GroupViewSet(UsedByMixin, ModelViewSet): | ||||||
|  | |||||||
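The new `attributes` filter on `GroupFilter` (mirrored on `UsersFilter` further down) parses its query parameter as JSON and expands each key into an `attributes__<key>` lookup, falling back to the unfiltered queryset when the lookup cannot be evaluated. A hedged usage sketch against the groups endpoint follows; the host, API prefix, token, and attribute values are assumptions:

```python
import json

import requests

# Illustrative request only: adjust host, API prefix and token for a real deployment.
response = requests.get(
    "https://authentik.example.com/api/v3/core/groups/",
    params={"attributes": json.dumps({"department": "engineering"})},
    headers={"Authorization": "Bearer <api-token>"},
    timeout=10,
)
response.raise_for_status()
for group in response.json()["results"]:
    print(group["name"], group["attributes"])
```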
| @ -104,14 +104,14 @@ class SourceViewSet( | |||||||
|         ) |         ) | ||||||
|         matching_sources: list[UserSettingSerializer] = [] |         matching_sources: list[UserSettingSerializer] = [] | ||||||
|         for source in _all_sources: |         for source in _all_sources: | ||||||
|             user_settings = source.ui_user_settings |             user_settings = source.ui_user_settings() | ||||||
|             if not user_settings: |             if not user_settings: | ||||||
|                 continue |                 continue | ||||||
|             policy_engine = PolicyEngine(source, request.user, request) |             policy_engine = PolicyEngine(source, request.user, request) | ||||||
|             policy_engine.build() |             policy_engine.build() | ||||||
|             if not policy_engine.passing: |             if not policy_engine.passing: | ||||||
|                 continue |                 continue | ||||||
|             source_settings = source.ui_user_settings |             source_settings = source.ui_user_settings() | ||||||
|             source_settings.initial_data["object_uid"] = source.slug |             source_settings.initial_data["object_uid"] = source.slug | ||||||
|             if not source_settings.is_valid(): |             if not source_settings.is_valid(): | ||||||
|                 LOGGER.warning(source_settings.errors) |                 LOGGER.warning(source_settings.errors) | ||||||
|  | |||||||
| @ -3,6 +3,7 @@ from datetime import timedelta | |||||||
| from json import loads | from json import loads | ||||||
| from typing import Optional | from typing import Optional | ||||||
|  |  | ||||||
|  | from django.contrib.auth import update_session_auth_hash | ||||||
| from django.db.models.query import QuerySet | from django.db.models.query import QuerySet | ||||||
| from django.db.transaction import atomic | from django.db.transaction import atomic | ||||||
| from django.db.utils import IntegrityError | from django.db.utils import IntegrityError | ||||||
| @ -38,7 +39,7 @@ from rest_framework.viewsets import ModelViewSet | |||||||
| from rest_framework_guardian.filters import ObjectPermissionsFilter | from rest_framework_guardian.filters import ObjectPermissionsFilter | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik.admin.api.metrics import CoordinateSerializer, get_events_per_1h | from authentik.admin.api.metrics import CoordinateSerializer | ||||||
| from authentik.api.decorators import permission_required | from authentik.api.decorators import permission_required | ||||||
| from authentik.core.api.groups import GroupSerializer | from authentik.core.api.groups import GroupSerializer | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| @ -46,6 +47,7 @@ from authentik.core.api.utils import LinkSerializer, PassiveSerializer, is_dict | |||||||
| from authentik.core.middleware import SESSION_IMPERSONATE_ORIGINAL_USER, SESSION_IMPERSONATE_USER | from authentik.core.middleware import SESSION_IMPERSONATE_ORIGINAL_USER, SESSION_IMPERSONATE_USER | ||||||
| from authentik.core.models import ( | from authentik.core.models import ( | ||||||
|     USER_ATTRIBUTE_CHANGE_EMAIL, |     USER_ATTRIBUTE_CHANGE_EMAIL, | ||||||
|  |     USER_ATTRIBUTE_CHANGE_NAME, | ||||||
|     USER_ATTRIBUTE_CHANGE_USERNAME, |     USER_ATTRIBUTE_CHANGE_USERNAME, | ||||||
|     USER_ATTRIBUTE_SA, |     USER_ATTRIBUTE_SA, | ||||||
|     USER_ATTRIBUTE_TOKEN_EXPIRING, |     USER_ATTRIBUTE_TOKEN_EXPIRING, | ||||||
| @ -134,6 +136,16 @@ class UserSelfSerializer(ModelSerializer): | |||||||
|             raise ValidationError("Not allowed to change email.") |             raise ValidationError("Not allowed to change email.") | ||||||
|         return email |         return email | ||||||
|  |  | ||||||
|  |     def validate_name(self, name: str): | ||||||
|  |         """Check if the user is allowed to change their name""" | ||||||
|  |         if self.instance.group_attributes().get( | ||||||
|  |             USER_ATTRIBUTE_CHANGE_NAME, CONFIG.y_bool("default_user_change_name", True) | ||||||
|  |         ): | ||||||
|  |             return name | ||||||
|  |         if name != self.instance.name: | ||||||
|  |             raise ValidationError("Not allowed to change name.") | ||||||
|  |         return name | ||||||
|  |  | ||||||
|     def validate_username(self, username: str): |     def validate_username(self, username: str): | ||||||
|         """Check if the user is allowed to change their username""" |         """Check if the user is allowed to change their username""" | ||||||
|         if self.instance.group_attributes().get( |         if self.instance.group_attributes().get( | ||||||
| @ -144,6 +156,13 @@ class UserSelfSerializer(ModelSerializer): | |||||||
|             raise ValidationError("Not allowed to change username.") |             raise ValidationError("Not allowed to change username.") | ||||||
|         return username |         return username | ||||||
|  |  | ||||||
|  |     def save(self, **kwargs): | ||||||
|  |         if self.instance: | ||||||
|  |             attributes: dict = self.instance.attributes | ||||||
|  |             attributes.update(self.validated_data.get("attributes", {})) | ||||||
|  |             self.validated_data["attributes"] = attributes | ||||||
|  |         return super().save(**kwargs) | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|  |  | ||||||
|         model = User |         model = User | ||||||
| @ -184,19 +203,31 @@ class UserMetricsSerializer(PassiveSerializer): | |||||||
|     def get_logins_per_1h(self, _): |     def get_logins_per_1h(self, _): | ||||||
|         """Get successful logins per hour for the last 24 hours""" |         """Get successful logins per hour for the last 24 hours""" | ||||||
|         user = self.context["user"] |         user = self.context["user"] | ||||||
|         return get_events_per_1h(action=EventAction.LOGIN, user__pk=user.pk) |         return ( | ||||||
|  |             get_objects_for_user(user, "authentik_events.view_event") | ||||||
|  |             .filter(action=EventAction.LOGIN, user__pk=user.pk) | ||||||
|  |             .get_events_per_hour() | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     @extend_schema_field(CoordinateSerializer(many=True)) |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|     def get_logins_failed_per_1h(self, _): |     def get_logins_failed_per_1h(self, _): | ||||||
|         """Get failed logins per hour for the last 24 hours""" |         """Get failed logins per hour for the last 24 hours""" | ||||||
|         user = self.context["user"] |         user = self.context["user"] | ||||||
|         return get_events_per_1h(action=EventAction.LOGIN_FAILED, context__username=user.username) |         return ( | ||||||
|  |             get_objects_for_user(user, "authentik_events.view_event") | ||||||
|  |             .filter(action=EventAction.LOGIN_FAILED, context__username=user.username) | ||||||
|  |             .get_events_per_hour() | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     @extend_schema_field(CoordinateSerializer(many=True)) |     @extend_schema_field(CoordinateSerializer(many=True)) | ||||||
|     def get_authorizations_per_1h(self, _): |     def get_authorizations_per_1h(self, _): | ||||||
|         """Get failed logins per hour for the last 24 hours""" |         """Get failed logins per hour for the last 24 hours""" | ||||||
|         user = self.context["user"] |         user = self.context["user"] | ||||||
|         return get_events_per_1h(action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk) |         return ( | ||||||
|  |             get_objects_for_user(user, "authentik_events.view_event") | ||||||
|  |             .filter(action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk) | ||||||
|  |             .get_events_per_hour() | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class UsersFilter(FilterSet): | class UsersFilter(FilterSet): | ||||||
| @ -233,7 +264,11 @@ class UsersFilter(FilterSet): | |||||||
|         qs = {} |         qs = {} | ||||||
|         for key, _value in value.items(): |         for key, _value in value.items(): | ||||||
|             qs[f"attributes__{key}"] = _value |             qs[f"attributes__{key}"] = _value | ||||||
|         return queryset.filter(**qs) |         try: | ||||||
|  |             _ = len(queryset.filter(**qs)) | ||||||
|  |             return queryset.filter(**qs) | ||||||
|  |         except ValueError: | ||||||
|  |             return queryset | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         model = User |         model = User | ||||||
| @ -343,6 +378,35 @@ class UserViewSet(UsedByMixin, ModelViewSet): | |||||||
|             ).data |             ).data | ||||||
|         return Response(serializer.initial_data) |         return Response(serializer.initial_data) | ||||||
|  |  | ||||||
|  |     @permission_required("authentik_core.reset_user_password") | ||||||
|  |     @extend_schema( | ||||||
|  |         request=inline_serializer( | ||||||
|  |             "UserPasswordSetSerializer", | ||||||
|  |             { | ||||||
|  |                 "password": CharField(required=True), | ||||||
|  |             }, | ||||||
|  |         ), | ||||||
|  |         responses={ | ||||||
|  |             204: "", | ||||||
|  |             400: "", | ||||||
|  |         }, | ||||||
|  |     ) | ||||||
|  |     @action(detail=True, methods=["POST"]) | ||||||
|  |     # pylint: disable=invalid-name, unused-argument | ||||||
|  |     def set_password(self, request: Request, pk: int) -> Response: | ||||||
|  |         """Set password for user""" | ||||||
|  |         user: User = self.get_object() | ||||||
|  |         try: | ||||||
|  |             user.set_password(request.data.get("password")) | ||||||
|  |             user.save() | ||||||
|  |         except (ValidationError, IntegrityError) as exc: | ||||||
|  |             LOGGER.debug("Failed to set password", exc=exc) | ||||||
|  |             return Response(status=400) | ||||||
|  |         if user.pk == request.user.pk and SESSION_IMPERSONATE_USER not in self.request.session: | ||||||
|  |             LOGGER.debug("Updating session hash after password change") | ||||||
|  |             update_session_auth_hash(self.request, user) | ||||||
|  |         return Response(status=204) | ||||||
|  |  | ||||||
|     @extend_schema(request=UserSelfSerializer, responses={200: SessionUserSerializer(many=False)}) |     @extend_schema(request=UserSelfSerializer, responses={200: SessionUserSerializer(many=False)}) | ||||||
|     @action( |     @action( | ||||||
|         methods=["PUT"], |         methods=["PUT"], | ||||||
|  | |||||||
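The new `set_password` action above lets a caller holding the `authentik_core.reset_user_password` permission set another user's password, returning 204 on success and 400 when validation or an integrity error rejects it, and refreshes the caller's own session hash when they change their own password outside of impersonation. A hedged usage sketch; host, API prefix, user primary key and token are assumptions:

```python
import requests

# Illustrative request only: host, API prefix, user primary key and token are assumptions.
response = requests.post(
    "https://authentik.example.com/api/v3/core/users/42/set_password/",
    json={"password": "a-sufficiently-strong-passphrase"},
    headers={"Authorization": "Bearer <api-token>"},
    timeout=10,
)
# 204 means the password was set; 400 means validation (or an integrity error) rejected it.
print(response.status_code)
```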
| @ -5,6 +5,7 @@ from typing import Callable | |||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
|  |  | ||||||
| from django.http import HttpRequest, HttpResponse | from django.http import HttpRequest, HttpResponse | ||||||
|  | from sentry_sdk.api import set_tag | ||||||
|  |  | ||||||
| SESSION_IMPERSONATE_USER = "authentik_impersonate_user" | SESSION_IMPERSONATE_USER = "authentik_impersonate_user" | ||||||
| SESSION_IMPERSONATE_ORIGINAL_USER = "authentik_impersonate_original_user" | SESSION_IMPERSONATE_ORIGINAL_USER = "authentik_impersonate_original_user" | ||||||
| @ -50,6 +51,7 @@ class RequestIDMiddleware: | |||||||
|                 "request_id": request_id, |                 "request_id": request_id, | ||||||
|                 "host": request.get_host(), |                 "host": request.get_host(), | ||||||
|             } |             } | ||||||
|  |             set_tag("authentik.request_id", request_id) | ||||||
|         response = self.get_response(request) |         response = self.get_response(request) | ||||||
|         response[RESPONSE_HEADER_ID] = request.request_id |         response[RESPONSE_HEADER_ID] = request.request_id | ||||||
|         setattr(response, "ak_context", {}) |         setattr(response, "ak_context", {}) | ||||||
|  | |||||||
| @ -15,7 +15,6 @@ import authentik.lib.models | |||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_sessions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | def migrate_sessions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|     from django.contrib.sessions.backends.cache import KEY_PREFIX |     from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||||
|     from django.core.cache import cache |     from django.core.cache import cache | ||||||
|  |  | ||||||
|  | |||||||
| @ -12,7 +12,6 @@ import authentik.core.models | |||||||
|  |  | ||||||
|  |  | ||||||
| def migrate_sessions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | def migrate_sessions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||||
|     db_alias = schema_editor.connection.alias |  | ||||||
|     from django.contrib.sessions.backends.cache import KEY_PREFIX |     from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||||
|     from django.core.cache import cache |     from django.core.cache import cache | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,12 +1,13 @@ | |||||||
| """authentik core models""" | """authentik core models""" | ||||||
| from datetime import timedelta | from datetime import timedelta | ||||||
| from hashlib import md5, sha256 | from hashlib import md5, sha256 | ||||||
| from typing import Any, Optional, Type | from typing import Any, Optional | ||||||
| from urllib.parse import urlencode | from urllib.parse import urlencode | ||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
|  |  | ||||||
| from deepmerge import always_merger | from deepmerge import always_merger | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
|  | from django.contrib.auth.hashers import check_password | ||||||
| from django.contrib.auth.models import AbstractUser | from django.contrib.auth.models import AbstractUser | ||||||
| from django.contrib.auth.models import UserManager as DjangoUserManager | from django.contrib.auth.models import UserManager as DjangoUserManager | ||||||
| from django.db import models | from django.db import models | ||||||
| @ -38,6 +39,7 @@ USER_ATTRIBUTE_SA = "goauthentik.io/user/service-account" | |||||||
| USER_ATTRIBUTE_SOURCES = "goauthentik.io/user/sources" | USER_ATTRIBUTE_SOURCES = "goauthentik.io/user/sources" | ||||||
| USER_ATTRIBUTE_TOKEN_EXPIRING = "goauthentik.io/user/token-expires"  # nosec | USER_ATTRIBUTE_TOKEN_EXPIRING = "goauthentik.io/user/token-expires"  # nosec | ||||||
| USER_ATTRIBUTE_CHANGE_USERNAME = "goauthentik.io/user/can-change-username" | USER_ATTRIBUTE_CHANGE_USERNAME = "goauthentik.io/user/can-change-username" | ||||||
|  | USER_ATTRIBUTE_CHANGE_NAME = "goauthentik.io/user/can-change-name" | ||||||
| USER_ATTRIBUTE_CHANGE_EMAIL = "goauthentik.io/user/can-change-email" | USER_ATTRIBUTE_CHANGE_EMAIL = "goauthentik.io/user/can-change-email" | ||||||
| USER_ATTRIBUTE_CAN_OVERRIDE_IP = "goauthentik.io/user/override-ips" | USER_ATTRIBUTE_CAN_OVERRIDE_IP = "goauthentik.io/user/override-ips" | ||||||
|  |  | ||||||
| @ -160,6 +162,22 @@ class User(GuardianUserMixin, AbstractUser): | |||||||
|         self.password_change_date = now() |         self.password_change_date = now() | ||||||
|         return super().set_password(password) |         return super().set_password(password) | ||||||
|  |  | ||||||
|  |     def check_password(self, raw_password: str) -> bool: | ||||||
|  |         """ | ||||||
|  |         Return a boolean of whether the raw_password was correct. Handles | ||||||
|  |         hashing formats behind the scenes. | ||||||
|  |  | ||||||
|  |         Slightly changed version which doesn't send a signal for such internal hash upgrades | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         def setter(raw_password): | ||||||
|  |             self.set_password(raw_password, signal=False) | ||||||
|  |             # Password hash upgrades shouldn't be considered password changes. | ||||||
|  |             self._password = None | ||||||
|  |             self.save(update_fields=["password"]) | ||||||
|  |  | ||||||
|  |         return check_password(raw_password, self.password, setter) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def uid(self) -> str: |     def uid(self) -> str: | ||||||
|         """Generate a globall unique UID, based on the user ID and the hashed secret key""" |         """Generate a globall unique UID, based on the user ID and the hashed secret key""" | ||||||
| @ -224,7 +242,7 @@ class Provider(SerializerModel): | |||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> Type[Serializer]: |     def serializer(self) -> type[Serializer]: | ||||||
|         """Get serializer for this model""" |         """Get serializer for this model""" | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
| @ -270,15 +288,21 @@ class Application(PolicyBindingModel): | |||||||
|         """Get launch URL if set, otherwise attempt to get launch URL based on provider.""" |         """Get launch URL if set, otherwise attempt to get launch URL based on provider.""" | ||||||
|         if self.meta_launch_url: |         if self.meta_launch_url: | ||||||
|             return self.meta_launch_url |             return self.meta_launch_url | ||||||
|         if self.provider: |         if provider := self.get_provider(): | ||||||
|             return self.get_provider().launch_url |             return provider.launch_url | ||||||
|         return None |         return None | ||||||
|  |  | ||||||
|     def get_provider(self) -> Optional[Provider]: |     def get_provider(self) -> Optional[Provider]: | ||||||
|         """Get casted provider instance""" |         """Get casted provider instance""" | ||||||
|         if not self.provider: |         if not self.provider: | ||||||
|             return None |             return None | ||||||
|         return Provider.objects.get_subclass(pk=self.provider.pk) |         # if the Application class has been cache, self.provider is set | ||||||
|  |         # but doing a direct query lookup will fail. | ||||||
|  |         # In that case, just return None | ||||||
|  |         try: | ||||||
|  |             return Provider.objects.get_subclass(pk=self.provider.pk) | ||||||
|  |         except Provider.DoesNotExist: | ||||||
|  |             return None | ||||||
|  |  | ||||||
|     def __str__(self): |     def __str__(self): | ||||||
|         return self.name |         return self.name | ||||||
| @ -359,13 +383,11 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel): | |||||||
|         """Return component used to edit this object""" |         """Return component used to edit this object""" | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     @property |     def ui_login_button(self, request: HttpRequest) -> Optional[UILoginButton]: | ||||||
|     def ui_login_button(self) -> Optional[UILoginButton]: |  | ||||||
|         """If source uses a http-based flow, return UI Information about the login |         """If source uses a http-based flow, return UI Information about the login | ||||||
|         button. If source doesn't use http-based flow, return None.""" |         button. If source doesn't use http-based flow, return None.""" | ||||||
|         return None |         return None | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def ui_user_settings(self) -> Optional[UserSettingSerializer]: |     def ui_user_settings(self) -> Optional[UserSettingSerializer]: | ||||||
|         """Entrypoint to integrate with User settings. Can either return None if no |         """Entrypoint to integrate with User settings. Can either return None if no | ||||||
|         user settings are available, or UserSettingSerializer.""" |         user settings are available, or UserSettingSerializer.""" | ||||||
| @ -452,6 +474,14 @@ class Token(ManagedModel, ExpiringModel): | |||||||
|         """Handler which is called when this object is expired.""" |         """Handler which is called when this object is expired.""" | ||||||
|         from authentik.events.models import Event, EventAction |         from authentik.events.models import Event, EventAction | ||||||
|  |  | ||||||
|  |         if self.intent in [ | ||||||
|  |             TokenIntents.INTENT_RECOVERY, | ||||||
|  |             TokenIntents.INTENT_VERIFICATION, | ||||||
|  |             TokenIntents.INTENT_APP_PASSWORD, | ||||||
|  |         ]: | ||||||
|  |             super().expire_action(*args, **kwargs) | ||||||
|  |             return | ||||||
|  |  | ||||||
|         self.key = default_token_key() |         self.key = default_token_key() | ||||||
|         self.expires = default_token_duration() |         self.expires = default_token_duration() | ||||||
|         self.save(*args, **kwargs) |         self.save(*args, **kwargs) | ||||||
| @ -493,7 +523,7 @@ class PropertyMapping(SerializerModel, ManagedModel): | |||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> Type[Serializer]: |     def serializer(self) -> type[Serializer]: | ||||||
|         """Get serializer for this model""" |         """Get serializer for this model""" | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|  | |||||||
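The `check_password` override added above follows the contract of Django's `django.contrib.auth.hashers.check_password(password, encoded, setter)`: the `setter` callback is invoked only when the stored hash needs to be re-encoded, and the override uses it to re-hash without treating the upgrade as a user-initiated password change (it assumes `set_password` accepts a `signal` keyword, which is not visible in this hunk). A small standalone sketch of that contract:

```python
# Standalone sketch of the hasher contract the override relies on; it does not
# touch authentik models. Requires a configured Django settings module.
from django.contrib.auth.hashers import check_password, make_password

stored_hash = make_password("s3cret")  # encoded with the currently preferred hasher


def rehash(raw_password: str) -> None:
    # Only called when `stored_hash` was produced with outdated parameters;
    # in the model override this path saves the new hash without emitting a signal.
    print("re-encoding password hash")


# Returns True/False for the password check; may trigger `rehash` as a side effect.
print(check_password("s3cret", stored_hash, setter=rehash))
print(check_password("wrong", stored_hash, setter=rehash))
```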
| @ -1,5 +1,5 @@ | |||||||
| """authentik core signals""" | """authentik core signals""" | ||||||
| from typing import TYPE_CHECKING, Type | from typing import TYPE_CHECKING | ||||||
|  |  | ||||||
| from django.contrib.auth.signals import user_logged_in, user_logged_out | from django.contrib.auth.signals import user_logged_in, user_logged_out | ||||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||||
| @ -62,7 +62,7 @@ def user_logged_out_session(sender, request: HttpRequest, user: "User", **_): | |||||||
|  |  | ||||||
|  |  | ||||||
| @receiver(pre_delete) | @receiver(pre_delete) | ||||||
| def authenticated_session_delete(sender: Type[Model], instance: "AuthenticatedSession", **_): | def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_): | ||||||
|     """Delete session when authenticated session is deleted""" |     """Delete session when authenticated session is deleted""" | ||||||
|     from authentik.core.models import AuthenticatedSession |     from authentik.core.models import AuthenticatedSession | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,6 +1,6 @@ | |||||||
| """Source decision helper""" | """Source decision helper""" | ||||||
| from enum import Enum | from enum import Enum | ||||||
| from typing import Any, Optional, Type | from typing import Any, Optional | ||||||
|  |  | ||||||
| from django.contrib import messages | from django.contrib import messages | ||||||
| from django.db import IntegrityError | from django.db import IntegrityError | ||||||
| @ -14,6 +14,7 @@ from structlog.stdlib import get_logger | |||||||
| from authentik.core.models import Source, SourceUserMatchingModes, User, UserSourceConnection | from authentik.core.models import Source, SourceUserMatchingModes, User, UserSourceConnection | ||||||
| from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION, PostUserEnrollmentStage | from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION, PostUserEnrollmentStage | ||||||
| from authentik.events.models import Event, EventAction | from authentik.events.models import Event, EventAction | ||||||
|  | from authentik.flows.exceptions import FlowNonApplicableException | ||||||
| from authentik.flows.models import Flow, Stage, in_memory_stage | from authentik.flows.models import Flow, Stage, in_memory_stage | ||||||
| from authentik.flows.planner import ( | from authentik.flows.planner import ( | ||||||
|     PLAN_CONTEXT_PENDING_USER, |     PLAN_CONTEXT_PENDING_USER, | ||||||
| @ -24,6 +25,8 @@ from authentik.flows.planner import ( | |||||||
| ) | ) | ||||||
| from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET, SESSION_KEY_PLAN | from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET, SESSION_KEY_PLAN | ||||||
| from authentik.lib.utils.urls import redirect_with_qs | from authentik.lib.utils.urls import redirect_with_qs | ||||||
|  | from authentik.policies.denied import AccessDeniedResponse | ||||||
|  | from authentik.policies.types import PolicyResult | ||||||
| from authentik.policies.utils import delete_none_keys | from authentik.policies.utils import delete_none_keys | ||||||
| from authentik.stages.password import BACKEND_INBUILT | from authentik.stages.password import BACKEND_INBUILT | ||||||
| from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND | from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND | ||||||
| @ -50,7 +53,10 @@ class SourceFlowManager: | |||||||
|  |  | ||||||
|     identifier: str |     identifier: str | ||||||
|  |  | ||||||
|     connection_type: Type[UserSourceConnection] = UserSourceConnection |     connection_type: type[UserSourceConnection] = UserSourceConnection | ||||||
|  |  | ||||||
|  |     enroll_info: dict[str, Any] | ||||||
|  |     policy_context: dict[str, Any] | ||||||
|  |  | ||||||
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
| @ -64,6 +70,7 @@ class SourceFlowManager: | |||||||
|         self.identifier = identifier |         self.identifier = identifier | ||||||
|         self.enroll_info = enroll_info |         self.enroll_info = enroll_info | ||||||
|         self._logger = get_logger().bind(source=source, identifier=identifier) |         self._logger = get_logger().bind(source=source, identifier=identifier) | ||||||
|  |         self.policy_context = {} | ||||||
|  |  | ||||||
|     # pylint: disable=too-many-return-statements |     # pylint: disable=too-many-return-statements | ||||||
|     def get_action(self, **kwargs) -> tuple[Action, Optional[UserSourceConnection]]: |     def get_action(self, **kwargs) -> tuple[Action, Optional[UserSourceConnection]]: | ||||||
| @ -144,20 +151,23 @@ class SourceFlowManager: | |||||||
|         except IntegrityError as exc: |         except IntegrityError as exc: | ||||||
|             self._logger.warning("failed to get action", exc=exc) |             self._logger.warning("failed to get action", exc=exc) | ||||||
|             return redirect("/") |             return redirect("/") | ||||||
|         self._logger.debug("get_action() says", action=action, connection=connection) |         self._logger.debug("get_action", action=action, connection=connection) | ||||||
|         if connection: |         try: | ||||||
|             if action == Action.LINK: |             if connection: | ||||||
|                 self._logger.debug("Linking existing user") |                 if action == Action.LINK: | ||||||
|                 return self.handle_existing_user_link(connection) |                     self._logger.debug("Linking existing user") | ||||||
|             if action == Action.AUTH: |                     return self.handle_existing_user_link(connection) | ||||||
|                 self._logger.debug("Handling auth user") |                 if action == Action.AUTH: | ||||||
|                 return self.handle_auth_user(connection) |                     self._logger.debug("Handling auth user") | ||||||
|             if action == Action.ENROLL: |                     return self.handle_auth_user(connection) | ||||||
|                 self._logger.debug("Handling enrollment of new user") |                 if action == Action.ENROLL: | ||||||
|                 return self.handle_enroll(connection) |                     self._logger.debug("Handling enrollment of new user") | ||||||
|  |                     return self.handle_enroll(connection) | ||||||
|  |         except FlowNonApplicableException as exc: | ||||||
|  |             self._logger.warning("Flow non applicable", exc=exc) | ||||||
|  |             return self.error_handler(exc, exc.policy_result) | ||||||
|         # Default case, assume deny |         # Default case, assume deny | ||||||
|         messages.error( |         error = ( | ||||||
|             self.request, |  | ||||||
|             _( |             _( | ||||||
|                 ( |                 ( | ||||||
|                     "Request to authenticate with %(source)s has been denied. Please authenticate " |                     "Request to authenticate with %(source)s has been denied. Please authenticate " | ||||||
| @ -166,7 +176,17 @@ class SourceFlowManager: | |||||||
|                 % {"source": self.source.name} |                 % {"source": self.source.name} | ||||||
|             ), |             ), | ||||||
|         ) |         ) | ||||||
|         return redirect(reverse("authentik_core:root-redirect")) |         return self.error_handler(error) | ||||||
|  |  | ||||||
|  |     def error_handler( | ||||||
|  |         self, error: Exception, policy_result: Optional[PolicyResult] = None | ||||||
|  |     ) -> HttpResponse: | ||||||
|  |         """Handle any errors by returning an access denied stage""" | ||||||
|  |         response = AccessDeniedResponse(self.request) | ||||||
|  |         response.error_message = str(error) | ||||||
|  |         if policy_result: | ||||||
|  |             response.policy_result = policy_result | ||||||
|  |         return response | ||||||
|  |  | ||||||
|     # pylint: disable=unused-argument |     # pylint: disable=unused-argument | ||||||
|     def get_stages_to_append(self, flow: Flow) -> list[Stage]: |     def get_stages_to_append(self, flow: Flow) -> list[Stage]: | ||||||
| @ -179,7 +199,9 @@ class SourceFlowManager: | |||||||
|             ] |             ] | ||||||
|         return [] |         return [] | ||||||
|  |  | ||||||
|     def _handle_login_flow(self, flow: Flow, **kwargs) -> HttpResponse: |     def _handle_login_flow( | ||||||
|  |         self, flow: Flow, connection: UserSourceConnection, **kwargs | ||||||
|  |     ) -> HttpResponse: | ||||||
|         """Prepare Authentication Plan, redirect user FlowExecutor""" |         """Prepare Authentication Plan, redirect user FlowExecutor""" | ||||||
|         # Ensure redirect is carried through when user was trying to |         # Ensure redirect is carried through when user was trying to | ||||||
|         # authorize application |         # authorize application | ||||||
| @ -193,8 +215,10 @@ class SourceFlowManager: | |||||||
|                 PLAN_CONTEXT_SSO: True, |                 PLAN_CONTEXT_SSO: True, | ||||||
|                 PLAN_CONTEXT_SOURCE: self.source, |                 PLAN_CONTEXT_SOURCE: self.source, | ||||||
|                 PLAN_CONTEXT_REDIRECT: final_redirect, |                 PLAN_CONTEXT_REDIRECT: final_redirect, | ||||||
|  |                 PLAN_CONTEXT_SOURCES_CONNECTION: connection, | ||||||
|             } |             } | ||||||
|         ) |         ) | ||||||
|  |         kwargs.update(self.policy_context) | ||||||
|         if not flow: |         if not flow: | ||||||
|             return HttpResponseBadRequest() |             return HttpResponseBadRequest() | ||||||
|         # We run the Flow planner here so we can pass the Pending user in the context |         # We run the Flow planner here so we can pass the Pending user in the context | ||||||
| @ -220,7 +244,7 @@ class SourceFlowManager: | |||||||
|             _("Successfully authenticated with %(source)s!" % {"source": self.source.name}), |             _("Successfully authenticated with %(source)s!" % {"source": self.source.name}), | ||||||
|         ) |         ) | ||||||
|         flow_kwargs = {PLAN_CONTEXT_PENDING_USER: connection.user} |         flow_kwargs = {PLAN_CONTEXT_PENDING_USER: connection.user} | ||||||
|         return self._handle_login_flow(self.source.authentication_flow, **flow_kwargs) |         return self._handle_login_flow(self.source.authentication_flow, connection, **flow_kwargs) | ||||||
|  |  | ||||||
|     def handle_existing_user_link( |     def handle_existing_user_link( | ||||||
|         self, |         self, | ||||||
| @ -264,8 +288,8 @@ class SourceFlowManager: | |||||||
|             return HttpResponseBadRequest() |             return HttpResponseBadRequest() | ||||||
|         return self._handle_login_flow( |         return self._handle_login_flow( | ||||||
|             self.source.enrollment_flow, |             self.source.enrollment_flow, | ||||||
|  |             connection, | ||||||
|             **{ |             **{ | ||||||
|                 PLAN_CONTEXT_PROMPT: delete_none_keys(self.enroll_info), |                 PLAN_CONTEXT_PROMPT: delete_none_keys(self.enroll_info), | ||||||
|                 PLAN_CONTEXT_SOURCES_CONNECTION: connection, |  | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  | |||||||
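With the flow-manager changes above, a non-applicable flow now surfaces as an `AccessDeniedResponse` via `error_handler`, and anything a subclass places in `policy_context` is merged into the flow plan context by `_handle_login_flow`. A hedged sketch of a subclass using that hook; the class name, constructor shape and context key are illustrative, not taken from the diff:

```python
from typing import Any

from authentik.core.sources.flow_manager import SourceFlowManager


class ExampleSourceFlowManager(SourceFlowManager):
    """Illustrative manager that exposes the raw provider response to flow policies."""

    def __init__(self, *args, raw_info: dict[str, Any] | None = None, **kwargs):
        super().__init__(*args, **kwargs)
        # Everything stored here ends up in the flow plan context via
        # `kwargs.update(self.policy_context)` in _handle_login_flow.
        self.policy_context = {"example_source_response": raw_info or {}}
```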
| @ -6,7 +6,6 @@ from os import environ | |||||||
| from boto3.exceptions import Boto3Error | from boto3.exceptions import Boto3Error | ||||||
| from botocore.exceptions import BotoCoreError, ClientError | from botocore.exceptions import BotoCoreError, ClientError | ||||||
| from dbbackup.db.exceptions import CommandConnectorError | from dbbackup.db.exceptions import CommandConnectorError | ||||||
| from django.conf import settings |  | ||||||
| from django.contrib.humanize.templatetags.humanize import naturaltime | from django.contrib.humanize.templatetags.humanize import naturaltime | ||||||
| from django.contrib.sessions.backends.cache import KEY_PREFIX | from django.contrib.sessions.backends.cache import KEY_PREFIX | ||||||
| from django.core import management | from django.core import management | ||||||
| @ -63,8 +62,6 @@ def should_backup() -> bool: | |||||||
|         return False |         return False | ||||||
|     if not CONFIG.y_bool("postgresql.backup.enabled"): |     if not CONFIG.y_bool("postgresql.backup.enabled"): | ||||||
|         return False |         return False | ||||||
|     if settings.DEBUG: |  | ||||||
|         return False |  | ||||||
|     return True |     return True | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -19,6 +19,7 @@ | |||||||
|         <script src="{% static 'dist/poly.js' %}" type="module"></script> |         <script src="{% static 'dist/poly.js' %}" type="module"></script> | ||||||
|         {% block head %} |         {% block head %} | ||||||
|         {% endblock %} |         {% endblock %} | ||||||
|  |         <meta name="sentry-trace" content="{{ sentry_trace }}" /> | ||||||
|     </head> |     </head> | ||||||
|     <body> |     <body> | ||||||
|         {% block body %} |         {% block body %} | ||||||
|  | |||||||
| @ -5,6 +5,8 @@ | |||||||
|  |  | ||||||
| {% block head %} | {% block head %} | ||||||
| <script src="{% static 'dist/admin/AdminInterface.js' %}" type="module"></script> | <script src="{% static 'dist/admin/AdminInterface.js' %}" type="module"></script> | ||||||
|  | <meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)"> | ||||||
|  | <meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)"> | ||||||
| {% endblock %} | {% endblock %} | ||||||
|  |  | ||||||
| {% block body %} | {% block body %} | ||||||
|  | |||||||
| @ -5,6 +5,8 @@ | |||||||
|  |  | ||||||
| {% block head %} | {% block head %} | ||||||
| <script src="{% static 'dist/user/UserInterface.js' %}" type="module"></script> | <script src="{% static 'dist/user/UserInterface.js' %}" type="module"></script> | ||||||
|  | <meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)"> | ||||||
|  | <meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)"> | ||||||
| {% endblock %} | {% endblock %} | ||||||
|  |  | ||||||
| {% block body %} | {% block body %} | ||||||
|  | |||||||
| @ -1,6 +1,5 @@ | |||||||
| """Test Applications API""" | """Test Applications API""" | ||||||
| from django.urls import reverse | from django.urls import reverse | ||||||
| from django.utils.encoding import force_str |  | ||||||
| from rest_framework.test import APITestCase | from rest_framework.test import APITestCase | ||||||
|  |  | ||||||
| from authentik.core.models import Application | from authentik.core.models import Application | ||||||
| @ -32,7 +31,7 @@ class TestApplicationsAPI(APITestCase): | |||||||
|             ) |             ) | ||||||
|         ) |         ) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         self.assertJSONEqual(force_str(response.content), {"messages": [], "passing": True}) |         self.assertJSONEqual(response.content.decode(), {"messages": [], "passing": True}) | ||||||
|         response = self.client.get( |         response = self.client.get( | ||||||
|             reverse( |             reverse( | ||||||
|                 "authentik_api:application-check-access", |                 "authentik_api:application-check-access", | ||||||
| @ -40,14 +39,14 @@ class TestApplicationsAPI(APITestCase): | |||||||
|             ) |             ) | ||||||
|         ) |         ) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         self.assertJSONEqual(force_str(response.content), {"messages": ["dummy"], "passing": False}) |         self.assertJSONEqual(response.content.decode(), {"messages": ["dummy"], "passing": False}) | ||||||
|  |  | ||||||
|     def test_list(self): |     def test_list(self): | ||||||
|         """Test list operation without superuser_full_list""" |         """Test list operation without superuser_full_list""" | ||||||
|         self.client.force_login(self.user) |         self.client.force_login(self.user) | ||||||
|         response = self.client.get(reverse("authentik_api:application-list")) |         response = self.client.get(reverse("authentik_api:application-list")) | ||||||
|         self.assertJSONEqual( |         self.assertJSONEqual( | ||||||
|             force_str(response.content), |             response.content.decode(), | ||||||
|             { |             { | ||||||
|                 "pagination": { |                 "pagination": { | ||||||
|                     "next": 0, |                     "next": 0, | ||||||
| @ -83,7 +82,7 @@ class TestApplicationsAPI(APITestCase): | |||||||
|             reverse("authentik_api:application-list") + "?superuser_full_list=true" |             reverse("authentik_api:application-list") + "?superuser_full_list=true" | ||||||
|         ) |         ) | ||||||
|         self.assertJSONEqual( |         self.assertJSONEqual( | ||||||
|             force_str(response.content), |             response.content.decode(), | ||||||
|             { |             { | ||||||
|                 "pagination": { |                 "pagination": { | ||||||
|                     "next": 0, |                     "next": 0, | ||||||
|  | |||||||
| @ -2,7 +2,6 @@ | |||||||
| from json import loads | from json import loads | ||||||
|  |  | ||||||
| from django.urls.base import reverse | from django.urls.base import reverse | ||||||
| from django.utils.encoding import force_str |  | ||||||
| from rest_framework.test import APITestCase | from rest_framework.test import APITestCase | ||||||
|  |  | ||||||
| from authentik.core.models import User | from authentik.core.models import User | ||||||
| @ -28,5 +27,5 @@ class TestAuthenticatedSessionsAPI(APITestCase): | |||||||
|         self.client.force_login(self.other_user) |         self.client.force_login(self.other_user) | ||||||
|         response = self.client.get(reverse("authentik_api:authenticatedsession-list")) |         response = self.client.get(reverse("authentik_api:authenticatedsession-list")) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         body = loads(force_str(response.content)) |         body = loads(response.content.decode()) | ||||||
|         self.assertEqual(body["pagination"]["count"], 1) |         self.assertEqual(body["pagination"]["count"], 1) | ||||||
|  | |||||||
@@ -1,8 +1,8 @@
 """authentik core models tests"""
 from time import sleep
-from typing import Callable, Type
+from typing import Callable

-from django.test import TestCase
+from django.test import RequestFactory, TestCase
 from django.utils.timezone import now
 from guardian.shortcuts import get_anonymous_user

@@ -27,9 +27,12 @@ class TestModels(TestCase):
         self.assertFalse(token.is_expired)


-def source_tester_factory(test_model: Type[Stage]) -> Callable:
+def source_tester_factory(test_model: type[Stage]) -> Callable:
     """Test source"""

+    factory = RequestFactory()
+    request = factory.get("/")
+
     def tester(self: TestModels):
         model_class = None
         if test_model._meta.abstract:
@@ -38,13 +41,13 @@ def source_tester_factory(test_model: Type[Stage]) -> Callable:
             model_class = test_model()
         model_class.slug = "test"
         self.assertIsNotNone(model_class.component)
-        _ = model_class.ui_login_button
-        _ = model_class.ui_user_settings
+        _ = model_class.ui_login_button(request)
+        _ = model_class.ui_user_settings()

     return tester


-def provider_tester_factory(test_model: Type[Stage]) -> Callable:
+def provider_tester_factory(test_model: type[Stage]) -> Callable:
     """Test provider"""

     def tester(self: TestModels):

@@ -41,7 +41,7 @@ class TestPropertyMappingAPI(APITestCase):
         expr = "return True"
         self.assertEqual(PropertyMappingSerializer().validate_expression(expr), expr)
         with self.assertRaises(ValidationError):
-            print(PropertyMappingSerializer().validate_expression("/"))
+            PropertyMappingSerializer().validate_expression("/")

     def test_types(self):
         """Test PropertyMappigns's types endpoint"""

@@ -6,8 +6,12 @@ from guardian.utils import get_anonymous_user

 from authentik.core.models import SourceUserMatchingModes, User
 from authentik.core.sources.flow_manager import Action
+from authentik.flows.models import Flow, FlowDesignation
 from authentik.lib.generators import generate_id
 from authentik.lib.tests.utils import get_request
+from authentik.policies.denied import AccessDeniedResponse
+from authentik.policies.expression.models import ExpressionPolicy
+from authentik.policies.models import PolicyBinding
 from authentik.sources.oauth.models import OAuthSource, UserOAuthSourceConnection
 from authentik.sources.oauth.views.callback import OAuthSourceFlowManager

@@ -17,7 +21,7 @@ class TestSourceFlowManager(TestCase):

     def setUp(self) -> None:
         super().setUp()
-        self.source = OAuthSource.objects.create(name="test")
+        self.source: OAuthSource = OAuthSource.objects.create(name="test")
         self.factory = RequestFactory()
         self.identifier = generate_id()

@@ -143,3 +147,34 @@ class TestSourceFlowManager(TestCase):
         action, _ = flow_manager.get_action()
         self.assertEqual(action, Action.ENROLL)
         flow_manager.get_flow()
+
+    def test_error_non_applicable_flow(self):
+        """Test error handling when a source selected flow is non-applicable due to a policy"""
+        self.source.user_matching_mode = SourceUserMatchingModes.USERNAME_LINK
+
+        flow = Flow.objects.create(
+            name="test", slug="test", title="test", designation=FlowDesignation.ENROLLMENT
+        )
+        policy = ExpressionPolicy.objects.create(
+            name="false", expression="""ak_message("foo");return False"""
+        )
+        PolicyBinding.objects.create(
+            policy=policy,
+            target=flow,
+            order=0,
+        )
+        self.source.enrollment_flow = flow
+        self.source.save()
+
+        flow_manager = OAuthSourceFlowManager(
+            self.source,
+            get_request("/", user=AnonymousUser()),
+            self.identifier,
+            {"username": "foo"},
+        )
+        action, _ = flow_manager.get_action()
+        self.assertEqual(action, Action.ENROLL)
+        response = flow_manager.get_flow()
+        self.assertIsInstance(response, AccessDeniedResponse)
+        # pylint: disable=no-member
+        self.assertEqual(response.error_message, "foo")

@@ -54,7 +54,9 @@ class TestTokenAPI(APITestCase):

     def test_token_expire(self):
         """Test Token expire task"""
-        token: Token = Token.objects.create(expires=now(), user=get_anonymous_user())
+        token: Token = Token.objects.create(
+            expires=now(), user=get_anonymous_user(), intent=TokenIntents.INTENT_API
+        )
         key = token.key
         clean_expired_models.delay().get()
         token.refresh_from_db()

@@ -2,9 +2,15 @@
 from django.urls.base import reverse
 from rest_framework.test import APITestCase

-from authentik.core.models import USER_ATTRIBUTE_CHANGE_EMAIL, USER_ATTRIBUTE_CHANGE_USERNAME, User
+from authentik.core.models import (
+    USER_ATTRIBUTE_CHANGE_EMAIL,
+    USER_ATTRIBUTE_CHANGE_NAME,
+    USER_ATTRIBUTE_CHANGE_USERNAME,
+    User,
+)
 from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_tenant
 from authentik.flows.models import FlowDesignation
+from authentik.lib.generators import generate_key
 from authentik.stages.email.models import EmailStage
 from authentik.tenants.models import Tenant

@@ -18,11 +24,28 @@ class TestUsersAPI(APITestCase):

     def test_update_self(self):
         """Test update_self"""
+        self.admin.attributes["foo"] = "bar"
+        self.admin.save()
+        self.admin.refresh_from_db()
         self.client.force_login(self.admin)
         response = self.client.put(
             reverse("authentik_api:user-update-self"), data={"username": "foo", "name": "foo"}
         )
+        self.admin.refresh_from_db()
         self.assertEqual(response.status_code, 200)
+        self.assertEqual(self.admin.attributes["foo"], "bar")
+        self.assertEqual(self.admin.username, "foo")
+        self.assertEqual(self.admin.name, "foo")
+
+    def test_update_self_name_denied(self):
+        """Test update_self"""
+        self.admin.attributes[USER_ATTRIBUTE_CHANGE_NAME] = False
+        self.admin.save()
+        self.client.force_login(self.admin)
+        response = self.client.put(
+            reverse("authentik_api:user-update-self"), data={"username": "foo", "name": "foo"}
+        )
+        self.assertEqual(response.status_code, 400)
+
     def test_update_self_username_denied(self):
         """Test update_self"""
@@ -68,6 +91,18 @@ class TestUsersAPI(APITestCase):
         )
         self.assertEqual(response.status_code, 404)

+    def test_set_password(self):
+        """Test Direct password set"""
+        self.client.force_login(self.admin)
+        new_pw = generate_key()
+        response = self.client.post(
+            reverse("authentik_api:user-set-password", kwargs={"pk": self.admin.pk}),
+            data={"password": new_pw},
+        )
+        self.assertEqual(response.status_code, 204)
+        self.admin.refresh_from_db()
+        self.assertTrue(self.admin.check_password(new_pw))
+
     def test_recovery(self):
         """Test user recovery link (no recovery flow set)"""
         flow = create_test_flow(FlowDesignation.RECOVERY)

@@ -29,3 +29,4 @@ class UserSettingSerializer(PassiveSerializer):
     component = CharField()
     title = CharField()
     configure_url = CharField(required=False)
+    icon_url = CharField()

@@ -1,4 +1,6 @@
 """Crypto API Views"""
+from typing import Optional
+
 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives.serialization import load_pem_private_key
 from cryptography.x509 import load_pem_x509_certificate
@@ -31,6 +33,7 @@ class CertificateKeyPairSerializer(ModelSerializer):
     cert_expiry = DateTimeField(source="certificate.not_valid_after", read_only=True)
     cert_subject = SerializerMethodField()
     private_key_available = SerializerMethodField()
+    private_key_type = SerializerMethodField()

     certificate_download_url = SerializerMethodField()
     private_key_download_url = SerializerMethodField()
@@ -43,6 +46,13 @@ class CertificateKeyPairSerializer(ModelSerializer):
         """Show if this keypair has a private key configured or not"""
         return instance.key_data != "" and instance.key_data is not None

+    def get_private_key_type(self, instance: CertificateKeyPair) -> Optional[str]:
+        """Get the private key's type, if set"""
+        key = instance.private_key
+        if key:
+            return key.__class__.__name__.replace("_", "").lower().replace("privatekey", "")
+        return None
+
     def get_certificate_download_url(self, instance: CertificateKeyPair) -> str:
         """Get URL to download certificate"""
         return (
@@ -72,7 +82,7 @@ class CertificateKeyPairSerializer(ModelSerializer):
         return value

     def validate_key_data(self, value: str) -> str:
-        """Verify that input is a valid PEM RSA Key"""
+        """Verify that input is a valid PEM Key"""
         # Since this field is optional, data can be empty.
         if value != "":
             try:
@@ -98,6 +108,7 @@ class CertificateKeyPairSerializer(ModelSerializer):
             "cert_expiry",
             "cert_subject",
             "private_key_available",
+            "private_key_type",
             "certificate_download_url",
             "private_key_download_url",
             "managed",

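The new get_private_key_type() above derives a short key-type label from the private key's class name. A sketch of just that string transformation, applied to example class names; the underscore-prefixed names are an assumption about cryptography's OpenSSL-backed implementation classes, only the transformation itself comes from the diff.

# Hypothetical class names run through the same transformation as above.
for name in ("_RSAPrivateKey", "_EllipticCurvePrivateKey", "_Ed25519PrivateKey"):
    print(name.replace("_", "").lower().replace("privatekey", ""))
# Expected output: rsa, ellipticcurve, ed25519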
@@ -44,7 +44,7 @@ class CertificateBuilder:
         """Build self-signed certificate"""
         one_day = datetime.timedelta(1, 0, 0)
         self.__private_key = rsa.generate_private_key(
-            public_exponent=65537, key_size=2048, backend=default_backend()
+            public_exponent=65537, key_size=4096, backend=default_backend()
         )
         self.__public_key = self.__private_key.public_key()
         alt_names: list[x509.GeneralName] = [x509.DNSName(x) for x in subject_alt_names or []]

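The builder hunk above only bumps the generated key size from 2048 to 4096 bits. As a standalone sketch, the same call against the public cryptography API looks like this; the surrounding builder state is authentik's and omitted here.

# Sketch: generate a 4096-bit RSA key, mirroring the changed call above.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa

private_key = rsa.generate_private_key(
    public_exponent=65537, key_size=4096, backend=default_backend()
)
print(private_key.key_size)  # 4096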
@@ -6,15 +6,23 @@ from uuid import uuid4

 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric.ec import (
+    EllipticCurvePrivateKey,
+    EllipticCurvePublicKey,
+)
+from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey, Ed25519PublicKey
 from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey
 from cryptography.hazmat.primitives.serialization import load_pem_private_key
 from cryptography.x509 import Certificate, load_pem_x509_certificate
 from django.db import models
 from django.utils.translation import gettext_lazy as _
+from structlog.stdlib import get_logger

 from authentik.lib.models import CreatedUpdatedModel
 from authentik.managed.models import ManagedModel

+LOGGER = get_logger()
+

 class CertificateKeyPair(ManagedModel, CreatedUpdatedModel):
     """CertificateKeyPair that can be used for signing or encrypting if `key_data`
@@ -33,8 +41,8 @@ class CertificateKeyPair(ManagedModel, CreatedUpdatedModel):
     )

     _cert: Optional[Certificate] = None
-    _private_key: Optional[RSAPrivateKey] = None
-    _public_key: Optional[RSAPublicKey] = None
+    _private_key: Optional[RSAPrivateKey | EllipticCurvePrivateKey | Ed25519PrivateKey] = None
+    _public_key: Optional[RSAPublicKey | EllipticCurvePublicKey | Ed25519PublicKey] = None

     @property
     def certificate(self) -> Certificate:
@@ -46,14 +54,16 @@ class CertificateKeyPair(ManagedModel, CreatedUpdatedModel):
         return self._cert

     @property
-    def public_key(self) -> Optional[RSAPublicKey]:
+    def public_key(self) -> Optional[RSAPublicKey | EllipticCurvePublicKey | Ed25519PublicKey]:
         """Get public key of the private key"""
         if not self._public_key:
             self._public_key = self.private_key.public_key()
         return self._public_key

     @property
-    def private_key(self) -> Optional[RSAPrivateKey]:
+    def private_key(
+        self,
+    ) -> Optional[RSAPrivateKey | EllipticCurvePrivateKey | Ed25519PrivateKey]:
         """Get python cryptography PrivateKey instance"""
         if not self._private_key and self.key_data != "":
             try:
@@ -62,7 +72,8 @@ class CertificateKeyPair(ManagedModel, CreatedUpdatedModel):
                     password=None,
                     backend=default_backend(),
                 )
-            except ValueError:
+            except ValueError as exc:
+                LOGGER.warning(exc)
                 return None
         return self._private_key

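With private_key widened above to an RSA/EC/Ed25519 union, downstream code can no longer assume an RSA key. A small hypothetical helper (not part of authentik) showing how a caller might branch on the concrete key class:

# Hypothetical helper: label a key loaded from CertificateKeyPair.private_key.
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey


def describe_key(key) -> str:
    if isinstance(key, RSAPrivateKey):
        return f"rsa-{key.key_size}"
    if isinstance(key, EllipticCurvePrivateKey):
        return f"ec-{key.curve.name}"
    if isinstance(key, Ed25519PrivateKey):
        return "ed25519"
    return "unknown"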
@@ -2,6 +2,9 @@
 from glob import glob
 from pathlib import Path

+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives.serialization import load_pem_private_key
+from cryptography.x509.base import load_pem_x509_certificate
 from django.utils.translation import gettext_lazy as _
 from structlog.stdlib import get_logger

@@ -20,10 +23,26 @@ LOGGER = get_logger()
 MANAGED_DISCOVERED = "goauthentik.io/crypto/discovered/%s"


+def ensure_private_key_valid(body: str):
+    """Attempt loading of a PEM Private key without password"""
+    load_pem_private_key(
+        str.encode("\n".join([x.strip() for x in body.split("\n")])),
+        password=None,
+        backend=default_backend(),
+    )
+    return body
+
+
+def ensure_certificate_valid(body: str):
+    """Attempt loading of a PEM-encoded certificate"""
+    load_pem_x509_certificate(body.encode("utf-8"), default_backend())
+    return body
+
+
 @CELERY_APP.task(bind=True, base=MonitoredTask)
 @prefill_task
 def certificate_discovery(self: MonitoredTask):
-    """Discover and update certificates form the filesystem"""
+    """Discover, import and update certificates from the filesystem"""
     certs = {}
     private_keys = {}
     discovered = 0
@@ -33,6 +52,9 @@ def certificate_discovery(self: MonitoredTask):
             continue
         if path.is_dir():
             continue
+        # For certbot setups, we want to ignore archive.
+        if "archive" in file:
+            continue
         # Support certbot's directory structure
         if path.name in ["fullchain.pem", "privkey.pem"]:
             cert_name = path.parent.name
@@ -41,12 +63,12 @@ def certificate_discovery(self: MonitoredTask):
         try:
             with open(path, "r+", encoding="utf-8") as _file:
                 body = _file.read()
-                if "BEGIN RSA PRIVATE KEY" in body:
-                    private_keys[cert_name] = body
+                if "PRIVATE KEY" in body:
+                    private_keys[cert_name] = ensure_private_key_valid(body)
                 else:
-                    certs[cert_name] = body
-        except OSError as exc:
-            LOGGER.warning("Failed to open file", exc=exc, file=path)
+                    certs[cert_name] = ensure_certificate_valid(body)
+        except (OSError, ValueError) as exc:
+            LOGGER.warning("Failed to open file or invalid format", exc=exc, file=path)
         discovered += 1
     for name, cert_data in certs.items():
         cert = CertificateKeyPair.objects.filter(managed=MANAGED_DISCOVERED % name).first()
@@ -60,7 +82,7 @@ def certificate_discovery(self: MonitoredTask):
             cert.certificate_data = cert_data
             dirty = True
         if name in private_keys:
-            if cert.key_data == private_keys[name]:
+            if cert.key_data != private_keys[name]:
                 cert.key_data = private_keys[name]
                 dirty = True
         if dirty:

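The discovery task above now routes file contents through ensure_private_key_valid / ensure_certificate_valid and therefore also catches ValueError, which is the exception cryptography's loaders raise for unparseable PEM data. A standalone sketch:

# Sketch: load_pem_private_key raises ValueError for data that is not a valid,
# unencrypted PEM key, which is why the task's except clause was widened.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_pem_private_key

try:
    load_pem_private_key(b"not a key", password=None, backend=default_backend())
except ValueError as exc:
    print(f"rejected: {exc}")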
@@ -146,7 +146,7 @@ class TestCrypto(APITestCase):
             client_secret=generate_key(),
             authorization_flow=create_test_flow(),
             redirect_uris="http://localhost",
-            rsa_key=keypair,
+            signing_key=keypair,
         )
         response = self.client.get(
             reverse(
@@ -191,9 +191,12 @@ class TestCrypto(APITestCase):
             with CONFIG.patch("cert_discovery_dir", temp_dir):
                 # pyright: reportGeneralTypeIssues=false
                 certificate_discovery()  # pylint: disable=no-value-for-parameter
-        self.assertTrue(
-            CertificateKeyPair.objects.filter(managed=MANAGED_DISCOVERED % "foo").exists()
-        )
+        keypair: CertificateKeyPair = CertificateKeyPair.objects.filter(
+            managed=MANAGED_DISCOVERED % "foo"
+        ).first()
+        self.assertIsNotNone(keypair)
+        self.assertIsNotNone(keypair.certificate)
+        self.assertIsNotNone(keypair.private_key)
         self.assertTrue(
             CertificateKeyPair.objects.filter(managed=MANAGED_DISCOVERED % "foo.bar").exists()
         )

@@ -1,4 +1,6 @@
 """Events API Views"""
+from json import loads
+
 import django_filters
 from django.db.models.aggregates import Count
 from django.db.models.fields.json import KeyTextTransform
@@ -12,6 +14,7 @@ from rest_framework.response import Response
 from rest_framework.serializers import ModelSerializer
 from rest_framework.viewsets import ModelViewSet

+from authentik.admin.api.metrics import CoordinateSerializer
 from authentik.core.api.utils import PassiveSerializer, TypeCreateSerializer
 from authentik.events.models import Event, EventAction

@@ -110,13 +113,20 @@ class EventViewSet(ModelViewSet):
     @extend_schema(
         methods=["GET"],
         responses={200: EventTopPerUserSerializer(many=True)},
+        filters=[],
         parameters=[
+            OpenApiParameter(
+                "action",
+                type=OpenApiTypes.STR,
+                location=OpenApiParameter.QUERY,
+                required=False,
+            ),
             OpenApiParameter(
                 "top_n",
                 type=OpenApiTypes.INT,
                 location=OpenApiParameter.QUERY,
                 required=False,
-            )
+            ),
         ],
     )
     @action(detail=False, methods=["GET"], pagination_class=None)
@@ -137,6 +147,40 @@ class EventViewSet(ModelViewSet):
             .order_by("-counted_events")[:top_n]
         )

+    @extend_schema(
+        methods=["GET"],
+        responses={200: CoordinateSerializer(many=True)},
+        filters=[],
+        parameters=[
+            OpenApiParameter(
+                "action",
+                type=OpenApiTypes.STR,
+                location=OpenApiParameter.QUERY,
+                required=False,
+            ),
+            OpenApiParameter(
+                "query",
+                type=OpenApiTypes.STR,
+                location=OpenApiParameter.QUERY,
+                required=False,
+            ),
+        ],
+    )
+    @action(detail=False, methods=["GET"], pagination_class=None)
+    def per_month(self, request: Request):
+        """Get the count of events per month"""
+        filtered_action = request.query_params.get("action", EventAction.LOGIN)
+        try:
+            query = loads(request.query_params.get("query", "{}"))
+        except ValueError:
+            return Response(status=400)
+        return Response(
+            get_objects_for_user(request.user, "authentik_events.view_event")
+            .filter(action=filtered_action)
+            .filter(**query)
+            .get_events_per_day()
+        )
+
     @extend_schema(responses={200: TypeCreateSerializer(many=True)})
     @action(detail=False, pagination_class=None, filter_backends=[])
     def actions(self, request: Request) -> Response:

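The new per_month action above reads two query parameters: action selects an EventAction (defaulting to login), and query is a JSON-encoded dict of extra ORM filters passed straight to .filter(**query). A sketch of how a client might build such a request; the URL path and the example filter key are assumptions, only the parameter handling is taken from the diff.

# Sketch: build a query string for the per_month action (hypothetical path).
from json import dumps
from urllib.parse import urlencode

params = urlencode({"action": "login", "query": dumps({"context__foo": "bar"})})
url = f"/api/v3/events/events/per_month/?{params}"  # path is an assumption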
@@ -15,12 +15,14 @@ from authentik.api.decorators import permission_required
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import PassiveSerializer
 from authentik.events.models import (
+    Event,
     Notification,
     NotificationSeverity,
     NotificationTransport,
     NotificationTransportError,
     TransportMode,
 )
+from authentik.events.utils import get_user


 class NotificationTransportSerializer(ModelSerializer):
@@ -86,6 +88,12 @@ class NotificationTransportViewSet(UsedByMixin, ModelViewSet):
             severity=NotificationSeverity.NOTICE,
             body=f"Test Notification from transport {transport.name}",
             user=request.user,
+            event=Event(
+                action="Test",
+                user=get_user(request.user),
+                app=self.__class__.__module__,
+                context={"foo": "bar"},
+            ),
         )
         try:
             response = NotificationTransportTestSerializer(

@@ -7,6 +7,7 @@ from typing import Optional, TypedDict
 from geoip2.database import Reader
 from geoip2.errors import GeoIP2Error
 from geoip2.models import City
+from sentry_sdk.hub import Hub
 from structlog.stdlib import get_logger

 from authentik.lib.config import CONFIG
@@ -34,12 +35,11 @@ class GeoIPReader:

     def __open(self):
         """Get GeoIP Reader, if configured, otherwise none"""
-        path = CONFIG.y("authentik.geoip")
+        path = CONFIG.y("geoip")
         if path == "" or not path:
             return
         try:
-            reader = Reader(path)
-            self.__reader = reader
+            self.__reader = Reader(path)
             self.__last_mtime = stat(path).st_mtime
             LOGGER.info("Loaded GeoIP database", last_write=self.__last_mtime)
         except OSError as exc:
@@ -62,13 +62,17 @@ class GeoIPReader:

     def city(self, ip_address: str) -> Optional[City]:
         """Wrapper for Reader.city"""
-        if not self.enabled:
-            return None
-        self.__check_expired()
-        try:
-            return self.__reader.city(ip_address)
-        except (GeoIP2Error, ValueError):
-            return None
+        with Hub.current.start_span(
+            op="authentik.events.geo.city",
+            description=ip_address,
+        ):
+            if not self.enabled:
+                return None
+            self.__check_expired()
+            try:
+                return self.__reader.city(ip_address)
+            except (GeoIP2Error, ValueError):
+                return None

     def city_dict(self, ip_address: str) -> Optional[GeoIPDict]:
         """Wrapper for self.city that returns a dict"""

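The city() wrapper above now runs each lookup inside a Sentry performance span. A minimal standalone sketch of the same sentry_sdk pattern; the span is effectively a no-op when no transaction is active.

# Sketch: wrap an operation in a Sentry span, mirroring the pattern above.
from sentry_sdk.hub import Hub


def timed_lookup(ip_address: str):
    with Hub.current.start_span(op="example.geoip.lookup", description=ip_address):
        return None  # placeholder for the real Reader.city() call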
@@ -19,7 +19,7 @@ def convert_user_to_json(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
     Event = apps.get_model("authentik_events", "Event")

     db_alias = schema_editor.connection.alias
-    for event in Event.objects.all():
+    for event in Event.objects.using(db_alias).all():
         event.delete()
         # Because event objects cannot be updated, we have to re-create them
         event.pk = None
| @ -314,169 +314,10 @@ class Migration(migrations.Migration): | |||||||
|             old_name="user_json", |             old_name="user_json", | ||||||
|             new_name="user", |             new_name="user", | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="event", |  | ||||||
|             name="action", |  | ||||||
|             field=models.TextField( |  | ||||||
|                 choices=[ |  | ||||||
|                     ("login", "Login"), |  | ||||||
|                     ("login_failed", "Login Failed"), |  | ||||||
|                     ("logout", "Logout"), |  | ||||||
|                     ("sign_up", "Sign Up"), |  | ||||||
|                     ("authorize_application", "Authorize Application"), |  | ||||||
|                     ("suspicious_request", "Suspicious Request"), |  | ||||||
|                     ("password_set", "Password Set"), |  | ||||||
|                     ("invitation_created", "Invite Created"), |  | ||||||
|                     ("invitation_used", "Invite Used"), |  | ||||||
|                     ("source_linked", "Source Linked"), |  | ||||||
|                     ("impersonation_started", "Impersonation Started"), |  | ||||||
|                     ("impersonation_ended", "Impersonation Ended"), |  | ||||||
|                     ("model_created", "Model Created"), |  | ||||||
|                     ("model_updated", "Model Updated"), |  | ||||||
|                     ("model_deleted", "Model Deleted"), |  | ||||||
|                     ("custom_", "Custom Prefix"), |  | ||||||
|                 ] |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="event", |  | ||||||
|             name="action", |  | ||||||
|             field=models.TextField( |  | ||||||
|                 choices=[ |  | ||||||
|                     ("login", "Login"), |  | ||||||
|                     ("login_failed", "Login Failed"), |  | ||||||
|                     ("logout", "Logout"), |  | ||||||
|                     ("user_write", "User Write"), |  | ||||||
|                     ("suspicious_request", "Suspicious Request"), |  | ||||||
|                     ("password_set", "Password Set"), |  | ||||||
|                     ("invitation_created", "Invite Created"), |  | ||||||
|                     ("invitation_used", "Invite Used"), |  | ||||||
|                     ("authorize_application", "Authorize Application"), |  | ||||||
|                     ("source_linked", "Source Linked"), |  | ||||||
|                     ("impersonation_started", "Impersonation Started"), |  | ||||||
|                     ("impersonation_ended", "Impersonation Ended"), |  | ||||||
|                     ("model_created", "Model Created"), |  | ||||||
|                     ("model_updated", "Model Updated"), |  | ||||||
|                     ("model_deleted", "Model Deleted"), |  | ||||||
|                     ("custom_", "Custom Prefix"), |  | ||||||
|                 ] |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.RemoveField( |         migrations.RemoveField( | ||||||
|             model_name="event", |             model_name="event", | ||||||
|             name="date", |             name="date", | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="event", |  | ||||||
|             name="action", |  | ||||||
|             field=models.TextField( |  | ||||||
|                 choices=[ |  | ||||||
|                     ("login", "Login"), |  | ||||||
|                     ("login_failed", "Login Failed"), |  | ||||||
|                     ("logout", "Logout"), |  | ||||||
|                     ("user_write", "User Write"), |  | ||||||
|                     ("suspicious_request", "Suspicious Request"), |  | ||||||
|                     ("password_set", "Password Set"), |  | ||||||
|                     ("token_view", "Token View"), |  | ||||||
|                     ("invitation_created", "Invite Created"), |  | ||||||
|                     ("invitation_used", "Invite Used"), |  | ||||||
|                     ("authorize_application", "Authorize Application"), |  | ||||||
|                     ("source_linked", "Source Linked"), |  | ||||||
|                     ("impersonation_started", "Impersonation Started"), |  | ||||||
|                     ("impersonation_ended", "Impersonation Ended"), |  | ||||||
|                     ("model_created", "Model Created"), |  | ||||||
|                     ("model_updated", "Model Updated"), |  | ||||||
|                     ("model_deleted", "Model Deleted"), |  | ||||||
|                     ("custom_", "Custom Prefix"), |  | ||||||
|                 ] |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="event", |  | ||||||
|             name="action", |  | ||||||
|             field=models.TextField( |  | ||||||
|                 choices=[ |  | ||||||
|                     ("login", "Login"), |  | ||||||
|                     ("login_failed", "Login Failed"), |  | ||||||
|                     ("logout", "Logout"), |  | ||||||
|                     ("user_write", "User Write"), |  | ||||||
|                     ("suspicious_request", "Suspicious Request"), |  | ||||||
|                     ("password_set", "Password Set"), |  | ||||||
|                     ("token_view", "Token View"), |  | ||||||
|                     ("invitation_created", "Invite Created"), |  | ||||||
|                     ("invitation_used", "Invite Used"), |  | ||||||
|                     ("authorize_application", "Authorize Application"), |  | ||||||
|                     ("source_linked", "Source Linked"), |  | ||||||
|                     ("impersonation_started", "Impersonation Started"), |  | ||||||
|                     ("impersonation_ended", "Impersonation Ended"), |  | ||||||
|                     ("policy_execution", "Policy Execution"), |  | ||||||
|                     ("policy_exception", "Policy Exception"), |  | ||||||
|                     ("property_mapping_exception", "Property Mapping Exception"), |  | ||||||
|                     ("model_created", "Model Created"), |  | ||||||
|                     ("model_updated", "Model Updated"), |  | ||||||
|                     ("model_deleted", "Model Deleted"), |  | ||||||
|                     ("custom_", "Custom Prefix"), |  | ||||||
|                 ] |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="event", |  | ||||||
|             name="action", |  | ||||||
|             field=models.TextField( |  | ||||||
|                 choices=[ |  | ||||||
|                     ("login", "Login"), |  | ||||||
|                     ("login_failed", "Login Failed"), |  | ||||||
|                     ("logout", "Logout"), |  | ||||||
|                     ("user_write", "User Write"), |  | ||||||
|                     ("suspicious_request", "Suspicious Request"), |  | ||||||
|                     ("password_set", "Password Set"), |  | ||||||
|                     ("token_view", "Token View"), |  | ||||||
|                     ("invitation_created", "Invite Created"), |  | ||||||
|                     ("invitation_used", "Invite Used"), |  | ||||||
|                     ("authorize_application", "Authorize Application"), |  | ||||||
|                     ("source_linked", "Source Linked"), |  | ||||||
|                     ("impersonation_started", "Impersonation Started"), |  | ||||||
|                     ("impersonation_ended", "Impersonation Ended"), |  | ||||||
|                     ("policy_execution", "Policy Execution"), |  | ||||||
|                     ("policy_exception", "Policy Exception"), |  | ||||||
|                     ("property_mapping_exception", "Property Mapping Exception"), |  | ||||||
|                     ("model_created", "Model Created"), |  | ||||||
|                     ("model_updated", "Model Updated"), |  | ||||||
|                     ("model_deleted", "Model Deleted"), |  | ||||||
|                     ("update_available", "Update Available"), |  | ||||||
|                     ("custom_", "Custom Prefix"), |  | ||||||
|                 ] |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="event", |  | ||||||
|             name="action", |  | ||||||
|             field=models.TextField( |  | ||||||
|                 choices=[ |  | ||||||
|                     ("login", "Login"), |  | ||||||
|                     ("login_failed", "Login Failed"), |  | ||||||
|                     ("logout", "Logout"), |  | ||||||
|                     ("user_write", "User Write"), |  | ||||||
|                     ("suspicious_request", "Suspicious Request"), |  | ||||||
|                     ("password_set", "Password Set"), |  | ||||||
|                     ("token_view", "Token View"), |  | ||||||
|                     ("invitation_used", "Invite Used"), |  | ||||||
|                     ("authorize_application", "Authorize Application"), |  | ||||||
|                     ("source_linked", "Source Linked"), |  | ||||||
|                     ("impersonation_started", "Impersonation Started"), |  | ||||||
|                     ("impersonation_ended", "Impersonation Ended"), |  | ||||||
|                     ("policy_execution", "Policy Execution"), |  | ||||||
|                     ("policy_exception", "Policy Exception"), |  | ||||||
|                     ("property_mapping_exception", "Property Mapping Exception"), |  | ||||||
|                     ("configuration_error", "Configuration Error"), |  | ||||||
|                     ("model_created", "Model Created"), |  | ||||||
|                     ("model_updated", "Model Updated"), |  | ||||||
|                     ("model_deleted", "Model Deleted"), |  | ||||||
|                     ("update_available", "Update Available"), |  | ||||||
|                     ("custom_", "Custom Prefix"), |  | ||||||
|                 ] |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.CreateModel( |         migrations.CreateModel( | ||||||
|             name="NotificationTransport", |             name="NotificationTransport", | ||||||
|             fields=[ |             fields=[ | ||||||
| @ -610,68 +451,6 @@ class Migration(migrations.Migration): | |||||||
|                 help_text="Only send notification once, for example when sending a webhook into a chat channel.", |                 help_text="Only send notification once, for example when sending a webhook into a chat channel.", | ||||||
|             ), |             ), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="event", |  | ||||||
|             name="action", |  | ||||||
|             field=models.TextField( |  | ||||||
|                 choices=[ |  | ||||||
|                     ("login", "Login"), |  | ||||||
|                     ("login_failed", "Login Failed"), |  | ||||||
|                     ("logout", "Logout"), |  | ||||||
|                     ("user_write", "User Write"), |  | ||||||
|                     ("suspicious_request", "Suspicious Request"), |  | ||||||
|                     ("password_set", "Password Set"), |  | ||||||
|                     ("token_view", "Token View"), |  | ||||||
|                     ("invitation_used", "Invite Used"), |  | ||||||
|                     ("authorize_application", "Authorize Application"), |  | ||||||
|                     ("source_linked", "Source Linked"), |  | ||||||
|                     ("impersonation_started", "Impersonation Started"), |  | ||||||
|                     ("impersonation_ended", "Impersonation Ended"), |  | ||||||
|                     ("policy_execution", "Policy Execution"), |  | ||||||
|                     ("policy_exception", "Policy Exception"), |  | ||||||
|                     ("property_mapping_exception", "Property Mapping Exception"), |  | ||||||
|                     ("system_task_execution", "System Task Execution"), |  | ||||||
|                     ("system_task_exception", "System Task Exception"), |  | ||||||
|                     ("configuration_error", "Configuration Error"), |  | ||||||
|                     ("model_created", "Model Created"), |  | ||||||
|                     ("model_updated", "Model Updated"), |  | ||||||
|                     ("model_deleted", "Model Deleted"), |  | ||||||
|                     ("update_available", "Update Available"), |  | ||||||
|                     ("custom_", "Custom Prefix"), |  | ||||||
|                 ] |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="event", |  | ||||||
|             name="action", |  | ||||||
|             field=models.TextField( |  | ||||||
|                 choices=[ |  | ||||||
|                     ("login", "Login"), |  | ||||||
|                     ("login_failed", "Login Failed"), |  | ||||||
|                     ("logout", "Logout"), |  | ||||||
|                     ("user_write", "User Write"), |  | ||||||
|                     ("suspicious_request", "Suspicious Request"), |  | ||||||
|                     ("password_set", "Password Set"), |  | ||||||
|                     ("secret_view", "Secret View"), |  | ||||||
|                     ("invitation_used", "Invite Used"), |  | ||||||
|                     ("authorize_application", "Authorize Application"), |  | ||||||
|                     ("source_linked", "Source Linked"), |  | ||||||
|                     ("impersonation_started", "Impersonation Started"), |  | ||||||
|                     ("impersonation_ended", "Impersonation Ended"), |  | ||||||
|                     ("policy_execution", "Policy Execution"), |  | ||||||
|                     ("policy_exception", "Policy Exception"), |  | ||||||
|                     ("property_mapping_exception", "Property Mapping Exception"), |  | ||||||
|                     ("system_task_execution", "System Task Execution"), |  | ||||||
|                     ("system_task_exception", "System Task Exception"), |  | ||||||
|                     ("configuration_error", "Configuration Error"), |  | ||||||
|                     ("model_created", "Model Created"), |  | ||||||
|                     ("model_updated", "Model Updated"), |  | ||||||
|                     ("model_deleted", "Model Deleted"), |  | ||||||
|                     ("update_available", "Update Available"), |  | ||||||
|                     ("custom_", "Custom Prefix"), |  | ||||||
|                 ] |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.RunPython( |         migrations.RunPython( | ||||||
|             code=token_view_to_secret_view, |             code=token_view_to_secret_view, | ||||||
|         ), |         ), | ||||||
| @ -688,76 +467,11 @@ class Migration(migrations.Migration): | |||||||
|         migrations.RunPython( |         migrations.RunPython( | ||||||
|             code=update_expires, |             code=update_expires, | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="event", |  | ||||||
|             name="action", |  | ||||||
|             field=models.TextField( |  | ||||||
|                 choices=[ |  | ||||||
|                     ("login", "Login"), |  | ||||||
|                     ("login_failed", "Login Failed"), |  | ||||||
|                     ("logout", "Logout"), |  | ||||||
|                     ("user_write", "User Write"), |  | ||||||
|                     ("suspicious_request", "Suspicious Request"), |  | ||||||
|                     ("password_set", "Password Set"), |  | ||||||
|                     ("secret_view", "Secret View"), |  | ||||||
|                     ("invitation_used", "Invite Used"), |  | ||||||
|                     ("authorize_application", "Authorize Application"), |  | ||||||
|                     ("source_linked", "Source Linked"), |  | ||||||
|                     ("impersonation_started", "Impersonation Started"), |  | ||||||
|                     ("impersonation_ended", "Impersonation Ended"), |  | ||||||
|                     ("policy_execution", "Policy Execution"), |  | ||||||
|                     ("policy_exception", "Policy Exception"), |  | ||||||
|                     ("property_mapping_exception", "Property Mapping Exception"), |  | ||||||
|                     ("system_task_execution", "System Task Execution"), |  | ||||||
|                     ("system_task_exception", "System Task Exception"), |  | ||||||
|                     ("configuration_error", "Configuration Error"), |  | ||||||
|                     ("model_created", "Model Created"), |  | ||||||
|                     ("model_updated", "Model Updated"), |  | ||||||
|                     ("model_deleted", "Model Deleted"), |  | ||||||
|                     ("email_sent", "Email Sent"), |  | ||||||
|                     ("update_available", "Update Available"), |  | ||||||
|                     ("custom_", "Custom Prefix"), |  | ||||||
|                 ] |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddField( |         migrations.AddField( | ||||||
|             model_name="event", |             model_name="event", | ||||||
|             name="tenant", |             name="tenant", | ||||||
|             field=models.JSONField(blank=True, default=authentik.events.models.default_tenant), |             field=models.JSONField(blank=True, default=authentik.events.models.default_tenant), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |  | ||||||
|             model_name="event", |  | ||||||
|             name="action", |  | ||||||
|             field=models.TextField( |  | ||||||
|                 choices=[ |  | ||||||
|                     ("login", "Login"), |  | ||||||
|                     ("login_failed", "Login Failed"), |  | ||||||
|                     ("logout", "Logout"), |  | ||||||
|                     ("user_write", "User Write"), |  | ||||||
|                     ("suspicious_request", "Suspicious Request"), |  | ||||||
|                     ("password_set", "Password Set"), |  | ||||||
|                     ("secret_view", "Secret View"), |  | ||||||
|                     ("invitation_used", "Invite Used"), |  | ||||||
|                     ("authorize_application", "Authorize Application"), |  | ||||||
|                     ("source_linked", "Source Linked"), |  | ||||||
|                     ("impersonation_started", "Impersonation Started"), |  | ||||||
|                     ("impersonation_ended", "Impersonation Ended"), |  | ||||||
|                     ("policy_execution", "Policy Execution"), |  | ||||||
|                     ("policy_exception", "Policy Exception"), |  | ||||||
|                     ("property_mapping_exception", "Property Mapping Exception"), |  | ||||||
|                     ("system_task_execution", "System Task Execution"), |  | ||||||
|                     ("system_task_exception", "System Task Exception"), |  | ||||||
|                     ("system_exception", "System Exception"), |  | ||||||
|                     ("configuration_error", "Configuration Error"), |  | ||||||
|                     ("model_created", "Model Created"), |  | ||||||
|                     ("model_updated", "Model Updated"), |  | ||||||
|                     ("model_deleted", "Model Deleted"), |  | ||||||
|                     ("email_sent", "Email Sent"), |  | ||||||
|                     ("update_available", "Update Available"), |  | ||||||
|                     ("custom_", "Custom Prefix"), |  | ||||||
|                 ] |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterField( |         migrations.AlterField( | ||||||
|             model_name="event", |             model_name="event", | ||||||
|             name="action", |             name="action", | ||||||
| @ -776,6 +490,7 @@ class Migration(migrations.Migration): | |||||||
|                     ("source_linked", "Source Linked"), |                     ("source_linked", "Source Linked"), | ||||||
|                     ("impersonation_started", "Impersonation Started"), |                     ("impersonation_started", "Impersonation Started"), | ||||||
|                     ("impersonation_ended", "Impersonation Ended"), |                     ("impersonation_ended", "Impersonation Ended"), | ||||||
|  |                     ("flow_execution", "Flow Execution"), | ||||||
|                     ("policy_execution", "Policy Execution"), |                     ("policy_execution", "Policy Execution"), | ||||||
|                     ("policy_exception", "Policy Exception"), |                     ("policy_exception", "Policy Exception"), | ||||||
|                     ("property_mapping_exception", "Property Mapping Exception"), |                     ("property_mapping_exception", "Property Mapping Exception"), | ||||||
|  | |||||||
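For context on the removals above: each time a member is added to the EventAction enum, makemigrations re-emits the entire choices list as a new AlterField, so a squashed migration only needs to keep the last such operation. A minimal hedged sketch of what a surviving operation looks like (migration and dependency names are illustrative, not taken from this diff):

    from django.db import migrations, models


    class Migration(migrations.Migration):
        # Illustrative names; the real squashed migration lives in authentik_events.
        dependencies = [("authentik_events", "0001_initial")]

        operations = [
            migrations.AlterField(
                model_name="event",
                name="action",
                # Only the latest regenerated choices list needs to survive a
                # squash; a short excerpt is shown here instead of the full list.
                field=models.TextField(
                    choices=[
                        ("login", "Login"),
                        ("flow_execution", "Flow Execution"),
                        ("custom_", "Custom Prefix"),
                    ]
                ),
            ),
        ]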
| @ -10,7 +10,7 @@ def convert_user_to_json(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | |||||||
|     Event = apps.get_model("authentik_events", "Event") |     Event = apps.get_model("authentik_events", "Event") | ||||||
|  |  | ||||||
|     db_alias = schema_editor.connection.alias |     db_alias = schema_editor.connection.alias | ||||||
|     for event in Event.objects.all(): |     for event in Event.objects.using(db_alias).all(): | ||||||
|         event.delete() |         event.delete() | ||||||
|         # Because event objects cannot be updated, we have to re-create them |         # Because event objects cannot be updated, we have to re-create them | ||||||
|         event.pk = None |         event.pk = None | ||||||
|  | |||||||
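The `.using(db_alias)` fix matters because a data migration receives a schema editor bound to one specific database connection; querying without the alias silently reads from the default database. A minimal hedged sketch of the pattern (app label, model, and dependency are illustrative):

    from django.apps.registry import Apps
    from django.db import migrations
    from django.db.backends.base.schema import BaseDatabaseSchemaEditor


    def forwards(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
        # Resolve the historical model and pin the queryset to the connection
        # the migration is actually being applied to.
        Event = apps.get_model("authentik_events", "Event")
        db_alias = schema_editor.connection.alias
        for event in Event.objects.using(db_alias).all():
            ...  # transform and re-save each row


    class Migration(migrations.Migration):
        dependencies = [("authentik_events", "0001_initial")]
        operations = [migrations.RunPython(forwards, migrations.RunPython.noop)]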
| @ -1,12 +1,20 @@ | |||||||
| """authentik events models""" | """authentik events models""" | ||||||
|  | import time | ||||||
|  | from collections import Counter | ||||||
| from datetime import timedelta | from datetime import timedelta | ||||||
| from inspect import getmodule, stack | from inspect import currentframe | ||||||
| from smtplib import SMTPException | from smtplib import SMTPException | ||||||
| from typing import TYPE_CHECKING, Optional, Type, Union | from typing import TYPE_CHECKING, Optional | ||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
|  |  | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
| from django.db import models | from django.db import models | ||||||
|  | from django.db.models import Count, ExpressionWrapper, F | ||||||
|  | from django.db.models.fields import DurationField | ||||||
|  | from django.db.models.functions import ExtractHour | ||||||
|  | from django.db.models.functions.datetime import ExtractDay | ||||||
|  | from django.db.models.manager import Manager | ||||||
|  | from django.db.models.query import QuerySet | ||||||
| from django.http import HttpRequest | from django.http import HttpRequest | ||||||
| from django.http.request import QueryDict | from django.http.request import QueryDict | ||||||
| from django.utils.timezone import now | from django.utils.timezone import now | ||||||
| @ -70,6 +78,7 @@ class EventAction(models.TextChoices): | |||||||
|     IMPERSONATION_STARTED = "impersonation_started" |     IMPERSONATION_STARTED = "impersonation_started" | ||||||
|     IMPERSONATION_ENDED = "impersonation_ended" |     IMPERSONATION_ENDED = "impersonation_ended" | ||||||
|  |  | ||||||
|  |     FLOW_EXECUTION = "flow_execution" | ||||||
|     POLICY_EXECUTION = "policy_execution" |     POLICY_EXECUTION = "policy_execution" | ||||||
|     POLICY_EXCEPTION = "policy_exception" |     POLICY_EXCEPTION = "policy_exception" | ||||||
|     PROPERTY_MAPPING_EXCEPTION = "property_mapping_exception" |     PROPERTY_MAPPING_EXCEPTION = "property_mapping_exception" | ||||||
| @ -90,6 +99,72 @@ class EventAction(models.TextChoices): | |||||||
|     CUSTOM_PREFIX = "custom_" |     CUSTOM_PREFIX = "custom_" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class EventQuerySet(QuerySet): | ||||||
|  |     """Custom events query set with helper functions""" | ||||||
|  |  | ||||||
|  |     def get_events_per_hour(self) -> list[dict[str, int]]: | ||||||
|  |         """Get event count by hour in the last day, fill with zeros""" | ||||||
|  |         date_from = now() - timedelta(days=1) | ||||||
|  |         result = ( | ||||||
|  |             self.filter(created__gte=date_from) | ||||||
|  |             .annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField())) | ||||||
|  |             .annotate(age_hours=ExtractHour("age")) | ||||||
|  |             .values("age_hours") | ||||||
|  |             .annotate(count=Count("pk")) | ||||||
|  |             .order_by("age_hours") | ||||||
|  |         ) | ||||||
|  |         data = Counter({int(d["age_hours"]): d["count"] for d in result}) | ||||||
|  |         results = [] | ||||||
|  |         _now = now() | ||||||
|  |         for hour in range(0, -24, -1): | ||||||
|  |             results.append( | ||||||
|  |                 { | ||||||
|  |                     "x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000, | ||||||
|  |                     "y_cord": data[hour * -1], | ||||||
|  |                 } | ||||||
|  |             ) | ||||||
|  |         return results | ||||||
|  |  | ||||||
|  |     def get_events_per_day(self) -> list[dict[str, int]]: | ||||||
|  |         """Get event count by hour in the last day, fill with zeros""" | ||||||
|  |         date_from = now() - timedelta(weeks=4) | ||||||
|  |         result = ( | ||||||
|  |             self.filter(created__gte=date_from) | ||||||
|  |             .annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField())) | ||||||
|  |             .annotate(age_days=ExtractDay("age")) | ||||||
|  |             .values("age_days") | ||||||
|  |             .annotate(count=Count("pk")) | ||||||
|  |             .order_by("age_days") | ||||||
|  |         ) | ||||||
|  |         data = Counter({int(d["age_days"]): d["count"] for d in result}) | ||||||
|  |         results = [] | ||||||
|  |         _now = now() | ||||||
|  |         for day in range(0, -30, -1): | ||||||
|  |             results.append( | ||||||
|  |                 { | ||||||
|  |                     "x_cord": time.mktime((_now + timedelta(days=day)).timetuple()) * 1000, | ||||||
|  |                     "y_cord": data[day * -1], | ||||||
|  |                 } | ||||||
|  |             ) | ||||||
|  |         return results | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class EventManager(Manager): | ||||||
|  |     """Custom helper methods for Events""" | ||||||
|  |  | ||||||
|  |     def get_queryset(self) -> QuerySet: | ||||||
|  |         """use custom queryset""" | ||||||
|  |         return EventQuerySet(self.model, using=self._db) | ||||||
|  |  | ||||||
|  |     def get_events_per_hour(self) -> list[dict[str, int]]: | ||||||
|  |         """Wrap method from queryset""" | ||||||
|  |         return self.get_queryset().get_events_per_hour() | ||||||
|  |  | ||||||
|  |     def get_events_per_day(self) -> list[dict[str, int]]: | ||||||
|  |         """Wrap method from queryset""" | ||||||
|  |         return self.get_queryset().get_events_per_day() | ||||||
|  |  | ||||||
|  |  | ||||||
| class Event(ExpiringModel): | class Event(ExpiringModel): | ||||||
|     """An individual Audit/Metrics/Notification/Error Event""" |     """An individual Audit/Metrics/Notification/Error Event""" | ||||||
|  |  | ||||||
| @ -105,6 +180,8 @@ class Event(ExpiringModel): | |||||||
|     # Shadow the expires attribute from ExpiringModel to override the default duration |     # Shadow the expires attribute from ExpiringModel to override the default duration | ||||||
|     expires = models.DateTimeField(default=default_event_duration) |     expires = models.DateTimeField(default=default_event_duration) | ||||||
|  |  | ||||||
|  |     objects = EventManager() | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def _get_app_from_request(request: HttpRequest) -> str: |     def _get_app_from_request(request: HttpRequest) -> str: | ||||||
|         if not isinstance(request, HttpRequest): |         if not isinstance(request, HttpRequest): | ||||||
| @ -113,16 +190,17 @@ class Event(ExpiringModel): | |||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def new( |     def new( | ||||||
|         action: Union[str, EventAction], |         action: str | EventAction, | ||||||
|         app: Optional[str] = None, |         app: Optional[str] = None, | ||||||
|         _inspect_offset: int = 1, |  | ||||||
|         **kwargs, |         **kwargs, | ||||||
|     ) -> "Event": |     ) -> "Event": | ||||||
|         """Create new Event instance from arguments. Instance is NOT saved.""" |         """Create new Event instance from arguments. Instance is NOT saved.""" | ||||||
|         if not isinstance(action, EventAction): |         if not isinstance(action, EventAction): | ||||||
|             action = EventAction.CUSTOM_PREFIX + action |             action = EventAction.CUSTOM_PREFIX + action | ||||||
|         if not app: |         if not app: | ||||||
|             app = getmodule(stack()[_inspect_offset][0]).__name__ |             current = currentframe() | ||||||
|  |             parent = current.f_back | ||||||
|  |             app = parent.f_globals["__name__"] | ||||||
|         cleaned_kwargs = cleanse_dict(sanitize_dict(kwargs)) |         cleaned_kwargs = cleanse_dict(sanitize_dict(kwargs)) | ||||||
|         event = Event(action=action, app=app, context=cleaned_kwargs) |         event = Event(action=action, app=app, context=cleaned_kwargs) | ||||||
|         return event |         return event | ||||||
| @ -439,7 +517,7 @@ class NotificationWebhookMapping(PropertyMapping): | |||||||
|         return "ak-property-mapping-notification-form" |         return "ak-property-mapping-notification-form" | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def serializer(self) -> Type["Serializer"]: |     def serializer(self) -> type["Serializer"]: | ||||||
|         from authentik.events.api.notification_mapping import NotificationWebhookMappingSerializer |         from authentik.events.api.notification_mapping import NotificationWebhookMappingSerializer | ||||||
|  |  | ||||||
|         return NotificationWebhookMappingSerializer |         return NotificationWebhookMappingSerializer | ||||||
|  | |||||||
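With `objects = EventManager()` on the model, the aggregation helpers above are available on `Event.objects` and on any filtered queryset; the switch from `stack()` to `currentframe()` in `Event.new()` is a related micro-optimisation that avoids walking the whole call stack just to resolve the caller's module. A small hedged sketch of how a metrics endpoint might consume the helpers (the view name is illustrative, not part of this change):

    from rest_framework.response import Response
    from rest_framework.views import APIView

    from authentik.events.models import Event, EventAction


    class LoginMetricsView(APIView):
        """Illustrative only: expose login volume as chart coordinates."""

        def get(self, request) -> Response:
            # filter() returns the custom EventQuerySet, so the helpers work
            # both on Event.objects and on any filtered queryset.
            logins = Event.objects.filter(action=EventAction.LOGIN)
            return Response(
                {
                    # Each entry is {"x_cord": <epoch millis>, "y_cord": <count>}
                    "per_hour": logins.get_events_per_hour(),
                    "per_day": logins.get_events_per_day(),
                }
            )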
| @ -46,7 +46,7 @@ class TaskResult: | |||||||
|  |  | ||||||
|     def with_error(self, exc: Exception) -> "TaskResult": |     def with_error(self, exc: Exception) -> "TaskResult": | ||||||
|         """Since errors might not always be pickle-able, set the traceback""" |         """Since errors might not always be pickle-able, set the traceback""" | ||||||
|         self.messages.extend(exception_to_string(exc).splitlines()) |         self.messages.append(str(exc)) | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -90,7 +90,7 @@ class StageViewSet( | |||||||
|             stages += list(configurable_stage.objects.all().order_by("name")) |             stages += list(configurable_stage.objects.all().order_by("name")) | ||||||
|         matching_stages: list[dict] = [] |         matching_stages: list[dict] = [] | ||||||
|         for stage in stages: |         for stage in stages: | ||||||
|             user_settings = stage.ui_user_settings |             user_settings = stage.ui_user_settings() | ||||||
|             if not user_settings: |             if not user_settings: | ||||||
|                 continue |                 continue | ||||||
|             user_settings.initial_data["object_uid"] = str(stage.pk) |             user_settings.initial_data["object_uid"] = str(stage.pk) | ||||||
|  | |||||||
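Since `ui_user_settings` is now called as a method (note the parentheses added in the loop above), stages override it as a plain method; a minimal hedged sketch, with the serializer import path and field values assumed rather than taken from this diff:

    from typing import Optional

    from authentik.flows.challenge import UserSettingSerializer  # import path assumed
    from authentik.flows.models import Stage


    class ExampleStage(Stage):
        """Illustrative stage exposing a user-settings entry (other required
        attributes such as `component` are omitted for brevity)."""

        def ui_user_settings(self) -> Optional[UserSettingSerializer]:
            return UserSettingSerializer(
                data={
                    "title": "Example settings",
                    "component": "ak-user-settings-example",  # assumed component name
                }
            )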
| @ -72,7 +72,7 @@ class WithUserInfoChallenge(Challenge): | |||||||
|     pending_user_avatar = CharField() |     pending_user_avatar = CharField() | ||||||
|  |  | ||||||
|  |  | ||||||
| class AccessDeniedChallenge(Challenge): | class AccessDeniedChallenge(WithUserInfoChallenge): | ||||||
|     """Challenge when a flow's active stage calls `stage_invalid()`.""" |     """Challenge when a flow's active stage calls `stage_invalid()`.""" | ||||||
|  |  | ||||||
|     error_message = CharField(required=False) |     error_message = CharField(required=False) | ||||||
|  | |||||||
| @ -1,11 +1,14 @@ | |||||||
| """flow exceptions""" | """flow exceptions""" | ||||||
|  |  | ||||||
| from authentik.lib.sentry import SentryIgnoredException | from authentik.lib.sentry import SentryIgnoredException | ||||||
|  | from authentik.policies.types import PolicyResult | ||||||
|  |  | ||||||
|  |  | ||||||
| class FlowNonApplicableException(SentryIgnoredException): | class FlowNonApplicableException(SentryIgnoredException): | ||||||
|     """Flow does not apply to current user (denied by policy).""" |     """Flow does not apply to current user (denied by policy).""" | ||||||
|  |  | ||||||
|  |     policy_result: PolicyResult | ||||||
|  |  | ||||||
|  |  | ||||||
| class EmptyFlowException(SentryIgnoredException): | class EmptyFlowException(SentryIgnoredException): | ||||||
|     """Flow has no stages.""" |     """Flow has no stages.""" | ||||||
|  | |||||||
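Because the planner (see the change to `planner.py` below) now attaches the denying `PolicyResult` to this exception, callers can report the policy messages instead of only the joined string; a hedged sketch of reading it defensively:

    from django.http import HttpRequest

    from authentik.flows.exceptions import FlowNonApplicableException
    from authentik.flows.planner import FlowPlanner


    def plan_or_explain(planner: FlowPlanner, request: HttpRequest) -> list[str]:
        """Illustrative helper: return the policy messages when a flow is denied."""
        try:
            planner.plan(request)
            return []
        except FlowNonApplicableException as exc:
            # The planner sets policy_result when flow-level policies deny access;
            # guard with getattr since other raisers may not set the attribute.
            result = getattr(exc, "policy_result", None)
            return list(result.messages) if result else [str(exc)]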
| @ -10,8 +10,8 @@ def add_title_for_defaults(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | |||||||
|         "default-invalidation-flow": "Default Invalidation Flow", |         "default-invalidation-flow": "Default Invalidation Flow", | ||||||
|         "default-source-enrollment": "Welcome to authentik! Please select a username.", |         "default-source-enrollment": "Welcome to authentik! Please select a username.", | ||||||
|         "default-source-authentication": "Welcome to authentik!", |         "default-source-authentication": "Welcome to authentik!", | ||||||
|         "default-provider-authorization-implicit-consent": "Default Provider Authorization Flow (implicit consent)", |         "default-provider-authorization-implicit-consent": "Redirecting to %(app)s", | ||||||
|         "default-provider-authorization-explicit-consent": "Default Provider Authorization Flow (explicit consent)", |         "default-provider-authorization-explicit-consent": "Redirecting to %(app)s", | ||||||
|         "default-password-change": "Change password", |         "default-password-change": "Change password", | ||||||
|     } |     } | ||||||
|     db_alias = schema_editor.connection.alias |     db_alias = schema_editor.connection.alias | ||||||
|  | |||||||
							
								
								
									
authentik/flows/migrations/0021_auto_20211227_2103.py (new file, 27 lines)
							| @ -0,0 +1,27 @@ | |||||||
|  | # Generated by Django 4.0 on 2021-12-27 21:03 | ||||||
|  | from django.apps.registry import Apps | ||||||
|  | from django.db import migrations, models | ||||||
|  | from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def update_title_for_defaults(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||||
|  |     slug_title_map = { | ||||||
|  |         "default-provider-authorization-implicit-consent": "Redirecting to %(app)s", | ||||||
|  |         "default-provider-authorization-explicit-consent": "Redirecting to %(app)s", | ||||||
|  |     } | ||||||
|  |     db_alias = schema_editor.connection.alias | ||||||
|  |     Flow = apps.get_model("authentik_flows", "Flow") | ||||||
|  |     for flow in Flow.objects.using(db_alias).all(): | ||||||
|  |         if flow.slug not in slug_title_map: | ||||||
|  |             continue | ||||||
|  |         flow.title = slug_title_map[flow.slug] | ||||||
|  |         flow.save() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|  |     dependencies = [ | ||||||
|  |         ("authentik_flows", "0020_flowtoken"), | ||||||
|  |     ] | ||||||
|  |  | ||||||
|  |     operations = [migrations.RunPython(update_title_for_defaults)] | ||||||
| @ -1,7 +1,7 @@ | |||||||
| """Flow models""" | """Flow models""" | ||||||
| from base64 import b64decode, b64encode | from base64 import b64decode, b64encode | ||||||
| from pickle import dumps, loads  # nosec | from pickle import dumps, loads  # nosec | ||||||
| from typing import TYPE_CHECKING, Optional, Type | from typing import TYPE_CHECKING, Optional | ||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
|  |  | ||||||
| from django.db import models | from django.db import models | ||||||
| @ -63,7 +63,7 @@ class Stage(SerializerModel): | |||||||
|     objects = InheritanceManager() |     objects = InheritanceManager() | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def type(self) -> Type["StageView"]: |     def type(self) -> type["StageView"]: | ||||||
|         """Return StageView class that implements logic for this stage""" |         """Return StageView class that implements logic for this stage""" | ||||||
|         # This is a bit of a workaround, since we can't set class methods with setattr |         # This is a bit of a workaround, since we can't set class methods with setattr | ||||||
|         if hasattr(self, "__in_memory_type"): |         if hasattr(self, "__in_memory_type"): | ||||||
| @ -75,7 +75,6 @@ class Stage(SerializerModel): | |||||||
|         """Return component used to edit this object""" |         """Return component used to edit this object""" | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def ui_user_settings(self) -> Optional[UserSettingSerializer]: |     def ui_user_settings(self) -> Optional[UserSettingSerializer]: | ||||||
|         """Entrypoint to integrate with User settings. Can either return None if no |         """Entrypoint to integrate with User settings. Can either return None if no | ||||||
|         user settings are available, or a challenge.""" |         user settings are available, or a challenge.""" | ||||||
| @ -87,7 +86,7 @@ class Stage(SerializerModel): | |||||||
|         return f"Stage {self.name}" |         return f"Stage {self.name}" | ||||||
|  |  | ||||||
|  |  | ||||||
| def in_memory_stage(view: Type["StageView"]) -> Stage: | def in_memory_stage(view: type["StageView"]) -> Stage: | ||||||
|     """Creates an in-memory stage instance, based on a `view` as view.""" |     """Creates an in-memory stage instance, based on a `view` as view.""" | ||||||
|     stage = Stage() |     stage = Stage() | ||||||
|     # Because we can't pickle a locally generated function, |     # Because we can't pickle a locally generated function, | ||||||
| @ -285,7 +284,7 @@ class FlowToken(Token): | |||||||
|         return loads(b64decode(self._plan.encode()))  # nosec |         return loads(b64decode(self._plan.encode()))  # nosec | ||||||
|  |  | ||||||
|     def __str__(self) -> str: |     def __str__(self) -> str: | ||||||
|         return f"Flow Token {super.__str__()}" |         return f"Flow Token {super().__str__()}" | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|  |  | ||||||
|  | |||||||
| @ -126,7 +126,9 @@ class FlowPlanner: | |||||||
|     ) -> FlowPlan: |     ) -> FlowPlan: | ||||||
|         """Check each of the flows' policies, check policies for each stage with PolicyBinding |         """Check each of the flows' policies, check policies for each stage with PolicyBinding | ||||||
|         and return ordered list""" |         and return ordered list""" | ||||||
|         with Hub.current.start_span(op="flow.planner.plan") as span: |         with Hub.current.start_span( | ||||||
|  |             op="authentik.flow.planner.plan", description=self.flow.slug | ||||||
|  |         ) as span: | ||||||
|             span: Span |             span: Span | ||||||
|             span.set_data("flow", self.flow) |             span.set_data("flow", self.flow) | ||||||
|             span.set_data("request", request) |             span.set_data("request", request) | ||||||
| @ -150,7 +152,9 @@ class FlowPlanner: | |||||||
|             engine.build() |             engine.build() | ||||||
|             result = engine.result |             result = engine.result | ||||||
|             if not result.passing: |             if not result.passing: | ||||||
|                 raise FlowNonApplicableException(",".join(result.messages)) |                 exc = FlowNonApplicableException(",".join(result.messages)) | ||||||
|  |                 exc.policy_result = result | ||||||
|  |                 raise exc | ||||||
|             # User is passing so far, check if we have a cached plan |             # User is passing so far, check if we have a cached plan | ||||||
|             cached_plan_key = cache_key(self.flow, user) |             cached_plan_key = cache_key(self.flow, user) | ||||||
|             cached_plan = cache.get(cached_plan_key, None) |             cached_plan = cache.get(cached_plan_key, None) | ||||||
| @ -181,7 +185,8 @@ class FlowPlanner: | |||||||
|         """Build flow plan by checking each stage in their respective |         """Build flow plan by checking each stage in their respective | ||||||
|         order and checking the applied policies""" |         order and checking the applied policies""" | ||||||
|         with Hub.current.start_span( |         with Hub.current.start_span( | ||||||
|             op="flow.planner.build_plan" |             op="authentik.flow.planner.build_plan", | ||||||
|  |             description=self.flow.slug, | ||||||
|         ) as span, HIST_FLOWS_PLAN_TIME.labels(flow_slug=self.flow.slug).time(): |         ) as span, HIST_FLOWS_PLAN_TIME.labels(flow_slug=self.flow.slug).time(): | ||||||
|             span: Span |             span: Span | ||||||
|             span.set_data("flow", self.flow) |             span.set_data("flow", self.flow) | ||||||
|  | |||||||
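The planner spans are renamed to the `authentik.<area>.<operation>` scheme and now carry the flow slug as the span description, which makes per-flow timings visible in tracing. A minimal hedged sketch of the same instrumentation pattern outside the planner:

    from sentry_sdk.hub import Hub


    def traced_plan(flow_slug: str) -> None:
        """Illustrative: wrap a unit of work in a named Sentry span."""
        with Hub.current.start_span(
            op="authentik.flow.planner.plan", description=flow_slug
        ) as span:
            # set_data attaches structured context to the span, mirroring how
            # the planner records the flow and request above.
            span.set_data("flow_slug", flow_slug)
            ...  # the actual planning work would run here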
| @ -1,4 +1,6 @@ | |||||||
| """authentik stage Base view""" | """authentik stage Base view""" | ||||||
|  | from typing import TYPE_CHECKING, Optional | ||||||
|  |  | ||||||
| from django.contrib.auth.models import AnonymousUser | from django.contrib.auth.models import AnonymousUser | ||||||
| from django.http import HttpRequest | from django.http import HttpRequest | ||||||
| from django.http.request import QueryDict | from django.http.request import QueryDict | ||||||
| @ -6,19 +8,24 @@ from django.http.response import HttpResponse | |||||||
| from django.urls import reverse | from django.urls import reverse | ||||||
| from django.views.generic.base import View | from django.views.generic.base import View | ||||||
| from rest_framework.request import Request | from rest_framework.request import Request | ||||||
|  | from sentry_sdk.hub import Hub | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik.core.models import DEFAULT_AVATAR, User | from authentik.core.models import DEFAULT_AVATAR, User | ||||||
| from authentik.flows.challenge import ( | from authentik.flows.challenge import ( | ||||||
|  |     AccessDeniedChallenge, | ||||||
|     Challenge, |     Challenge, | ||||||
|     ChallengeResponse, |     ChallengeResponse, | ||||||
|  |     ChallengeTypes, | ||||||
|     ContextualFlowInfo, |     ContextualFlowInfo, | ||||||
|     HttpChallengeResponse, |     HttpChallengeResponse, | ||||||
|     WithUserInfoChallenge, |     WithUserInfoChallenge, | ||||||
| ) | ) | ||||||
| from authentik.flows.models import InvalidResponseAction | from authentik.flows.models import InvalidResponseAction | ||||||
| from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, PLAN_CONTEXT_PENDING_USER | from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, PLAN_CONTEXT_PENDING_USER | ||||||
| from authentik.flows.views.executor import FlowExecutorView |  | ||||||
|  | if TYPE_CHECKING: | ||||||
|  |     from authentik.flows.views.executor import FlowExecutorView | ||||||
|  |  | ||||||
| PLAN_CONTEXT_PENDING_USER_IDENTIFIER = "pending_user_identifier" | PLAN_CONTEXT_PENDING_USER_IDENTIFIER = "pending_user_identifier" | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
| @ -27,11 +34,11 @@ LOGGER = get_logger() | |||||||
| class StageView(View): | class StageView(View): | ||||||
|     """Abstract Stage, inherits TemplateView but can be combined with FormView""" |     """Abstract Stage, inherits TemplateView but can be combined with FormView""" | ||||||
|  |  | ||||||
|     executor: FlowExecutorView |     executor: "FlowExecutorView" | ||||||
|  |  | ||||||
|     request: HttpRequest = None |     request: HttpRequest = None | ||||||
|  |  | ||||||
|     def __init__(self, executor: FlowExecutorView, **kwargs): |     def __init__(self, executor: "FlowExecutorView", **kwargs): | ||||||
|         self.executor = executor |         self.executor = executor | ||||||
|         super().__init__(**kwargs) |         super().__init__(**kwargs) | ||||||
|  |  | ||||||
| @ -42,6 +49,8 @@ class StageView(View): | |||||||
|         other things besides the form display. |         other things besides the form display. | ||||||
|  |  | ||||||
|         If no user is pending, returns request.user""" |         If no user is pending, returns request.user""" | ||||||
|  |         if not self.executor.plan: | ||||||
|  |             return self.request.user | ||||||
|         if PLAN_CONTEXT_PENDING_USER_IDENTIFIER in self.executor.plan.context and for_display: |         if PLAN_CONTEXT_PENDING_USER_IDENTIFIER in self.executor.plan.context and for_display: | ||||||
|             return User( |             return User( | ||||||
|                 username=self.executor.plan.context.get(PLAN_CONTEXT_PENDING_USER_IDENTIFIER), |                 username=self.executor.plan.context.get(PLAN_CONTEXT_PENDING_USER_IDENTIFIER), | ||||||
| @ -94,17 +103,31 @@ class ChallengeStageView(StageView): | |||||||
|                     keep_context=keep_context, |                     keep_context=keep_context, | ||||||
|                 ) |                 ) | ||||||
|                 return self.executor.restart_flow(keep_context) |                 return self.executor.restart_flow(keep_context) | ||||||
|             return self.challenge_invalid(challenge) |             with Hub.current.start_span( | ||||||
|         return self.challenge_valid(challenge) |                 op="authentik.flow.stage.challenge_invalid", | ||||||
|  |                 description=self.__class__.__name__, | ||||||
|  |             ): | ||||||
|  |                 return self.challenge_invalid(challenge) | ||||||
|  |         with Hub.current.start_span( | ||||||
|  |             op="authentik.flow.stage.challenge_valid", | ||||||
|  |             description=self.__class__.__name__, | ||||||
|  |         ): | ||||||
|  |             return self.challenge_valid(challenge) | ||||||
|  |  | ||||||
|     def format_title(self) -> str: |     def format_title(self) -> str: | ||||||
|         """Allow usage of placeholder in flow title.""" |         """Allow usage of placeholder in flow title.""" | ||||||
|  |         if not self.executor.plan: | ||||||
|  |             return self.executor.flow.title | ||||||
|         return self.executor.flow.title % { |         return self.executor.flow.title % { | ||||||
|             "app": self.executor.plan.context.get(PLAN_CONTEXT_APPLICATION, "") |             "app": self.executor.plan.context.get(PLAN_CONTEXT_APPLICATION, "") | ||||||
|         } |         } | ||||||
|  |  | ||||||
|     def _get_challenge(self, *args, **kwargs) -> Challenge: |     def _get_challenge(self, *args, **kwargs) -> Challenge: | ||||||
|         challenge = self.get_challenge(*args, **kwargs) |         with Hub.current.start_span( | ||||||
|  |             op="authentik.flow.stage.get_challenge", | ||||||
|  |             description=self.__class__.__name__, | ||||||
|  |         ): | ||||||
|  |             challenge = self.get_challenge(*args, **kwargs) | ||||||
|         if "flow_info" not in challenge.initial_data: |         if "flow_info" not in challenge.initial_data: | ||||||
|             flow_info = ContextualFlowInfo( |             flow_info = ContextualFlowInfo( | ||||||
|                 data={ |                 data={ | ||||||
| @ -156,3 +179,27 @@ class ChallengeStageView(StageView): | |||||||
|                 stage_view=self, |                 stage_view=self, | ||||||
|             ) |             ) | ||||||
|         return HttpChallengeResponse(challenge_response) |         return HttpChallengeResponse(challenge_response) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class AccessDeniedChallengeView(ChallengeStageView): | ||||||
|  |     """Used internally by FlowExecutor's stage_invalid()""" | ||||||
|  |  | ||||||
|  |     error_message: Optional[str] | ||||||
|  |  | ||||||
|  |     def __init__(self, executor: "FlowExecutorView", error_message: Optional[str] = None, **kwargs): | ||||||
|  |         super().__init__(executor, **kwargs) | ||||||
|  |         self.error_message = error_message | ||||||
|  |  | ||||||
|  |     def get_challenge(self, *args, **kwargs) -> Challenge: | ||||||
|  |         return AccessDeniedChallenge( | ||||||
|  |             data={ | ||||||
|  |                 "error_message": self.error_message or "Unknown error", | ||||||
|  |                 "type": ChallengeTypes.NATIVE.value, | ||||||
|  |                 "component": "ak-stage-access-denied", | ||||||
|  |             } | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     # This can never be reached since this challenge is created on demand and only the | ||||||
|  |     # .get() method is called | ||||||
|  |     def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:  # pragma: no cover | ||||||
|  |         return self.executor.cancel() | ||||||
|  | |||||||
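The new guards in `get_pending_user()` and `format_title()` make stages safe to run without a plan, and `format_title()` is what turns the new default titles (`Redirecting to %(app)s`, set by the migration above) into user-facing text. A small sketch of the substitution, with an illustrative application name:

    # Illustrative values; in the executor the template comes from Flow.title and
    # the application from FlowPlan.context[PLAN_CONTEXT_APPLICATION].
    title_template = "Redirecting to %(app)s"
    plan_context = {"application": "Grafana"}

    title = title_template % {"app": plan_context.get("application", "")}
    print(title)  # -> "Redirecting to Grafana"
    # With no plan at all, format_title() above simply returns the raw flow title.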
| @ -0,0 +1,51 @@ | |||||||
|  | """Test helpers""" | ||||||
|  | from json import loads | ||||||
|  | from typing import Any, Optional | ||||||
|  |  | ||||||
|  | from django.http.response import HttpResponse | ||||||
|  | from django.urls.base import reverse | ||||||
|  | from rest_framework.test import APITestCase | ||||||
|  |  | ||||||
|  | from authentik.core.models import User | ||||||
|  | from authentik.flows.challenge import ChallengeTypes | ||||||
|  | from authentik.flows.models import Flow | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class FlowTestCase(APITestCase): | ||||||
|  |     """Helpers for testing flows and stages.""" | ||||||
|  |  | ||||||
|  |     # pylint: disable=invalid-name | ||||||
|  |     def assertStageResponse( | ||||||
|  |         self, | ||||||
|  |         response: HttpResponse, | ||||||
|  |         flow: Optional[Flow] = None, | ||||||
|  |         user: Optional[User] = None, | ||||||
|  |         **kwargs, | ||||||
|  |     ) -> dict[str, Any]: | ||||||
|  |         """Assert various attributes of a stage response""" | ||||||
|  |         raw_response = loads(response.content.decode()) | ||||||
|  |         self.assertIsNotNone(raw_response["component"]) | ||||||
|  |         self.assertIsNotNone(raw_response["type"]) | ||||||
|  |         if flow: | ||||||
|  |             self.assertIn("flow_info", raw_response) | ||||||
|  |             self.assertEqual(raw_response["flow_info"]["background"], flow.background_url) | ||||||
|  |             self.assertEqual( | ||||||
|  |                 raw_response["flow_info"]["cancel_url"], reverse("authentik_flows:cancel") | ||||||
|  |             ) | ||||||
|  |             # We don't check the flow title since it will most likely go | ||||||
|  |             # through ChallengeStageView.format_title() so might not match 1:1 | ||||||
|  |             # self.assertEqual(raw_response["flow_info"]["title"], flow.title) | ||||||
|  |             self.assertIsNotNone(raw_response["flow_info"]["title"]) | ||||||
|  |         if user: | ||||||
|  |             self.assertEqual(raw_response["pending_user"], user.username) | ||||||
|  |             self.assertEqual(raw_response["pending_user_avatar"], user.avatar) | ||||||
|  |         for key, expected in kwargs.items(): | ||||||
|  |             self.assertEqual(raw_response[key], expected) | ||||||
|  |         return raw_response | ||||||
|  |  | ||||||
|  |     # pylint: disable=invalid-name | ||||||
|  |     def assertStageRedirects(self, response: HttpResponse, to: str) -> dict[str, Any]: | ||||||
|  |         """Wrapper around assertStageResponse that checks for a redirect""" | ||||||
|  |         return self.assertStageResponse( | ||||||
|  |             response, component="xak-flow-redirect", to=to, type=ChallengeTypes.REDIRECT.value | ||||||
|  |         ) | ||||||
|  | |||||||
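A hedged sketch of a test written against the new helpers, mirroring the denied-flow case rewritten further down (object names and the dummy policy values are illustrative):

    from django.urls import reverse

    from authentik.flows.exceptions import FlowNonApplicableException
    from authentik.flows.models import Flow, FlowDesignation
    from authentik.flows.tests import FlowTestCase
    from authentik.policies.dummy.models import DummyPolicy
    from authentik.policies.models import PolicyBinding


    class TestDeniedFlow(FlowTestCase):
        """Illustrative test: a flow denied by policy yields an access-denied stage."""

        def test_denied(self):
            flow = Flow.objects.create(
                name="example", slug="example", designation=FlowDesignation.AUTHENTICATION
            )
            # Bind an always-failing dummy policy directly to the flow.
            policy = DummyPolicy.objects.create(
                name="deny", result=False, wait_min=1, wait_max=2
            )
            PolicyBinding.objects.create(policy=policy, target=flow, order=0)

            response = self.client.get(
                reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug})
            )
            self.assertEqual(response.status_code, 200)
            # assertStageResponse checks component/type and, with flow=..., the
            # flow_info block; extra kwargs are compared field by field.
            self.assertStageResponse(
                response,
                flow=flow,
                component="ak-stage-access-denied",
                error_message=FlowNonApplicableException.__doc__,
            )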
| @ -32,7 +32,7 @@ class TestFlowsAPI(APITestCase): | |||||||
|  |  | ||||||
|     def test_models(self): |     def test_models(self): | ||||||
|         """Test that ui_user_settings returns none""" |         """Test that ui_user_settings returns none""" | ||||||
|         self.assertIsNone(Stage().ui_user_settings) |         self.assertIsNone(Stage().ui_user_settings()) | ||||||
|  |  | ||||||
|     def test_api_serializer(self): |     def test_api_serializer(self): | ||||||
|         """Test that stage serializer returns the correct type""" |         """Test that stage serializer returns the correct type""" | ||||||
|  | |||||||
| @ -4,16 +4,14 @@ from unittest.mock import MagicMock, PropertyMock, patch | |||||||
| from django.http import HttpRequest, HttpResponse | from django.http import HttpRequest, HttpResponse | ||||||
| from django.test.client import RequestFactory | from django.test.client import RequestFactory | ||||||
| from django.urls import reverse | from django.urls import reverse | ||||||
| from django.utils.encoding import force_str |  | ||||||
| from rest_framework.test import APITestCase |  | ||||||
|  |  | ||||||
| from authentik.core.models import User | from authentik.core.models import User | ||||||
| from authentik.flows.challenge import ChallengeTypes |  | ||||||
| from authentik.flows.exceptions import FlowNonApplicableException | from authentik.flows.exceptions import FlowNonApplicableException | ||||||
| from authentik.flows.markers import ReevaluateMarker, StageMarker | from authentik.flows.markers import ReevaluateMarker, StageMarker | ||||||
| from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, InvalidResponseAction | from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, InvalidResponseAction | ||||||
| from authentik.flows.planner import FlowPlan, FlowPlanner | from authentik.flows.planner import FlowPlan, FlowPlanner | ||||||
| from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, StageView | from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, StageView | ||||||
|  | from authentik.flows.tests import FlowTestCase | ||||||
| from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_PLAN, FlowExecutorView | from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_PLAN, FlowExecutorView | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.policies.dummy.models import DummyPolicy | from authentik.policies.dummy.models import DummyPolicy | ||||||
| @ -37,7 +35,7 @@ def to_stage_response(request: HttpRequest, source: HttpResponse): | |||||||
| TO_STAGE_RESPONSE_MOCK = MagicMock(side_effect=to_stage_response) | TO_STAGE_RESPONSE_MOCK = MagicMock(side_effect=to_stage_response) | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestFlowExecutor(APITestCase): | class TestFlowExecutor(FlowTestCase): | ||||||
|     """Test executor""" |     """Test executor""" | ||||||
|  |  | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
| @ -90,18 +88,11 @@ class TestFlowExecutor(APITestCase): | |||||||
|             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), |             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), | ||||||
|         ) |         ) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         self.assertJSONEqual( |         self.assertStageResponse( | ||||||
|             force_str(response.content), |             response, | ||||||
|             { |             flow=flow, | ||||||
|                 "component": "ak-stage-access-denied", |             error_message=FlowNonApplicableException.__doc__, | ||||||
|                 "error_message": FlowNonApplicableException.__doc__, |             component="ak-stage-access-denied", | ||||||
|                 "flow_info": { |  | ||||||
|                     "background": flow.background_url, |  | ||||||
|                     "cancel_url": reverse("authentik_flows:cancel"), |  | ||||||
|                     "title": "", |  | ||||||
|                 }, |  | ||||||
|                 "type": ChallengeTypes.NATIVE.value, |  | ||||||
|             }, |  | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     @patch( |     @patch( | ||||||
| @ -283,14 +274,7 @@ class TestFlowExecutor(APITestCase): | |||||||
|         # We do this request without the patch, so the policy results in false |         # We do this request without the patch, so the policy results in false | ||||||
|         response = self.client.post(exec_url) |         response = self.client.post(exec_url) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         self.assertJSONEqual( |         self.assertStageRedirects(response, reverse("authentik_core:root-redirect")) | ||||||
|             force_str(response.content), |  | ||||||
|             { |  | ||||||
|                 "component": "xak-flow-redirect", |  | ||||||
|                 "to": reverse("authentik_core:root-redirect"), |  | ||||||
|                 "type": ChallengeTypes.REDIRECT.value, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_reevaluate_keep(self): |     def test_reevaluate_keep(self): | ||||||
|         """Test planner with re-evaluate (everything is kept)""" |         """Test planner with re-evaluate (everything is kept)""" | ||||||
| @ -360,14 +344,7 @@ class TestFlowExecutor(APITestCase): | |||||||
|         # We do this request without the patch, so the policy results in false |         # We do this request without the patch, so the policy results in false | ||||||
|         response = self.client.post(exec_url) |         response = self.client.post(exec_url) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         self.assertJSONEqual( |         self.assertStageRedirects(response, reverse("authentik_core:root-redirect")) | ||||||
|             force_str(response.content), |  | ||||||
|             { |  | ||||||
|                 "component": "xak-flow-redirect", |  | ||||||
|                 "to": reverse("authentik_core:root-redirect"), |  | ||||||
|                 "type": ChallengeTypes.REDIRECT.value, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_reevaluate_remove_consecutive(self): |     def test_reevaluate_remove_consecutive(self): | ||||||
|         """Test planner with re-evaluate (consecutive stages are removed)""" |         """Test planner with re-evaluate (consecutive stages are removed)""" | ||||||
| @ -407,18 +384,7 @@ class TestFlowExecutor(APITestCase): | |||||||
|             # First request, run the planner |             # First request, run the planner | ||||||
|             response = self.client.get(exec_url) |             response = self.client.get(exec_url) | ||||||
|             self.assertEqual(response.status_code, 200) |             self.assertEqual(response.status_code, 200) | ||||||
|             self.assertJSONEqual( |             self.assertStageResponse(response, flow, component="ak-stage-dummy") | ||||||
|                 force_str(response.content), |  | ||||||
|                 { |  | ||||||
|                     "type": ChallengeTypes.NATIVE.value, |  | ||||||
|                     "component": "ak-stage-dummy", |  | ||||||
|                     "flow_info": { |  | ||||||
|                         "background": flow.background_url, |  | ||||||
|                         "cancel_url": reverse("authentik_flows:cancel"), |  | ||||||
|                         "title": "", |  | ||||||
|                     }, |  | ||||||
|                 }, |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|             plan: FlowPlan = self.client.session[SESSION_KEY_PLAN] |             plan: FlowPlan = self.client.session[SESSION_KEY_PLAN] | ||||||
|  |  | ||||||
| @ -441,31 +407,13 @@ class TestFlowExecutor(APITestCase): | |||||||
|         # but it won't save it, hence we can't check the plan |         # but it won't save it, hence we can't check the plan | ||||||
|         response = self.client.get(exec_url) |         response = self.client.get(exec_url) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         self.assertJSONEqual( |         self.assertStageResponse(response, flow, component="ak-stage-dummy") | ||||||
|             force_str(response.content), |  | ||||||
|             { |  | ||||||
|                 "type": ChallengeTypes.NATIVE.value, |  | ||||||
|                 "component": "ak-stage-dummy", |  | ||||||
|                 "flow_info": { |  | ||||||
|                     "background": flow.background_url, |  | ||||||
|                     "cancel_url": reverse("authentik_flows:cancel"), |  | ||||||
|                     "title": "", |  | ||||||
|                 }, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         # fourth request, this confirms the last stage (dummy4) |         # fourth request, this confirms the last stage (dummy4) | ||||||
|         # We do this request without the patch, so the policy results in false |         # We do this request without the patch, so the policy results in false | ||||||
|         response = self.client.post(exec_url) |         response = self.client.post(exec_url) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         self.assertJSONEqual( |         self.assertStageRedirects(response, reverse("authentik_core:root-redirect")) | ||||||
|             force_str(response.content), |  | ||||||
|             { |  | ||||||
|                 "component": "xak-flow-redirect", |  | ||||||
|                 "to": reverse("authentik_core:root-redirect"), |  | ||||||
|                 "type": ChallengeTypes.REDIRECT.value, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_stageview_user_identifier(self): |     def test_stageview_user_identifier(self): | ||||||
|         """Test PLAN_CONTEXT_PENDING_USER_IDENTIFIER""" |         """Test PLAN_CONTEXT_PENDING_USER_IDENTIFIER""" | ||||||
| @ -532,35 +480,16 @@ class TestFlowExecutor(APITestCase): | |||||||
|         # First request, run the planner |         # First request, run the planner | ||||||
|         response = self.client.get(exec_url) |         response = self.client.get(exec_url) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         self.assertJSONEqual( |         self.assertStageResponse( | ||||||
|             force_str(response.content), |             response, | ||||||
|             { |             flow, | ||||||
|                 "type": ChallengeTypes.NATIVE.value, |             component="ak-stage-identification", | ||||||
|                 "component": "ak-stage-identification", |             password_fields=False, | ||||||
|                 "flow_info": { |             primary_action="Log in", | ||||||
|                     "background": flow.background_url, |             sources=[], | ||||||
|                     "cancel_url": reverse("authentik_flows:cancel"), |             show_source_labels=False, | ||||||
|                     "title": "", |             user_fields=[UserFields.E_MAIL], | ||||||
|                 }, |  | ||||||
|                 "password_fields": False, |  | ||||||
|                 "primary_action": "Log in", |  | ||||||
|                 "sources": [], |  | ||||||
|                 "show_source_labels": False, |  | ||||||
|                 "user_fields": [UserFields.E_MAIL], |  | ||||||
|             }, |  | ||||||
|         ) |         ) | ||||||
|         response = self.client.post(exec_url, {"uid_field": "invalid-string"}, follow=True) |         response = self.client.post(exec_url, {"uid_field": "invalid-string"}, follow=True) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         self.assertJSONEqual( |         self.assertStageResponse(response, flow, component="ak-stage-access-denied") | ||||||
|             force_str(response.content), |  | ||||||
|             { |  | ||||||
|                 "component": "ak-stage-access-denied", |  | ||||||
|                 "error_message": None, |  | ||||||
|                 "flow_info": { |  | ||||||
|                     "background": flow.background_url, |  | ||||||
|                     "cancel_url": reverse("authentik_flows:cancel"), |  | ||||||
|                     "title": "", |  | ||||||
|                 }, |  | ||||||
|                 "type": ChallengeTypes.NATIVE.value, |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  | |||||||
| @ -1,5 +1,5 @@ | |||||||
| """base model tests""" | """base model tests""" | ||||||
| from typing import Callable, Type | from typing import Callable | ||||||
|  |  | ||||||
| from django.test import TestCase | from django.test import TestCase | ||||||
|  |  | ||||||
| @ -12,7 +12,7 @@ class TestModels(TestCase): | |||||||
|     """Generic model properties tests""" |     """Generic model properties tests""" | ||||||
|  |  | ||||||
|  |  | ||||||
| def model_tester_factory(test_model: Type[Stage]) -> Callable: | def model_tester_factory(test_model: type[Stage]) -> Callable: | ||||||
|     """Test a form""" |     """Test a form""" | ||||||
|  |  | ||||||
|     def tester(self: TestModels): |     def tester(self: TestModels): | ||||||
| @ -23,7 +23,7 @@ def model_tester_factory(test_model: Type[Stage]) -> Callable: | |||||||
|             model_class = test_model() |             model_class = test_model() | ||||||
|         self.assertTrue(issubclass(model_class.type, StageView)) |         self.assertTrue(issubclass(model_class.type, StageView)) | ||||||
|         self.assertIsNotNone(test_model.component) |         self.assertIsNotNone(test_model.component) | ||||||
|         _ = model_class.ui_user_settings |         _ = model_class.ui_user_settings() | ||||||
|  |  | ||||||
|     return tester |     return tester | ||||||
|  |  | ||||||
|  | |||||||
| @ -1,5 +1,5 @@ | |||||||
| """stage view tests""" | """stage view tests""" | ||||||
| from typing import Callable, Type | from typing import Callable | ||||||
|  |  | ||||||
| from django.test import RequestFactory, TestCase | from django.test import RequestFactory, TestCase | ||||||
|  |  | ||||||
| @ -16,7 +16,7 @@ class TestViews(TestCase): | |||||||
|         self.exec = FlowExecutorView(request=self.factory.get("/")) |         self.exec = FlowExecutorView(request=self.factory.get("/")) | ||||||
|  |  | ||||||
|  |  | ||||||
| def view_tester_factory(view_class: Type[StageView]) -> Callable: | def view_tester_factory(view_class: type[StageView]) -> Callable: | ||||||
|     """Test a form""" |     """Test a form""" | ||||||
|  |  | ||||||
|     def tester(self: TestViews): |     def tester(self: TestViews): | ||||||
|  | |||||||
| @ -2,7 +2,7 @@ | |||||||
| from contextlib import contextmanager | from contextlib import contextmanager | ||||||
| from copy import deepcopy | from copy import deepcopy | ||||||
| from json import loads | from json import loads | ||||||
| from typing import Any, Type | from typing import Any | ||||||
|  |  | ||||||
| from dacite import from_dict | from dacite import from_dict | ||||||
| from dacite.exceptions import DaciteError | from dacite.exceptions import DaciteError | ||||||
| @ -87,7 +87,7 @@ class FlowImporter: | |||||||
|     def _validate_single(self, entry: FlowBundleEntry) -> BaseSerializer: |     def _validate_single(self, entry: FlowBundleEntry) -> BaseSerializer: | ||||||
|         """Validate a single entry""" |         """Validate a single entry""" | ||||||
|         model_app_label, model_name = entry.model.split(".") |         model_app_label, model_name = entry.model.split(".") | ||||||
|         model: Type[SerializerModel] = apps.get_model(model_app_label, model_name) |         model: type[SerializerModel] = apps.get_model(model_app_label, model_name) | ||||||
|         if not isinstance(model(), ALLOWED_MODELS): |         if not isinstance(model(), ALLOWED_MODELS): | ||||||
|             raise EntryInvalidError(f"Model {model} not allowed") |             raise EntryInvalidError(f"Model {model} not allowed") | ||||||
|  |  | ||||||
|  | |||||||
| @ -10,7 +10,6 @@ from django.http import Http404, HttpRequest, HttpResponse, HttpResponseRedirect | |||||||
| from django.http.request import QueryDict | from django.http.request import QueryDict | ||||||
| from django.shortcuts import get_object_or_404, redirect | from django.shortcuts import get_object_or_404, redirect | ||||||
| from django.template.response import TemplateResponse | from django.template.response import TemplateResponse | ||||||
| from django.urls.base import reverse |  | ||||||
| from django.utils.decorators import method_decorator | from django.utils.decorators import method_decorator | ||||||
| from django.views.decorators.clickjacking import xframe_options_sameorigin | from django.views.decorators.clickjacking import xframe_options_sameorigin | ||||||
| from django.views.generic import View | from django.views.generic import View | ||||||
| @ -19,12 +18,13 @@ from drf_spectacular.utils import OpenApiParameter, PolymorphicProxySerializer, | |||||||
| from rest_framework.permissions import AllowAny | from rest_framework.permissions import AllowAny | ||||||
| from rest_framework.views import APIView | from rest_framework.views import APIView | ||||||
| from sentry_sdk import capture_exception | from sentry_sdk import capture_exception | ||||||
|  | from sentry_sdk.api import set_tag | ||||||
|  | from sentry_sdk.hub import Hub | ||||||
| from structlog.stdlib import BoundLogger, get_logger | from structlog.stdlib import BoundLogger, get_logger | ||||||
|  |  | ||||||
| from authentik.core.models import USER_ATTRIBUTE_DEBUG | from authentik.core.models import USER_ATTRIBUTE_DEBUG | ||||||
| from authentik.events.models import Event, EventAction, cleanse_dict | from authentik.events.models import Event, EventAction, cleanse_dict | ||||||
| from authentik.flows.challenge import ( | from authentik.flows.challenge import ( | ||||||
|     AccessDeniedChallenge, |  | ||||||
|     Challenge, |     Challenge, | ||||||
|     ChallengeResponse, |     ChallengeResponse, | ||||||
|     ChallengeTypes, |     ChallengeTypes, | ||||||
| @ -49,6 +49,7 @@ from authentik.flows.planner import ( | |||||||
|     FlowPlan, |     FlowPlan, | ||||||
|     FlowPlanner, |     FlowPlanner, | ||||||
| ) | ) | ||||||
|  | from authentik.flows.stage import AccessDeniedChallengeView | ||||||
| from authentik.lib.sentry import SentryIgnoredException | from authentik.lib.sentry import SentryIgnoredException | ||||||
| from authentik.lib.utils.errors import exception_to_string | from authentik.lib.utils.errors import exception_to_string | ||||||
| from authentik.lib.utils.reflection import all_subclasses, class_to_path | from authentik.lib.utils.reflection import all_subclasses, class_to_path | ||||||
| @ -126,6 +127,7 @@ class FlowExecutorView(APIView): | |||||||
|         super().setup(request, flow_slug=flow_slug) |         super().setup(request, flow_slug=flow_slug) | ||||||
|         self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug) |         self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug) | ||||||
|         self._logger = get_logger().bind(flow_slug=flow_slug) |         self._logger = get_logger().bind(flow_slug=flow_slug) | ||||||
|  |         set_tag("authentik.flow", self.flow.slug) | ||||||
|  |  | ||||||
|     def handle_invalid_flow(self, exc: BaseException) -> HttpResponse: |     def handle_invalid_flow(self, exc: BaseException) -> HttpResponse: | ||||||
|         """When a flow is non-applicable check if user is on the correct domain""" |         """When a flow is non-applicable check if user is on the correct domain""" | ||||||
| @ -156,74 +158,80 @@ class FlowExecutorView(APIView): | |||||||
|  |  | ||||||
|     # pylint: disable=unused-argument, too-many-return-statements |     # pylint: disable=unused-argument, too-many-return-statements | ||||||
|     def dispatch(self, request: HttpRequest, flow_slug: str) -> HttpResponse: |     def dispatch(self, request: HttpRequest, flow_slug: str) -> HttpResponse: | ||||||
|         get_params = QueryDict(request.GET.get("query", "")) |         with Hub.current.start_span( | ||||||
|         if QS_KEY_TOKEN in get_params: |             op="authentik.flow.executor.dispatch", description=self.flow.slug | ||||||
|             plan = self._check_flow_token(get_params) |         ) as span: | ||||||
|             if plan: |             span.set_data("authentik Flow", self.flow.slug) | ||||||
|                 self.request.session[SESSION_KEY_PLAN] = plan |             get_params = QueryDict(request.GET.get("query", "")) | ||||||
|         # Early check if there's an active Plan for the current session |             if QS_KEY_TOKEN in get_params: | ||||||
|         if SESSION_KEY_PLAN in self.request.session: |                 plan = self._check_flow_token(get_params) | ||||||
|             self.plan = self.request.session[SESSION_KEY_PLAN] |                 if plan: | ||||||
|             if self.plan.flow_pk != self.flow.pk.hex: |                     self.request.session[SESSION_KEY_PLAN] = plan | ||||||
|                 self._logger.warning( |             # Early check if there's an active Plan for the current session | ||||||
|                     "f(exec): Found existing plan for other flow, deleting plan", |             if SESSION_KEY_PLAN in self.request.session: | ||||||
|                 ) |                 self.plan = self.request.session[SESSION_KEY_PLAN] | ||||||
|                 # Existing plan is deleted from session and instance |                 if self.plan.flow_pk != self.flow.pk.hex: | ||||||
|                 self.plan = None |                     self._logger.warning( | ||||||
|                 self.cancel() |                         "f(exec): Found existing plan for other flow, deleting plan", | ||||||
|             self._logger.debug("f(exec): Continuing existing plan") |                     ) | ||||||
|  |                     # Existing plan is deleted from session and instance | ||||||
|  |                     self.plan = None | ||||||
|  |                     self.cancel() | ||||||
|  |                 self._logger.debug("f(exec): Continuing existing plan") | ||||||
|  |  | ||||||
|         # Don't check session again as we've either already loaded the plan or we need to plan |             # Don't check session again as we've either already loaded the plan or we need to plan | ||||||
|         if not self.plan: |             if not self.plan: | ||||||
|             request.session[SESSION_KEY_HISTORY] = [] |                 request.session[SESSION_KEY_HISTORY] = [] | ||||||
|             self._logger.debug("f(exec): No active Plan found, initiating planner") |                 self._logger.debug("f(exec): No active Plan found, initiating planner") | ||||||
|  |                 try: | ||||||
|  |                     self.plan = self._initiate_plan() | ||||||
|  |                 except FlowNonApplicableException as exc: | ||||||
|  |                     self._logger.warning("f(exec): Flow not applicable to current user", exc=exc) | ||||||
|  |                     return to_stage_response(self.request, self.handle_invalid_flow(exc)) | ||||||
|  |                 except EmptyFlowException as exc: | ||||||
|  |                     self._logger.warning("f(exec): Flow is empty", exc=exc) | ||||||
|  |                     # To match behaviour with loading an empty flow plan from cache, | ||||||
|  |                     # we don't show an error message here, but rather call _flow_done() | ||||||
|  |                     return self._flow_done() | ||||||
|  |             # Initial flow request, check if we have an upstream query string passed in | ||||||
|  |             request.session[SESSION_KEY_GET] = get_params | ||||||
|  |             # We don't save the Plan after getting the next stage | ||||||
|  |             # as it hasn't been successfully passed yet | ||||||
|             try: |             try: | ||||||
|                 self.plan = self._initiate_plan() |                 # This is the first time we actually access any attribute on the selected plan | ||||||
|             except FlowNonApplicableException as exc: |                 # if the cached plan is from an older version, it might have different attributes | ||||||
|                 self._logger.warning("f(exec): Flow not applicable to current user", exc=exc) |                 # in which case we just delete the plan and invalidate everything | ||||||
|                 return to_stage_response(self.request, self.handle_invalid_flow(exc)) |                 next_binding = self.plan.next(self.request) | ||||||
|             except EmptyFlowException as exc: |             except Exception as exc:  # pylint: disable=broad-except | ||||||
|                 self._logger.warning("f(exec): Flow is empty", exc=exc) |                 self._logger.warning( | ||||||
|                 # To match behaviour with loading an empty flow plan from cache, |                     "f(exec): found incompatible flow plan, invalidating run", exc=exc | ||||||
|                 # we don't show an error message here, but rather call _flow_done() |                 ) | ||||||
|  |                 keys = cache.keys("flow_*") | ||||||
|  |                 cache.delete_many(keys) | ||||||
|  |                 return self.stage_invalid() | ||||||
|  |             if not next_binding: | ||||||
|  |                 self._logger.debug("f(exec): no more stages, flow is done.") | ||||||
|                 return self._flow_done() |                 return self._flow_done() | ||||||
|         # Initial flow request, check if we have an upstream query string passed in |             self.current_binding = next_binding | ||||||
|         request.session[SESSION_KEY_GET] = get_params |             self.current_stage = next_binding.stage | ||||||
|         # We don't save the Plan after getting the next stage |             self._logger.debug( | ||||||
|         # as it hasn't been successfully passed yet |                 "f(exec): Current stage", | ||||||
|         try: |                 current_stage=self.current_stage, | ||||||
|             # This is the first time we actually access any attribute on the selected plan |                 flow_slug=self.flow.slug, | ||||||
|             # if the cached plan is from an older version, it might have different attributes |             ) | ||||||
|             # in which case we just delete the plan and invalidate everything |             try: | ||||||
|             next_binding = self.plan.next(self.request) |                 stage_cls = self.current_stage.type | ||||||
|         except Exception as exc:  # pylint: disable=broad-except |             except NotImplementedError as exc: | ||||||
|             self._logger.warning("f(exec): found incompatible flow plan, invalidating run", exc=exc) |                 self._logger.debug("Error getting stage type", exc=exc) | ||||||
|             keys = cache.keys("flow_*") |                 return self.stage_invalid() | ||||||
|             cache.delete_many(keys) |             self.current_stage_view = stage_cls(self) | ||||||
|             return self.stage_invalid() |             self.current_stage_view.args = self.args | ||||||
|         if not next_binding: |             self.current_stage_view.kwargs = self.kwargs | ||||||
|             self._logger.debug("f(exec): no more stages, flow is done.") |             self.current_stage_view.request = request | ||||||
|             return self._flow_done() |             try: | ||||||
|         self.current_binding = next_binding |                 return super().dispatch(request) | ||||||
|         self.current_stage = next_binding.stage |             except InvalidStageError as exc: | ||||||
|         self._logger.debug( |                 return self.stage_invalid(str(exc)) | ||||||
|             "f(exec): Current stage", |  | ||||||
|             current_stage=self.current_stage, |  | ||||||
|             flow_slug=self.flow.slug, |  | ||||||
|         ) |  | ||||||
|         try: |  | ||||||
|             stage_cls = self.current_stage.type |  | ||||||
|         except NotImplementedError as exc: |  | ||||||
|             self._logger.debug("Error getting stage type", exc=exc) |  | ||||||
|             return self.stage_invalid() |  | ||||||
|         self.current_stage_view = stage_cls(self) |  | ||||||
|         self.current_stage_view.args = self.args |  | ||||||
|         self.current_stage_view.kwargs = self.kwargs |  | ||||||
|         self.current_stage_view.request = request |  | ||||||
|         try: |  | ||||||
|             return super().dispatch(request) |  | ||||||
|         except InvalidStageError as exc: |  | ||||||
|             return self.stage_invalid(str(exc)) |  | ||||||
|  |  | ||||||
|     def handle_exception(self, exc: Exception) -> HttpResponse: |     def handle_exception(self, exc: Exception) -> HttpResponse: | ||||||
|         """Handle exception in stage execution""" |         """Handle exception in stage execution""" | ||||||
| @ -265,8 +273,15 @@ class FlowExecutorView(APIView): | |||||||
|             stage=self.current_stage, |             stage=self.current_stage, | ||||||
|         ) |         ) | ||||||
|         try: |         try: | ||||||
|             stage_response = self.current_stage_view.get(request, *args, **kwargs) |             with Hub.current.start_span( | ||||||
|             return to_stage_response(request, stage_response) |                 op="authentik.flow.executor.stage", | ||||||
|  |                 description=class_to_path(self.current_stage_view.__class__), | ||||||
|  |             ) as span: | ||||||
|  |                 span.set_data("Method", "GET") | ||||||
|  |                 span.set_data("authentik Stage", self.current_stage_view) | ||||||
|  |                 span.set_data("authentik Flow", self.flow.slug) | ||||||
|  |                 stage_response = self.current_stage_view.get(request, *args, **kwargs) | ||||||
|  |                 return to_stage_response(request, stage_response) | ||||||
|         except Exception as exc:  # pylint: disable=broad-except |         except Exception as exc:  # pylint: disable=broad-except | ||||||
|             return self.handle_exception(exc) |             return self.handle_exception(exc) | ||||||
|  |  | ||||||
| @ -302,8 +317,15 @@ class FlowExecutorView(APIView): | |||||||
|             stage=self.current_stage, |             stage=self.current_stage, | ||||||
|         ) |         ) | ||||||
|         try: |         try: | ||||||
|             stage_response = self.current_stage_view.post(request, *args, **kwargs) |             with Hub.current.start_span( | ||||||
|             return to_stage_response(request, stage_response) |                 op="authentik.flow.executor.stage", | ||||||
|  |                 description=class_to_path(self.current_stage_view.__class__), | ||||||
|  |             ) as span: | ||||||
|  |                 span.set_data("Method", "POST") | ||||||
|  |                 span.set_data("authentik Stage", self.current_stage_view) | ||||||
|  |                 span.set_data("authentik Flow", self.flow.slug) | ||||||
|  |                 stage_response = self.current_stage_view.post(request, *args, **kwargs) | ||||||
|  |                 return to_stage_response(request, stage_response) | ||||||
|         except Exception as exc:  # pylint: disable=broad-except |         except Exception as exc:  # pylint: disable=broad-except | ||||||
|             return self.handle_exception(exc) |             return self.handle_exception(exc) | ||||||
|  |  | ||||||
| @ -383,21 +405,9 @@ class FlowExecutorView(APIView): | |||||||
|         is a superuser.""" |         is a superuser.""" | ||||||
|         self._logger.debug("f(exec): Stage invalid") |         self._logger.debug("f(exec): Stage invalid") | ||||||
|         self.cancel() |         self.cancel() | ||||||
|         response = HttpChallengeResponse( |         challenge_view = AccessDeniedChallengeView(self, error_message) | ||||||
|             AccessDeniedChallenge( |         challenge_view.request = self.request | ||||||
|                 { |         return to_stage_response(self.request, challenge_view.get(self.request)) | ||||||
|                     "error_message": error_message, |  | ||||||
|                     "type": ChallengeTypes.NATIVE.value, |  | ||||||
|                     "component": "ak-stage-access-denied", |  | ||||||
|                     "flow_info": { |  | ||||||
|                         "title": self.flow.title, |  | ||||||
|                         "background": self.flow.background_url, |  | ||||||
|                         "cancel_url": reverse("authentik_flows:cancel"), |  | ||||||
|                     }, |  | ||||||
|                 } |  | ||||||
|             ) |  | ||||||
|         ) |  | ||||||
|         return to_stage_response(self.request, response) |  | ||||||
|  |  | ||||||
|     def cancel(self): |     def cancel(self): | ||||||
|         """Cancel current execution and return a redirect""" |         """Cancel current execution and return a redirect""" | ||||||
|  | |||||||
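For reference, the span and tag calls added in the hunks above use sentry_sdk's Hub-based tracing API. Below is a minimal, standalone sketch of the same pattern; the handler name and return payload are illustrative stand-ins, not authentik code.

```python
# Minimal sketch of the tracing pattern used above, assuming sentry_sdk 1.x.
# `handle_flow_request` and its payload are hypothetical stand-ins.
from sentry_sdk import set_tag
from sentry_sdk.hub import Hub


def handle_flow_request(flow_slug: str) -> dict:
    # Tag the event so errors can later be filtered by flow
    set_tag("authentik.flow", flow_slug)
    # Wrap the expensive part in a span so it shows up in performance traces
    with Hub.current.start_span(
        op="authentik.flow.executor.dispatch", description=flow_slug
    ) as span:
        span.set_data("authentik Flow", flow_slug)
        return {"flow": flow_slug, "handled": True}


print(handle_flow_request("default-authentication-flow"))
```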
| @ -87,9 +87,7 @@ class FlowInspectorView(APIView): | |||||||
|     @extend_schema( |     @extend_schema( | ||||||
|         responses={ |         responses={ | ||||||
|             200: FlowInspectionSerializer(), |             200: FlowInspectionSerializer(), | ||||||
|             400: OpenApiResponse( |             400: OpenApiResponse(description="No flow plan in session."), | ||||||
|                 description="No flow plan in session." |  | ||||||
|             ),  # This error can be raised by the email stage |  | ||||||
|         }, |         }, | ||||||
|         request=OpenApiTypes.NONE, |         request=OpenApiTypes.NONE, | ||||||
|         operation_id="flows_inspector_get", |         operation_id="flows_inspector_get", | ||||||
| @ -106,7 +104,10 @@ class FlowInspectorView(APIView): | |||||||
|         if SESSION_KEY_PLAN in request.session: |         if SESSION_KEY_PLAN in request.session: | ||||||
|             current_plan: FlowPlan = request.session[SESSION_KEY_PLAN] |             current_plan: FlowPlan = request.session[SESSION_KEY_PLAN] | ||||||
|         else: |         else: | ||||||
|             current_plan = request.session[SESSION_KEY_HISTORY][-1] |             try: | ||||||
|  |                 current_plan = request.session[SESSION_KEY_HISTORY][-1] | ||||||
|  |             except IndexError: | ||||||
|  |                 return Response(status=400) | ||||||
|             is_completed = True |             is_completed = True | ||||||
|         current_serializer = FlowInspectorPlanSerializer( |         current_serializer = FlowInspectorPlanSerializer( | ||||||
|             instance=current_plan, context={"request": request} |             instance=current_plan, context={"request": request} | ||||||
|  | |||||||
| @ -20,7 +20,6 @@ web: | |||||||
|   listen: 0.0.0.0:9000 |   listen: 0.0.0.0:9000 | ||||||
|   listen_tls: 0.0.0.0:9443 |   listen_tls: 0.0.0.0:9443 | ||||||
|   listen_metrics: 0.0.0.0:9300 |   listen_metrics: 0.0.0.0:9300 | ||||||
|   load_local_files: false |  | ||||||
|   outpost_port_offset: 0 |   outpost_port_offset: 0 | ||||||
|  |  | ||||||
| redis: | redis: | ||||||
| @ -65,7 +64,7 @@ outposts: | |||||||
|   # %(type)s: Outpost type; proxy, ldap, etc |   # %(type)s: Outpost type; proxy, ldap, etc | ||||||
|   # %(version)s: Current version; 2021.4.1 |   # %(version)s: Current version; 2021.4.1 | ||||||
|   # %(build_hash)s: Build hash if you're running a beta version |   # %(build_hash)s: Build hash if you're running a beta version | ||||||
|   container_image_base: goauthentik.io/%(type)s:%(version)s |   container_image_base: ghcr.io/goauthentik/%(type)s:%(version)s | ||||||
|  |  | ||||||
| cookie_domain: null | cookie_domain: null | ||||||
| disable_update_check: false | disable_update_check: false | ||||||
| @ -79,6 +78,7 @@ footer_links: | |||||||
|   - name: authentik Website |   - name: authentik Website | ||||||
|     href: https://goauthentik.io/?utm_source=authentik |     href: https://goauthentik.io/?utm_source=authentik | ||||||
|  |  | ||||||
|  | default_user_change_name: true | ||||||
| default_user_change_email: true | default_user_change_email: true | ||||||
| default_user_change_username: true | default_user_change_username: true | ||||||
|  |  | ||||||
|  | |||||||
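As a side note, the %(...)s placeholders documented in the comments above are standard Python mapping-style string formatting. A quick illustration, using example values only:

```python
# Example expansion of the container_image_base template; the type and
# version values below are placeholders, not taken from a real deployment.
image_template = "ghcr.io/goauthentik/%(type)s:%(version)s"
image = image_template % {"type": "proxy", "version": "2021.4.1"}
print(image)  # ghcr.io/goauthentik/proxy:2021.4.1
```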
| @ -80,8 +80,9 @@ class BaseEvaluator: | |||||||
|         """Parse and evaluate expression. If the syntax is incorrect, a SyntaxError is raised. |         """Parse and evaluate expression. If the syntax is incorrect, a SyntaxError is raised. | ||||||
|         If any exception is raised during execution, it is raised. |         If any exception is raised during execution, it is raised. | ||||||
|         The result is returned without any type-checking.""" |         The result is returned without any type-checking.""" | ||||||
|         with Hub.current.start_span(op="lib.evaluator.evaluate") as span: |         with Hub.current.start_span(op="authentik.lib.evaluator.evaluate") as span: | ||||||
|             span: Span |             span: Span | ||||||
|  |             span.description = self._filename | ||||||
|             span.set_data("expression", expression_source) |             span.set_data("expression", expression_source) | ||||||
|             param_keys = self._context.keys() |             param_keys = self._context.keys() | ||||||
|             try: |             try: | ||||||
|  | |||||||
| @ -97,7 +97,7 @@ def before_send(event: dict, hint: dict) -> Optional[dict]: | |||||||
|     if "exc_info" in hint: |     if "exc_info" in hint: | ||||||
|         _, exc_value, _ = hint["exc_info"] |         _, exc_value, _ = hint["exc_info"] | ||||||
|         if isinstance(exc_value, ignored_classes): |         if isinstance(exc_value, ignored_classes): | ||||||
|             LOGGER.debug("dropping exception", exception=exc_value) |             LOGGER.debug("dropping exception", exc=exc_value) | ||||||
|             return None |             return None | ||||||
|     if "logger" in event: |     if "logger" in event: | ||||||
|         if event["logger"] in [ |         if event["logger"] in [ | ||||||
| @ -108,9 +108,12 @@ def before_send(event: dict, hint: dict) -> Optional[dict]: | |||||||
|             "multiprocessing", |             "multiprocessing", | ||||||
|             "django_redis", |             "django_redis", | ||||||
|             "django.security.DisallowedHost", |             "django.security.DisallowedHost", | ||||||
|  |             "django_redis.cache", | ||||||
|  |             "celery.backends.redis", | ||||||
|  |             "celery.worker", | ||||||
|         ]: |         ]: | ||||||
|             return None |             return None | ||||||
|     LOGGER.debug("sending event to sentry", exc=exc_value, source_logger=event.get("logger", None)) |     LOGGER.debug("sending event to sentry", exc=exc_value, source_logger=event.get("logger", None)) | ||||||
|     if settings.DEBUG: |     if settings.DEBUG or settings.TEST: | ||||||
|         return None |         return None | ||||||
|     return event |     return event | ||||||
|  | |||||||
| @ -13,4 +13,4 @@ class TestSentry(TestCase): | |||||||
|  |  | ||||||
|     def test_error_sent(self): |     def test_error_sent(self): | ||||||
|         """Test error sent""" |         """Test error sent""" | ||||||
|         self.assertEqual({}, before_send({}, {"exc_info": (0, ValueError(), 0)})) |         self.assertEqual(None, before_send({}, {"exc_info": (0, ValueError(), 0)})) | ||||||
|  | |||||||
| @ -1,5 +1,5 @@ | |||||||
| """base model tests""" | """base model tests""" | ||||||
| from typing import Callable, Type | from typing import Callable | ||||||
|  |  | ||||||
| from django.test import TestCase | from django.test import TestCase | ||||||
| from rest_framework.serializers import BaseSerializer | from rest_framework.serializers import BaseSerializer | ||||||
| @ -13,7 +13,7 @@ class TestModels(TestCase): | |||||||
|     """Generic model properties tests""" |     """Generic model properties tests""" | ||||||
|  |  | ||||||
|  |  | ||||||
| def model_tester_factory(test_model: Type[Stage]) -> Callable: | def model_tester_factory(test_model: type[Stage]) -> Callable: | ||||||
|     """Test a form""" |     """Test a form""" | ||||||
|  |  | ||||||
|     def tester(self: TestModels): |     def tester(self: TestModels): | ||||||
|  | |||||||
| @ -4,6 +4,7 @@ from typing import Any, Optional | |||||||
|  |  | ||||||
| from django.http import HttpRequest | from django.http import HttpRequest | ||||||
| from requests.sessions import Session | from requests.sessions import Session | ||||||
|  | from sentry_sdk.hub import Hub | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| from authentik import ENV_GIT_HASH_KEY, __version__ | from authentik import ENV_GIT_HASH_KEY, __version__ | ||||||
| @ -52,6 +53,12 @@ def _get_outpost_override_ip(request: HttpRequest) -> Optional[str]: | |||||||
|             fake_ip=fake_ip, |             fake_ip=fake_ip, | ||||||
|         ) |         ) | ||||||
|         return None |         return None | ||||||
|  |     # Update sentry scope to include correct IP | ||||||
|  |     user = Hub.current.scope._user | ||||||
|  |     if not user: | ||||||
|  |         user = {} | ||||||
|  |     user["ip_address"] = fake_ip | ||||||
|  |     Hub.current.scope.set_user(user) | ||||||
|     return fake_ip |     return fake_ip | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -2,7 +2,6 @@ | |||||||
| import os | import os | ||||||
| from importlib import import_module | from importlib import import_module | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| from typing import Union |  | ||||||
|  |  | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
| from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME | from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME | ||||||
| @ -30,7 +29,7 @@ def class_to_path(cls: type) -> str: | |||||||
|     return f"{cls.__module__}.{cls.__name__}" |     return f"{cls.__module__}.{cls.__name__}" | ||||||
|  |  | ||||||
|  |  | ||||||
| def path_to_class(path: Union[str, None]) -> Union[type, None]: | def path_to_class(path: str | None) -> type | None: | ||||||
|     """Import module and return class""" |     """Import module and return class""" | ||||||
|     if not path: |     if not path: | ||||||
|         return None |         return None | ||||||
|  | |||||||
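The body of path_to_class is not visible in this hunk; the sketch below pairs the class_to_path implementation shown above with an assumed equivalent import helper to illustrate the round trip (Python 3.10+ union syntax, matching the diff).

```python
# class_to_path is copied from the hunk above; path_to_class is an assumed
# equivalent (its real body is not part of this diff).
from datetime import timedelta
from importlib import import_module


def class_to_path(cls: type) -> str:
    return f"{cls.__module__}.{cls.__name__}"


def path_to_class(path: str | None) -> type | None:
    if not path:
        return None
    module_name, _, class_name = path.rpartition(".")
    return getattr(import_module(module_name), class_name)


# Round trip with a stdlib class instead of an authentik model
assert path_to_class(class_to_path(timedelta)) is timedelta
```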
| @ -34,7 +34,7 @@ def timedelta_from_string(expr: str) -> datetime.timedelta: | |||||||
|         key, value = duration_pair.split("=") |         key, value = duration_pair.split("=") | ||||||
|         if key.lower() not in ALLOWED_KEYS: |         if key.lower() not in ALLOWED_KEYS: | ||||||
|             continue |             continue | ||||||
|         kwargs[key.lower()] = float(value) |         kwargs[key.lower()] = float(value.strip()) | ||||||
|     if len(kwargs) < 1: |     if len(kwargs) < 1: | ||||||
|         raise ValueError("No valid keys to pass to timedelta") |         raise ValueError("No valid keys to pass to timedelta") | ||||||
|     return datetime.timedelta(**kwargs) |     return datetime.timedelta(**kwargs) | ||||||
|  | |||||||
| @ -1,5 +1,5 @@ | |||||||
| """Managed objects manager""" | """Managed objects manager""" | ||||||
| from typing import Callable, Optional, Type | from typing import Callable, Optional | ||||||
|  |  | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
|  |  | ||||||
| @ -11,11 +11,11 @@ LOGGER = get_logger() | |||||||
| class EnsureOp: | class EnsureOp: | ||||||
|     """Ensure operation, executed as part of an ObjectManager run""" |     """Ensure operation, executed as part of an ObjectManager run""" | ||||||
|  |  | ||||||
|     _obj: Type[ManagedModel] |     _obj: type[ManagedModel] | ||||||
|     _managed_uid: str |     _managed_uid: str | ||||||
|     _kwargs: dict |     _kwargs: dict | ||||||
|  |  | ||||||
|     def __init__(self, obj: Type[ManagedModel], managed_uid: str, **kwargs) -> None: |     def __init__(self, obj: type[ManagedModel], managed_uid: str, **kwargs) -> None: | ||||||
|         self._obj = obj |         self._obj = obj | ||||||
|         self._managed_uid = managed_uid |         self._managed_uid = managed_uid | ||||||
|         self._kwargs = kwargs |         self._kwargs = kwargs | ||||||
| @ -32,7 +32,7 @@ class EnsureExists(EnsureOp): | |||||||
|  |  | ||||||
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
|         obj: Type[ManagedModel], |         obj: type[ManagedModel], | ||||||
|         managed_uid: str, |         managed_uid: str, | ||||||
|         created_callback: Optional[Callable] = None, |         created_callback: Optional[Callable] = None, | ||||||
|         **kwargs, |         **kwargs, | ||||||
|  | |||||||
| @ -1,4 +1,6 @@ | |||||||
| """Outpost API Views""" | """Outpost API Views""" | ||||||
|  | from os import environ | ||||||
|  |  | ||||||
| from dacite.core import from_dict | from dacite.core import from_dict | ||||||
| from dacite.exceptions import DaciteError | from dacite.exceptions import DaciteError | ||||||
| from django_filters.filters import ModelMultipleChoiceFilter | from django_filters.filters import ModelMultipleChoiceFilter | ||||||
| @ -12,6 +14,7 @@ from rest_framework.response import Response | |||||||
| from rest_framework.serializers import JSONField, ModelSerializer, ValidationError | from rest_framework.serializers import JSONField, ModelSerializer, ValidationError | ||||||
| from rest_framework.viewsets import ModelViewSet | from rest_framework.viewsets import ModelViewSet | ||||||
|  |  | ||||||
|  | from authentik import ENV_GIT_HASH_KEY | ||||||
| from authentik.core.api.providers import ProviderSerializer | from authentik.core.api.providers import ProviderSerializer | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.core.api.utils import PassiveSerializer, is_dict | from authentik.core.api.utils import PassiveSerializer, is_dict | ||||||
| @ -98,8 +101,12 @@ class OutpostHealthSerializer(PassiveSerializer): | |||||||
|     last_seen = DateTimeField(read_only=True) |     last_seen = DateTimeField(read_only=True) | ||||||
|     version = CharField(read_only=True) |     version = CharField(read_only=True) | ||||||
|     version_should = CharField(read_only=True) |     version_should = CharField(read_only=True) | ||||||
|  |  | ||||||
|     version_outdated = BooleanField(read_only=True) |     version_outdated = BooleanField(read_only=True) | ||||||
|  |  | ||||||
|  |     build_hash = CharField(read_only=True, required=False) | ||||||
|  |     build_hash_should = CharField(read_only=True, required=False) | ||||||
|  |  | ||||||
|  |  | ||||||
| class OutpostFilter(FilterSet): | class OutpostFilter(FilterSet): | ||||||
|     """Filter for Outposts""" |     """Filter for Outposts""" | ||||||
| @ -116,6 +123,7 @@ class OutpostFilter(FilterSet): | |||||||
|             "providers": ["isnull"], |             "providers": ["isnull"], | ||||||
|             "name": ["iexact", "icontains"], |             "name": ["iexact", "icontains"], | ||||||
|             "service_connection__name": ["iexact", "icontains"], |             "service_connection__name": ["iexact", "icontains"], | ||||||
|  |             "managed": ["iexact", "icontains"], | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -145,6 +153,8 @@ class OutpostViewSet(UsedByMixin, ModelViewSet): | |||||||
|                     "version": state.version, |                     "version": state.version, | ||||||
|                     "version_should": state.version_should, |                     "version_should": state.version_should, | ||||||
|                     "version_outdated": state.version_outdated, |                     "version_outdated": state.version_outdated, | ||||||
|  |                     "build_hash": state.build_hash, | ||||||
|  |                     "build_hash_should": environ.get(ENV_GIT_HASH_KEY, ""), | ||||||
|                 } |                 } | ||||||
|             ) |             ) | ||||||
|         return Response(OutpostHealthSerializer(states, many=True).data) |         return Response(OutpostHealthSerializer(states, many=True).data) | ||||||
|  | |||||||
| @ -9,7 +9,7 @@ from dacite import from_dict | |||||||
| from dacite.data import Data | from dacite.data import Data | ||||||
| from guardian.shortcuts import get_objects_for_user | from guardian.shortcuts import get_objects_for_user | ||||||
| from prometheus_client import Gauge | from prometheus_client import Gauge | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import BoundLogger, get_logger | ||||||
|  |  | ||||||
| from authentik.core.channels import AuthJsonConsumer | from authentik.core.channels import AuthJsonConsumer | ||||||
| from authentik.outposts.models import OUTPOST_HELLO_INTERVAL, Outpost, OutpostState | from authentik.outposts.models import OUTPOST_HELLO_INTERVAL, Outpost, OutpostState | ||||||
| @ -23,8 +23,6 @@ GAUGE_OUTPOSTS_LAST_UPDATE = Gauge( | |||||||
|     ["outpost", "uid", "version"], |     ["outpost", "uid", "version"], | ||||||
| ) | ) | ||||||
|  |  | ||||||
| LOGGER = get_logger() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class WebsocketMessageInstruction(IntEnum): | class WebsocketMessageInstruction(IntEnum): | ||||||
|     """Commands which can be triggered over Websocket""" |     """Commands which can be triggered over Websocket""" | ||||||
| @ -51,6 +49,7 @@ class OutpostConsumer(AuthJsonConsumer): | |||||||
|     """Handler for Outposts that connect over websockets for health checks and live updates""" |     """Handler for Outposts that connect over websockets for health checks and live updates""" | ||||||
|  |  | ||||||
|     outpost: Optional[Outpost] = None |     outpost: Optional[Outpost] = None | ||||||
|  |     logger: BoundLogger | ||||||
|  |  | ||||||
|     last_uid: Optional[str] = None |     last_uid: Optional[str] = None | ||||||
|  |  | ||||||
| @ -59,11 +58,20 @@ class OutpostConsumer(AuthJsonConsumer): | |||||||
|     def connect(self): |     def connect(self): | ||||||
|         super().connect() |         super().connect() | ||||||
|         uuid = self.scope["url_route"]["kwargs"]["pk"] |         uuid = self.scope["url_route"]["kwargs"]["pk"] | ||||||
|         outpost = get_objects_for_user(self.user, "authentik_outposts.view_outpost").filter(pk=uuid) |         outpost = ( | ||||||
|         if not outpost.exists(): |             get_objects_for_user(self.user, "authentik_outposts.view_outpost") | ||||||
|  |             .filter(pk=uuid) | ||||||
|  |             .first() | ||||||
|  |         ) | ||||||
|  |         if not outpost: | ||||||
|             raise DenyConnection() |             raise DenyConnection() | ||||||
|         self.accept() |         self.logger = get_logger().bind(outpost=outpost) | ||||||
|         self.outpost = outpost.first() |         try: | ||||||
|  |             self.accept() | ||||||
|  |         except RuntimeError as exc: | ||||||
|  |             self.logger.warning("runtime error during accept", exc=exc) | ||||||
|  |             raise DenyConnection() | ||||||
|  |         self.outpost = outpost | ||||||
|         self.last_uid = self.channel_name |         self.last_uid = self.channel_name | ||||||
|  |  | ||||||
|     # pylint: disable=unused-argument |     # pylint: disable=unused-argument | ||||||
| @ -78,9 +86,8 @@ class OutpostConsumer(AuthJsonConsumer): | |||||||
|                 uid=self.last_uid, |                 uid=self.last_uid, | ||||||
|                 expected=self.outpost.config.kubernetes_replicas, |                 expected=self.outpost.config.kubernetes_replicas, | ||||||
|             ).dec() |             ).dec() | ||||||
|         LOGGER.debug( |         self.logger.debug( | ||||||
|             "removed outpost instance from cache", |             "removed outpost instance from cache", | ||||||
|             outpost=self.outpost, |  | ||||||
|             instance_uuid=self.last_uid, |             instance_uuid=self.last_uid, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
| @ -103,9 +110,8 @@ class OutpostConsumer(AuthJsonConsumer): | |||||||
|                 uid=self.last_uid, |                 uid=self.last_uid, | ||||||
|                 expected=self.outpost.config.kubernetes_replicas, |                 expected=self.outpost.config.kubernetes_replicas, | ||||||
|             ).inc() |             ).inc() | ||||||
|             LOGGER.debug( |             self.logger.debug( | ||||||
|                 "added outpost instance to cache", |                 "added outpost instance to cache", | ||||||
|                 outpost=self.outpost, |  | ||||||
|                 instance_uuid=self.last_uid, |                 instance_uuid=self.last_uid, | ||||||
|             ) |             ) | ||||||
|             self.first_msg = True |             self.first_msg = True | ||||||
|  | |||||||
| @ -9,7 +9,11 @@ from structlog.testing import capture_logs | |||||||
| from authentik import ENV_GIT_HASH_KEY, __version__ | from authentik import ENV_GIT_HASH_KEY, __version__ | ||||||
| from authentik.lib.config import CONFIG | from authentik.lib.config import CONFIG | ||||||
| from authentik.lib.sentry import SentryIgnoredException | from authentik.lib.sentry import SentryIgnoredException | ||||||
| from authentik.outposts.models import Outpost, OutpostServiceConnection | from authentik.outposts.models import ( | ||||||
|  |     Outpost, | ||||||
|  |     OutpostServiceConnection, | ||||||
|  |     OutpostServiceConnectionState, | ||||||
|  | ) | ||||||
|  |  | ||||||
| FIELD_MANAGER = "goauthentik.io" | FIELD_MANAGER = "goauthentik.io" | ||||||
|  |  | ||||||
| @ -28,11 +32,25 @@ class DeploymentPort: | |||||||
|     inner_port: Optional[int] = None |     inner_port: Optional[int] = None | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class BaseClient: | ||||||
|  |     """Base class for custom clients""" | ||||||
|  |  | ||||||
|  |     def fetch_state(self) -> OutpostServiceConnectionState: | ||||||
|  |         """Get state, version info""" | ||||||
|  |         raise NotImplementedError | ||||||
|  |  | ||||||
|  |     def __enter__(self): | ||||||
|  |         return self | ||||||
|  |  | ||||||
|  |     def __exit__(self, exc_type, exc_value, traceback): | ||||||
|  |         """Cleanup after usage""" | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseController: | class BaseController: | ||||||
|     """Base Outpost deployment controller""" |     """Base Outpost deployment controller""" | ||||||
|  |  | ||||||
|     deployment_ports: list[DeploymentPort] |     deployment_ports: list[DeploymentPort] | ||||||
|  |     client: BaseClient | ||||||
|     outpost: Outpost |     outpost: Outpost | ||||||
|     connection: OutpostServiceConnection |     connection: OutpostServiceConnection | ||||||
|  |  | ||||||
| @ -63,6 +81,14 @@ class BaseController: | |||||||
|             self.down() |             self.down() | ||||||
|         return [x["event"] for x in logs] |         return [x["event"] for x in logs] | ||||||
|  |  | ||||||
|  |     def __enter__(self): | ||||||
|  |         return self | ||||||
|  |  | ||||||
|  |     def __exit__(self, exc_type, exc_value, traceback): | ||||||
|  |         """Cleanup after usage""" | ||||||
|  |         if hasattr(self, "client"): | ||||||
|  |             self.client.__exit__(exc_type, exc_value, traceback) | ||||||
|  |  | ||||||
|     def get_static_deployment(self) -> str: |     def get_static_deployment(self) -> str: | ||||||
|         """Return a static deployment configuration""" |         """Return a static deployment configuration""" | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
|  | |||||||
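A hedged sketch of the context-manager contract introduced above: the controller only forwards cleanup to its client if one was actually created (the managed-outpost case skips client creation). The classes below are stand-ins, not authentik code.

```python
# Stand-in classes illustrating the __enter__/__exit__ contract added to
# BaseClient and BaseController above; not authentik code.
class DemoClient:
    def fetch_state(self) -> dict:
        return {"healthy": True, "version": "demo"}

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        print("client cleaned up")


class DemoController:
    def __init__(self, create_client: bool = True):
        if create_client:
            self.client = DemoClient()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Mirrors BaseController.__exit__ above: only clean up if a client exists
        if hasattr(self, "client"):
            self.client.__exit__(exc_type, exc_value, traceback)


with DemoController() as controller:
    print(controller.client.fetch_state())
```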
| @ -1,17 +1,75 @@ | |||||||
| """Docker controller""" | """Docker controller""" | ||||||
| from time import sleep | from time import sleep | ||||||
|  | from typing import Optional | ||||||
|  | from urllib.parse import urlparse | ||||||
|  |  | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
| from django.utils.text import slugify | from django.utils.text import slugify | ||||||
| from docker import DockerClient | from docker import DockerClient as UpstreamDockerClient | ||||||
| from docker.errors import DockerException, NotFound | from docker.errors import DockerException, NotFound | ||||||
| from docker.models.containers import Container | from docker.models.containers import Container | ||||||
|  | from docker.utils.utils import kwargs_from_env | ||||||
|  | from structlog.stdlib import get_logger | ||||||
| from yaml import safe_dump | from yaml import safe_dump | ||||||
|  |  | ||||||
| from authentik import __version__ | from authentik import __version__ | ||||||
| from authentik.outposts.controllers.base import BaseController, ControllerException | from authentik.outposts.controllers.base import BaseClient, BaseController, ControllerException | ||||||
|  | from authentik.outposts.docker_ssh import DockerInlineSSH | ||||||
|  | from authentik.outposts.docker_tls import DockerInlineTLS | ||||||
| from authentik.outposts.managed import MANAGED_OUTPOST | from authentik.outposts.managed import MANAGED_OUTPOST | ||||||
| from authentik.outposts.models import DockerServiceConnection, Outpost, ServiceConnectionInvalid | from authentik.outposts.models import ( | ||||||
|  |     DockerServiceConnection, | ||||||
|  |     Outpost, | ||||||
|  |     OutpostServiceConnectionState, | ||||||
|  |     ServiceConnectionInvalid, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DockerClient(UpstreamDockerClient, BaseClient): | ||||||
|  |     """Custom docker client, which can handle TLS and SSH from a database.""" | ||||||
|  |  | ||||||
|  |     tls: Optional[DockerInlineTLS] | ||||||
|  |     ssh: Optional[DockerInlineSSH] | ||||||
|  |  | ||||||
|  |     def __init__(self, connection: DockerServiceConnection): | ||||||
|  |         self.tls = None | ||||||
|  |         self.ssh = None | ||||||
|  |         if connection.local: | ||||||
|  |             # Same result as DockerClient.from_env | ||||||
|  |             super().__init__(**kwargs_from_env()) | ||||||
|  |         else: | ||||||
|  |             parsed_url = urlparse(connection.url) | ||||||
|  |             tls_config = False | ||||||
|  |             if parsed_url.scheme == "ssh": | ||||||
|  |                 self.ssh = DockerInlineSSH(parsed_url.hostname, connection.tls_authentication) | ||||||
|  |                 self.ssh.write() | ||||||
|  |             else: | ||||||
|  |                 self.tls = DockerInlineTLS( | ||||||
|  |                     verification_kp=connection.tls_verification, | ||||||
|  |                     authentication_kp=connection.tls_authentication, | ||||||
|  |                 ) | ||||||
|  |                 tls_config = self.tls.write() | ||||||
|  |             super().__init__( | ||||||
|  |                 base_url=connection.url, | ||||||
|  |                 tls=tls_config, | ||||||
|  |             ) | ||||||
|  |         self.logger = get_logger() | ||||||
|  |         # Ensure the client actually works | ||||||
|  |         self.containers.list() | ||||||
|  |  | ||||||
|  |     def fetch_state(self) -> OutpostServiceConnectionState: | ||||||
|  |         try: | ||||||
|  |             return OutpostServiceConnectionState(version=self.info()["ServerVersion"], healthy=True) | ||||||
|  |         except (ServiceConnectionInvalid, DockerException): | ||||||
|  |             return OutpostServiceConnectionState(version="", healthy=False) | ||||||
|  |  | ||||||
|  |     def __exit__(self, exc_type, exc_value, traceback): | ||||||
|  |         if self.tls: | ||||||
|  |             self.logger.debug("Cleaning up TLS") | ||||||
|  |             self.tls.cleanup() | ||||||
|  |         if self.ssh: | ||||||
|  |             self.logger.debug("Cleaning up SSH") | ||||||
|  |             self.ssh.cleanup() | ||||||
|  |  | ||||||
|  |  | ||||||
| class DockerController(BaseController): | class DockerController(BaseController): | ||||||
| @ -24,9 +82,12 @@ class DockerController(BaseController): | |||||||
|  |  | ||||||
|     def __init__(self, outpost: Outpost, connection: DockerServiceConnection) -> None: |     def __init__(self, outpost: Outpost, connection: DockerServiceConnection) -> None: | ||||||
|         super().__init__(outpost, connection) |         super().__init__(outpost, connection) | ||||||
|  |         if outpost.managed == MANAGED_OUTPOST: | ||||||
|  |             return | ||||||
|         try: |         try: | ||||||
|             self.client = connection.client() |             self.client = DockerClient(connection) | ||||||
|         except ServiceConnectionInvalid as exc: |         except DockerException as exc: | ||||||
|  |             self.logger.warning(exc) | ||||||
|             raise ControllerException from exc |             raise ControllerException from exc | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
| @ -108,7 +169,7 @@ class DockerController(BaseController): | |||||||
|         image = self.get_container_image() |         image = self.get_container_image() | ||||||
|         try: |         try: | ||||||
|             self.client.images.pull(image) |             self.client.images.pull(image) | ||||||
|         except DockerException: |         except DockerException:  # pragma: no cover | ||||||
|             image = f"goauthentik.io/{self.outpost.type}:latest" |             image = f"goauthentik.io/{self.outpost.type}:latest" | ||||||
|             self.client.images.pull(image) |             self.client.images.pull(image) | ||||||
|         return image |         return image | ||||||
| @ -142,7 +203,7 @@ class DockerController(BaseController): | |||||||
|                 True, |                 True, | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|     def _migrate_container_name(self): |     def _migrate_container_name(self):  # pragma: no cover | ||||||
|         """Migrate 2021.9 to 2021.10+""" |         """Migrate 2021.9 to 2021.10+""" | ||||||
|         old_name = f"authentik-proxy-{self.outpost.uuid.hex}" |         old_name = f"authentik-proxy-{self.outpost.uuid.hex}" | ||||||
|         try: |         try: | ||||||
| @ -167,7 +228,7 @@ class DockerController(BaseController): | |||||||
|             # Check if the container is out of date, delete it and retry |             # Check if the container is out of date, delete it and retry | ||||||
|             if len(container.image.tags) > 0: |             if len(container.image.tags) > 0: | ||||||
|                 should_image = self.try_pull_image() |                 should_image = self.try_pull_image() | ||||||
|                 if should_image not in container.image.tags: |                 if should_image not in container.image.tags:  # pragma: no cover | ||||||
|                     self.logger.info( |                     self.logger.info( | ||||||
|                         "Container has mismatched image, re-creating...", |                         "Container has mismatched image, re-creating...", | ||||||
|                         has=container.image.tags, |                         has=container.image.tags, | ||||||
| @ -225,12 +286,14 @@ class DockerController(BaseController): | |||||||
|             raise ControllerException(str(exc)) from exc |             raise ControllerException(str(exc)) from exc | ||||||
|  |  | ||||||
|     def down(self): |     def down(self): | ||||||
|         if self.outpost.managed != MANAGED_OUTPOST: |         if self.outpost.managed == MANAGED_OUTPOST: | ||||||
|             return |             return | ||||||
|         try: |         try: | ||||||
|             container, _ = self._get_container() |             container, _ = self._get_container() | ||||||
|             if container.status == "running": |             if container.status == "running": | ||||||
|  |                 self.logger.info("Stopping container.") | ||||||
|                 container.kill() |                 container.kill() | ||||||
|  |             self.logger.info("Removing container.") | ||||||
|             container.remove(force=True) |             container.remove(force=True) | ||||||
|         except DockerException as exc: |         except DockerException as exc: | ||||||
|             raise ControllerException(str(exc)) from exc |             raise ControllerException(str(exc)) from exc | ||||||
|  | |||||||
| @ -20,6 +20,11 @@ if TYPE_CHECKING: | |||||||
| T = TypeVar("T", V1Pod, V1Deployment) | T = TypeVar("T", V1Pod, V1Deployment) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def get_version() -> str: | ||||||
|  |     """Wrapper for __version__ to make testing easier""" | ||||||
|  |     return __version__ | ||||||
|  |  | ||||||
|  |  | ||||||
| class KubernetesObjectReconciler(Generic[T]): | class KubernetesObjectReconciler(Generic[T]): | ||||||
|     """Base Kubernetes Reconciler, handles the basic logic.""" |     """Base Kubernetes Reconciler, handles the basic logic.""" | ||||||
|  |  | ||||||
| @ -146,13 +151,13 @@ class KubernetesObjectReconciler(Generic[T]): | |||||||
|         return V1ObjectMeta( |         return V1ObjectMeta( | ||||||
|             namespace=self.namespace, |             namespace=self.namespace, | ||||||
|             labels={ |             labels={ | ||||||
|                 "app.kubernetes.io/name": f"authentik-{self.controller.outpost.type.lower()}", |  | ||||||
|                 "app.kubernetes.io/instance": slugify(self.controller.outpost.name), |                 "app.kubernetes.io/instance": slugify(self.controller.outpost.name), | ||||||
|                 "app.kubernetes.io/version": __version__, |  | ||||||
|                 "app.kubernetes.io/managed-by": "goauthentik.io", |                 "app.kubernetes.io/managed-by": "goauthentik.io", | ||||||
|                 "goauthentik.io/outpost-uuid": self.controller.outpost.uuid.hex, |                 "app.kubernetes.io/name": f"authentik-{self.controller.outpost.type.lower()}", | ||||||
|                 "goauthentik.io/outpost-type": str(self.controller.outpost.type), |                 "app.kubernetes.io/version": get_version(), | ||||||
|                 "goauthentik.io/outpost-name": slugify(self.controller.outpost.name), |                 "goauthentik.io/outpost-name": slugify(self.controller.outpost.name), | ||||||
|  |                 "goauthentik.io/outpost-type": str(self.controller.outpost.type), | ||||||
|  |                 "goauthentik.io/outpost-uuid": self.controller.outpost.uuid.hex, | ||||||
|             }, |             }, | ||||||
|             **kwargs, |             **kwargs, | ||||||
|         ) |         ) | ||||||
|  | |||||||
| @ -1,34 +1,67 @@ | |||||||
| """Kubernetes deployment controller""" | """Kubernetes deployment controller""" | ||||||
| from io import StringIO | from io import StringIO | ||||||
| from typing import Type |  | ||||||
|  |  | ||||||
|  | from kubernetes.client import VersionApi, VersionInfo | ||||||
| from kubernetes.client.api_client import ApiClient | from kubernetes.client.api_client import ApiClient | ||||||
|  | from kubernetes.client.configuration import Configuration | ||||||
| from kubernetes.client.exceptions import OpenApiException | from kubernetes.client.exceptions import OpenApiException | ||||||
|  | from kubernetes.config.config_exception import ConfigException | ||||||
|  | from kubernetes.config.incluster_config import load_incluster_config | ||||||
|  | from kubernetes.config.kube_config import load_kube_config_from_dict | ||||||
| from structlog.testing import capture_logs | from structlog.testing import capture_logs | ||||||
| from urllib3.exceptions import HTTPError | from urllib3.exceptions import HTTPError | ||||||
| from yaml import dump_all | from yaml import dump_all | ||||||
|  |  | ||||||
| from authentik.outposts.controllers.base import BaseController, ControllerException | from authentik.outposts.controllers.base import BaseClient, BaseController, ControllerException | ||||||
| from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler | from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler | ||||||
| from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler | from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler | ||||||
| from authentik.outposts.controllers.k8s.secret import SecretReconciler | from authentik.outposts.controllers.k8s.secret import SecretReconciler | ||||||
| from authentik.outposts.controllers.k8s.service import ServiceReconciler | from authentik.outposts.controllers.k8s.service import ServiceReconciler | ||||||
| from authentik.outposts.controllers.k8s.service_monitor import PrometheusServiceMonitorReconciler | from authentik.outposts.controllers.k8s.service_monitor import PrometheusServiceMonitorReconciler | ||||||
| from authentik.outposts.models import KubernetesServiceConnection, Outpost, ServiceConnectionInvalid | from authentik.outposts.models import ( | ||||||
|  |     KubernetesServiceConnection, | ||||||
|  |     Outpost, | ||||||
|  |     OutpostServiceConnectionState, | ||||||
|  |     ServiceConnectionInvalid, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class KubernetesClient(ApiClient, BaseClient): | ||||||
|  |     """Custom kubernetes client based on service connection""" | ||||||
|  |  | ||||||
|  |     def __init__(self, connection: KubernetesServiceConnection): | ||||||
|  |         config = Configuration() | ||||||
|  |         try: | ||||||
|  |             if connection.local: | ||||||
|  |                 load_incluster_config(client_configuration=config) | ||||||
|  |             else: | ||||||
|  |                 load_kube_config_from_dict(connection.kubeconfig, client_configuration=config) | ||||||
|  |             super().__init__(config) | ||||||
|  |         except ConfigException as exc: | ||||||
|  |             raise ServiceConnectionInvalid from exc | ||||||
|  |  | ||||||
|  |     def fetch_state(self) -> OutpostServiceConnectionState: | ||||||
|  |         """Get version info""" | ||||||
|  |         try: | ||||||
|  |             api_instance = VersionApi(self) | ||||||
|  |             version: VersionInfo = api_instance.get_code() | ||||||
|  |             return OutpostServiceConnectionState(version=version.git_version, healthy=True) | ||||||
|  |         except (OpenApiException, HTTPError, ServiceConnectionInvalid): | ||||||
|  |             return OutpostServiceConnectionState(version="", healthy=False) | ||||||
|  |  | ||||||
|  |  | ||||||
| class KubernetesController(BaseController): | class KubernetesController(BaseController): | ||||||
|     """Manage deployment of outpost in kubernetes""" |     """Manage deployment of outpost in kubernetes""" | ||||||
|  |  | ||||||
|     reconcilers: dict[str, Type[KubernetesObjectReconciler]] |     reconcilers: dict[str, type[KubernetesObjectReconciler]] | ||||||
|     reconcile_order: list[str] |     reconcile_order: list[str] | ||||||
|  |  | ||||||
|     client: ApiClient |     client: KubernetesClient | ||||||
|     connection: KubernetesServiceConnection |     connection: KubernetesServiceConnection | ||||||
|  |  | ||||||
|     def __init__(self, outpost: Outpost, connection: KubernetesServiceConnection) -> None: |     def __init__(self, outpost: Outpost, connection: KubernetesServiceConnection) -> None: | ||||||
|         super().__init__(outpost, connection) |         super().__init__(outpost, connection) | ||||||
|         self.client = connection.client() |         self.client = KubernetesClient(connection) | ||||||
|         self.reconcilers = { |         self.reconcilers = { | ||||||
|             "secret": SecretReconciler, |             "secret": SecretReconciler, | ||||||
|             "deployment": DeploymentReconciler, |             "deployment": DeploymentReconciler, | ||||||
|  | |||||||

							
								
								
									
| 82 | authentik/outposts/docker_ssh.py | Normal file | ||||||
							| @ -0,0 +1,82 @@ | |||||||
|  | """Docker SSH helper""" | ||||||
|  | import os | ||||||
|  | from pathlib import Path | ||||||
|  | from tempfile import gettempdir | ||||||
|  |  | ||||||
|  | from authentik.crypto.models import CertificateKeyPair | ||||||
|  |  | ||||||
|  | HEADER = "### Managed by authentik" | ||||||
|  | FOOTER = "### End Managed by authentik" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def opener(path, flags): | ||||||
|  |     """File opener to create files as 700 perms""" | ||||||
|  |     return os.open(path, flags, 0o700) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DockerInlineSSH: | ||||||
|  |     """Create paramiko ssh config from CertificateKeyPair""" | ||||||
|  |  | ||||||
|  |     host: str | ||||||
|  |     keypair: CertificateKeyPair | ||||||
|  |  | ||||||
|  |     key_path: str | ||||||
|  |     config_path: Path | ||||||
|  |     header: str | ||||||
|  |  | ||||||
|  |     def __init__(self, host: str, keypair: CertificateKeyPair) -> None: | ||||||
|  |         self.host = host | ||||||
|  |         self.keypair = keypair | ||||||
|  |         self.config_path = Path("~/.ssh/config").expanduser() | ||||||
|  |         self.header = f"{HEADER} - {self.host}\n" | ||||||
|  |  | ||||||
|  |     def write_config(self, key_path: str) -> bool: | ||||||
|  |         """Update the local user's ssh config file""" | ||||||
|  |         with open(self.config_path, "a+", encoding="utf-8") as ssh_config: | ||||||
|  |             if self.header in ssh_config.readlines(): | ||||||
|  |                 return False | ||||||
|  |             ssh_config.writelines( | ||||||
|  |                 [ | ||||||
|  |                     self.header, | ||||||
|  |                     f"Host {self.host}\n", | ||||||
|  |                     f"    IdentityFile {key_path}\n", | ||||||
|  |                     f"{FOOTER}\n", | ||||||
|  |                     "\n", | ||||||
|  |                 ] | ||||||
|  |             ) | ||||||
|  |         return True | ||||||
|  |  | ||||||
|  |     def write_key(self): | ||||||
|  |         """Write keypair's private key to a temporary file""" | ||||||
|  |         path = Path(gettempdir(), f"{self.keypair.pk}_private.pem") | ||||||
|  |         with open(path, "w", encoding="utf8", opener=opener) as _file: | ||||||
|  |             _file.write(self.keypair.key_data) | ||||||
|  |         return str(path) | ||||||
|  |  | ||||||
|  |     def write(self): | ||||||
|  |         """Write keyfile and update ssh config""" | ||||||
|  |         self.key_path = self.write_key() | ||||||
|  |         was_written = self.write_config(self.key_path) | ||||||
|  |         if not was_written: | ||||||
|  |             self.cleanup() | ||||||
|  |  | ||||||
|  |     def cleanup(self): | ||||||
|  |         """Cleanup when we're done""" | ||||||
|  |         try: | ||||||
|  |             os.unlink(self.key_path) | ||||||
|  |             with open(self.config_path, "r+", encoding="utf-8") as ssh_config: | ||||||
|  |                 start = 0 | ||||||
|  |                 end = 0 | ||||||
|  |                 lines = ssh_config.readlines() | ||||||
|  |                 for idx, line in enumerate(lines): | ||||||
|  |                     if line == self.header: | ||||||
|  |                         start = idx | ||||||
|  |                     if start != 0 and line == f"{FOOTER}\n": | ||||||
|  |                         end = idx | ||||||
|  |             with open(self.config_path, "w+", encoding="utf-8") as ssh_config: | ||||||
|  |                 lines = lines[:start] + lines[end + 2 :] | ||||||
|  |                 ssh_config.writelines(lines) | ||||||
|  |         except OSError: | ||||||
|  |             # If we fail deleting a file it doesn't matter that much | ||||||
|  |             # since we're just in a container | ||||||
|  |             pass | ||||||
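Hedged usage sketch for the new DockerInlineSSH helper: write() persists the key and a managed Host block, cleanup() removes both. The keypair object below is a hypothetical stand-in exposing only the attributes the helper reads, and the host name is an example; running this assumes an authentik checkout where the module is importable.

```python
# Hypothetical usage of the helper added above; `keypair` stands in for a
# CertificateKeyPair and only provides the attributes DockerInlineSSH uses.
from types import SimpleNamespace

from authentik.outposts.docker_ssh import DockerInlineSSH

keypair = SimpleNamespace(
    pk="example-kp",
    key_data="-----BEGIN OPENSSH PRIVATE KEY-----\n...",  # placeholder key material
)

ssh = DockerInlineSSH("docker-host.example.com", keypair)
ssh.write()  # writes the temp key file and a managed block in ~/.ssh/config
try:
    pass  # talk to the Docker daemon over ssh://docker-host.example.com here
finally:
    ssh.cleanup()  # removes the key file and the managed config block
```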
| @ -1,4 +1,5 @@ | |||||||
| """Create Docker TLSConfig from CertificateKeyPair""" | """Create Docker TLSConfig from CertificateKeyPair""" | ||||||
|  | from os import unlink | ||||||
| from pathlib import Path | from pathlib import Path | ||||||
| from tempfile import gettempdir | from tempfile import gettempdir | ||||||
| from typing import Optional | from typing import Optional | ||||||
| @ -14,6 +15,8 @@ class DockerInlineTLS: | |||||||
|     verification_kp: Optional[CertificateKeyPair] |     verification_kp: Optional[CertificateKeyPair] | ||||||
|     authentication_kp: Optional[CertificateKeyPair] |     authentication_kp: Optional[CertificateKeyPair] | ||||||
|  |  | ||||||
|  |     _paths: list[str] | ||||||
|  |  | ||||||
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
|         verification_kp: Optional[CertificateKeyPair], |         verification_kp: Optional[CertificateKeyPair], | ||||||
| @ -21,14 +24,21 @@ class DockerInlineTLS: | |||||||
|     ) -> None: |     ) -> None: | ||||||
|         self.verification_kp = verification_kp |         self.verification_kp = verification_kp | ||||||
|         self.authentication_kp = authentication_kp |         self.authentication_kp = authentication_kp | ||||||
|  |         self._paths = [] | ||||||
|  |  | ||||||
|     def write_file(self, name: str, contents: str) -> str: |     def write_file(self, name: str, contents: str) -> str: | ||||||
|         """Wrapper for mkstemp that uses fdopen""" |         """Wrapper for mkstemp that uses fdopen""" | ||||||
|         path = Path(gettempdir(), name) |         path = Path(gettempdir(), name) | ||||||
|         with open(path, "w", encoding="utf8") as _file: |         with open(path, "w", encoding="utf8") as _file: | ||||||
|             _file.write(contents) |             _file.write(contents) | ||||||
|  |         self._paths.append(str(path)) | ||||||
|         return str(path) |         return str(path) | ||||||
|  |  | ||||||
|  |     def cleanup(self): | ||||||
|  |         """Clean up certificates when we're done""" | ||||||
|  |         for path in self._paths: | ||||||
|  |             unlink(path) | ||||||
|  |  | ||||||
|     def write(self) -> TLSConfig: |     def write(self) -> TLSConfig: | ||||||
|         """Create TLSConfig with Certificate Key pairs""" |         """Create TLSConfig with Certificate Key pairs""" | ||||||
|         # So yes, this is quite ugly. But sadly, there is no clean way to pass |         # So yes, this is quite ugly. But sadly, there is no clean way to pass | ||||||
|  | |||||||
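Editor's note: the change above makes `DockerInlineTLS` remember every path it writes in `_paths` so a single `cleanup()` call can unlink them once the TLS material is no longer needed. A self-contained sketch of that path-tracking pattern; the class and file names here are illustrative, not authentik's.

```python
from os import path, unlink
from pathlib import Path
from tempfile import gettempdir

class TempFileSet:
    """Sketch of the pattern: record every temp file written, remove them later."""

    def __init__(self):
        self._paths: list[str] = []

    def write_file(self, name: str, contents: str) -> str:
        # Write the file into the temp directory and remember its path.
        file_path = Path(gettempdir(), name)
        file_path.write_text(contents, encoding="utf8")
        self._paths.append(str(file_path))
        return str(file_path)

    def cleanup(self):
        # Unlink everything that write_file() recorded.
        for recorded in self._paths:
            unlink(recorded)

files = TempFileSet()
cert = files.write_file("example_cert.pem", "-----BEGIN CERTIFICATE-----\n...")
assert path.exists(cert)
files.cleanup()
assert not path.exists(cert)
```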
| @ -2,7 +2,7 @@ | |||||||
| from dataclasses import asdict, dataclass, field | from dataclasses import asdict, dataclass, field | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
| from os import environ | from os import environ | ||||||
| from typing import Iterable, Optional, Union | from typing import Iterable, Optional | ||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
|  |  | ||||||
| from dacite import from_dict | from dacite import from_dict | ||||||
| @ -11,21 +11,11 @@ from django.core.cache import cache | |||||||
| from django.db import IntegrityError, models, transaction | from django.db import IntegrityError, models, transaction | ||||||
| from django.db.models.base import Model | from django.db.models.base import Model | ||||||
| from django.utils.translation import gettext_lazy as _ | from django.utils.translation import gettext_lazy as _ | ||||||
| from docker.client import DockerClient |  | ||||||
| from docker.errors import DockerException |  | ||||||
| from guardian.models import UserObjectPermission | from guardian.models import UserObjectPermission | ||||||
| from guardian.shortcuts import assign_perm | from guardian.shortcuts import assign_perm | ||||||
| from kubernetes.client import VersionApi, VersionInfo |  | ||||||
| from kubernetes.client.api_client import ApiClient |  | ||||||
| from kubernetes.client.configuration import Configuration |  | ||||||
| from kubernetes.client.exceptions import OpenApiException |  | ||||||
| from kubernetes.config.config_exception import ConfigException |  | ||||||
| from kubernetes.config.incluster_config import load_incluster_config |  | ||||||
| from kubernetes.config.kube_config import load_kube_config_from_dict |  | ||||||
| from model_utils.managers import InheritanceManager | from model_utils.managers import InheritanceManager | ||||||
| from packaging.version import LegacyVersion, Version, parse | from packaging.version import LegacyVersion, Version, parse | ||||||
| from structlog.stdlib import get_logger | from structlog.stdlib import get_logger | ||||||
| from urllib3.exceptions import HTTPError |  | ||||||
|  |  | ||||||
| from authentik import ENV_GIT_HASH_KEY, __version__ | from authentik import ENV_GIT_HASH_KEY, __version__ | ||||||
| from authentik.core.models import ( | from authentik.core.models import ( | ||||||
| @ -44,7 +34,7 @@ from authentik.lib.sentry import SentryIgnoredException | |||||||
| from authentik.lib.utils.errors import exception_to_string | from authentik.lib.utils.errors import exception_to_string | ||||||
| from authentik.managed.models import ManagedModel | from authentik.managed.models import ManagedModel | ||||||
| from authentik.outposts.controllers.k8s.utils import get_namespace | from authentik.outposts.controllers.k8s.utils import get_namespace | ||||||
| from authentik.outposts.docker_tls import DockerInlineTLS | from authentik.tenants.models import Tenant | ||||||
|  |  | ||||||
| OUR_VERSION = parse(__version__) | OUR_VERSION = parse(__version__) | ||||||
| OUTPOST_HELLO_INTERVAL = 10 | OUTPOST_HELLO_INTERVAL = 10 | ||||||
| @ -86,7 +76,7 @@ class OutpostConfig: | |||||||
| class OutpostModel(Model): | class OutpostModel(Model): | ||||||
|     """Base model for providers that need more objects than just themselves""" |     """Base model for providers that need more objects than just themselves""" | ||||||
|  |  | ||||||
|     def get_required_objects(self) -> Iterable[Union[models.Model, str]]: |     def get_required_objects(self) -> Iterable[models.Model | str]: | ||||||
|         """Return a list of all required objects""" |         """Return a list of all required objects""" | ||||||
|         return [self] |         return [self] | ||||||
|  |  | ||||||
| @ -149,10 +139,6 @@ class OutpostServiceConnection(models.Model): | |||||||
|             return OutpostServiceConnectionState("", False) |             return OutpostServiceConnectionState("", False) | ||||||
|         return state |         return state | ||||||
|  |  | ||||||
|     def fetch_state(self) -> OutpostServiceConnectionState: |  | ||||||
|         """Fetch current Service Connection state""" |  | ||||||
|         raise NotImplementedError |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def component(self) -> str: |     def component(self) -> str: | ||||||
|         """Return component used to edit this object""" |         """Return component used to edit this object""" | ||||||
| @ -210,35 +196,6 @@ class DockerServiceConnection(OutpostServiceConnection): | |||||||
|     def __str__(self) -> str: |     def __str__(self) -> str: | ||||||
|         return f"Docker Service-Connection {self.name}" |         return f"Docker Service-Connection {self.name}" | ||||||
|  |  | ||||||
|     def client(self) -> DockerClient: |  | ||||||
|         """Get DockerClient""" |  | ||||||
|         try: |  | ||||||
|             client = None |  | ||||||
|             if self.local: |  | ||||||
|                 client = DockerClient.from_env() |  | ||||||
|             else: |  | ||||||
|                 client = DockerClient( |  | ||||||
|                     base_url=self.url, |  | ||||||
|                     tls=DockerInlineTLS( |  | ||||||
|                         verification_kp=self.tls_verification, |  | ||||||
|                         authentication_kp=self.tls_authentication, |  | ||||||
|                     ).write(), |  | ||||||
|                 ) |  | ||||||
|             client.containers.list() |  | ||||||
|         except DockerException as exc: |  | ||||||
|             LOGGER.warning(exc) |  | ||||||
|             raise ServiceConnectionInvalid from exc |  | ||||||
|         return client |  | ||||||
|  |  | ||||||
|     def fetch_state(self) -> OutpostServiceConnectionState: |  | ||||||
|         try: |  | ||||||
|             client = self.client() |  | ||||||
|             return OutpostServiceConnectionState( |  | ||||||
|                 version=client.info()["ServerVersion"], healthy=True |  | ||||||
|             ) |  | ||||||
|         except ServiceConnectionInvalid: |  | ||||||
|             return OutpostServiceConnectionState(version="", healthy=False) |  | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|  |  | ||||||
|         verbose_name = _("Docker Service-Connection") |         verbose_name = _("Docker Service-Connection") | ||||||
| @ -265,27 +222,6 @@ class KubernetesServiceConnection(OutpostServiceConnection): | |||||||
|     def __str__(self) -> str: |     def __str__(self) -> str: | ||||||
|         return f"Kubernetes Service-Connection {self.name}" |         return f"Kubernetes Service-Connection {self.name}" | ||||||
|  |  | ||||||
|     def fetch_state(self) -> OutpostServiceConnectionState: |  | ||||||
|         try: |  | ||||||
|             client = self.client() |  | ||||||
|             api_instance = VersionApi(client) |  | ||||||
|             version: VersionInfo = api_instance.get_code() |  | ||||||
|             return OutpostServiceConnectionState(version=version.git_version, healthy=True) |  | ||||||
|         except (OpenApiException, HTTPError, ServiceConnectionInvalid): |  | ||||||
|             return OutpostServiceConnectionState(version="", healthy=False) |  | ||||||
|  |  | ||||||
|     def client(self) -> ApiClient: |  | ||||||
|         """Get Kubernetes client configured from kubeconfig""" |  | ||||||
|         config = Configuration() |  | ||||||
|         try: |  | ||||||
|             if self.local: |  | ||||||
|                 load_incluster_config(client_configuration=config) |  | ||||||
|             else: |  | ||||||
|                 load_kube_config_from_dict(self.kubeconfig, client_configuration=config) |  | ||||||
|             return ApiClient(config) |  | ||||||
|         except ConfigException as exc: |  | ||||||
|             raise ServiceConnectionInvalid from exc |  | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|  |  | ||||||
|         verbose_name = _("Kubernetes Service-Connection") |         verbose_name = _("Kubernetes Service-Connection") | ||||||
| @ -385,7 +321,8 @@ class Outpost(ManagedModel): | |||||||
|                     user.user_permissions.add(permission.first()) |                     user.user_permissions.add(permission.first()) | ||||||
|         LOGGER.debug( |         LOGGER.debug( | ||||||
|             "Updated service account's permissions", |             "Updated service account's permissions", | ||||||
|             perms=UserObjectPermission.objects.filter(user=user), |             obj_perms=UserObjectPermission.objects.filter(user=user), | ||||||
|  |             perms=user.user_permissions.all(), | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
| @ -401,6 +338,7 @@ class Outpost(ManagedModel): | |||||||
|             user = users.first() |             user = users.first() | ||||||
|         user.attributes[USER_ATTRIBUTE_SA] = True |         user.attributes[USER_ATTRIBUTE_SA] = True | ||||||
|         user.attributes[USER_ATTRIBUTE_CAN_OVERRIDE_IP] = True |         user.attributes[USER_ATTRIBUTE_CAN_OVERRIDE_IP] = True | ||||||
|  |         user.name = f"Outpost {self.name} Service-Account" | ||||||
|         user.save() |         user.save() | ||||||
|         if should_create_user: |         if should_create_user: | ||||||
|             self.build_user_permissions(user) |             self.build_user_permissions(user) | ||||||
| @ -437,9 +375,9 @@ class Outpost(ManagedModel): | |||||||
|             Token.objects.filter(identifier=self.token_identifier).delete() |             Token.objects.filter(identifier=self.token_identifier).delete() | ||||||
|             return self.token |             return self.token | ||||||
|  |  | ||||||
|     def get_required_objects(self) -> Iterable[Union[models.Model, str]]: |     def get_required_objects(self) -> Iterable[models.Model | str]: | ||||||
|         """Get an iterator of all objects the user needs read access to""" |         """Get an iterator of all objects the user needs read access to""" | ||||||
|         objects: list[Union[models.Model, str]] = [ |         objects: list[models.Model | str] = [ | ||||||
|             self, |             self, | ||||||
|             "authentik_events.add_event", |             "authentik_events.add_event", | ||||||
|         ] |         ] | ||||||
| @ -448,6 +386,10 @@ class Outpost(ManagedModel): | |||||||
|                 objects.extend(provider.get_required_objects()) |                 objects.extend(provider.get_required_objects()) | ||||||
|             else: |             else: | ||||||
|                 objects.append(provider) |                 objects.append(provider) | ||||||
|  |         if self.managed: | ||||||
|  |             for tenant in Tenant.objects.filter(web_certificate__isnull=False): | ||||||
|  |                 objects.append(tenant) | ||||||
|  |                 objects.append(tenant.web_certificate) | ||||||
|         return objects |         return objects | ||||||
|  |  | ||||||
|     def __str__(self) -> str: |     def __str__(self) -> str: | ||||||
| @ -462,7 +404,7 @@ class OutpostState: | |||||||
|     channel_ids: list[str] = field(default_factory=list) |     channel_ids: list[str] = field(default_factory=list) | ||||||
|     last_seen: Optional[datetime] = field(default=None) |     last_seen: Optional[datetime] = field(default=None) | ||||||
|     version: Optional[str] = field(default=None) |     version: Optional[str] = field(default=None) | ||||||
|     version_should: Union[Version, LegacyVersion] = field(default=OUR_VERSION) |     version_should: Version | LegacyVersion = field(default=OUR_VERSION) | ||||||
|     build_hash: str = field(default="") |     build_hash: str = field(default="") | ||||||
|  |  | ||||||
|     _outpost: Optional[Outpost] = field(default=None) |     _outpost: Optional[Outpost] = field(default=None) | ||||||
| @ -480,6 +422,8 @@ class OutpostState: | |||||||
|     def for_outpost(outpost: Outpost) -> list["OutpostState"]: |     def for_outpost(outpost: Outpost) -> list["OutpostState"]: | ||||||
|         """Get all states for an outpost""" |         """Get all states for an outpost""" | ||||||
|         keys = cache.keys(f"{outpost.state_cache_prefix}_*") |         keys = cache.keys(f"{outpost.state_cache_prefix}_*") | ||||||
|  |         if not keys: | ||||||
|  |             return [] | ||||||
|         states = [] |         states = [] | ||||||
|         for key in keys: |         for key in keys: | ||||||
|             instance_uid = key.replace(f"{outpost.state_cache_prefix}_", "") |             instance_uid = key.replace(f"{outpost.state_cache_prefix}_", "") | ||||||
|  | |||||||
| @ -10,6 +10,7 @@ from authentik.crypto.models import CertificateKeyPair | |||||||
| from authentik.lib.utils.reflection import class_to_path | from authentik.lib.utils.reflection import class_to_path | ||||||
| from authentik.outposts.models import Outpost, OutpostServiceConnection | from authentik.outposts.models import Outpost, OutpostServiceConnection | ||||||
| from authentik.outposts.tasks import CACHE_KEY_OUTPOST_DOWN, outpost_controller, outpost_post_save | from authentik.outposts.tasks import CACHE_KEY_OUTPOST_DOWN, outpost_controller, outpost_post_save | ||||||
|  | from authentik.tenants.models import Tenant | ||||||
|  |  | ||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
| UPDATE_TRIGGERING_MODELS = ( | UPDATE_TRIGGERING_MODELS = ( | ||||||
| @ -17,6 +18,7 @@ UPDATE_TRIGGERING_MODELS = ( | |||||||
|     OutpostServiceConnection, |     OutpostServiceConnection, | ||||||
|     Provider, |     Provider, | ||||||
|     CertificateKeyPair, |     CertificateKeyPair, | ||||||
|  |     Tenant, | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | |||||||
| @ -25,6 +25,8 @@ from authentik.events.monitored_tasks import ( | |||||||
| ) | ) | ||||||
| from authentik.lib.utils.reflection import path_to_class | from authentik.lib.utils.reflection import path_to_class | ||||||
| from authentik.outposts.controllers.base import BaseController, ControllerException | from authentik.outposts.controllers.base import BaseController, ControllerException | ||||||
|  | from authentik.outposts.controllers.docker import DockerClient | ||||||
|  | from authentik.outposts.controllers.kubernetes import KubernetesClient | ||||||
| from authentik.outposts.models import ( | from authentik.outposts.models import ( | ||||||
|     DockerServiceConnection, |     DockerServiceConnection, | ||||||
|     KubernetesServiceConnection, |     KubernetesServiceConnection, | ||||||
| @ -45,21 +47,21 @@ LOGGER = get_logger() | |||||||
| CACHE_KEY_OUTPOST_DOWN = "outpost_teardown_%s" | CACHE_KEY_OUTPOST_DOWN = "outpost_teardown_%s" | ||||||
|  |  | ||||||
|  |  | ||||||
| def controller_for_outpost(outpost: Outpost) -> Optional[BaseController]: | def controller_for_outpost(outpost: Outpost) -> Optional[type[BaseController]]: | ||||||
|     """Get a controller for the outpost, when a service connection is defined""" |     """Get a controller for the outpost, when a service connection is defined""" | ||||||
|     if not outpost.service_connection: |     if not outpost.service_connection: | ||||||
|         return None |         return None | ||||||
|     service_connection = outpost.service_connection |     service_connection = outpost.service_connection | ||||||
|     if outpost.type == OutpostType.PROXY: |     if outpost.type == OutpostType.PROXY: | ||||||
|         if isinstance(service_connection, DockerServiceConnection): |         if isinstance(service_connection, DockerServiceConnection): | ||||||
|             return ProxyDockerController(outpost, service_connection) |             return ProxyDockerController | ||||||
|         if isinstance(service_connection, KubernetesServiceConnection): |         if isinstance(service_connection, KubernetesServiceConnection): | ||||||
|             return ProxyKubernetesController(outpost, service_connection) |             return ProxyKubernetesController | ||||||
|     if outpost.type == OutpostType.LDAP: |     if outpost.type == OutpostType.LDAP: | ||||||
|         if isinstance(service_connection, DockerServiceConnection): |         if isinstance(service_connection, DockerServiceConnection): | ||||||
|             return LDAPDockerController(outpost, service_connection) |             return LDAPDockerController | ||||||
|         if isinstance(service_connection, KubernetesServiceConnection): |         if isinstance(service_connection, KubernetesServiceConnection): | ||||||
|             return LDAPKubernetesController(outpost, service_connection) |             return LDAPKubernetesController | ||||||
|     return None |     return None | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -71,7 +73,12 @@ def outpost_service_connection_state(connection_pk: Any): | |||||||
|     ) |     ) | ||||||
|     if not connection: |     if not connection: | ||||||
|         return |         return | ||||||
|     state = connection.fetch_state() |     if isinstance(connection, DockerServiceConnection): | ||||||
|  |         cls = DockerClient | ||||||
|  |     if isinstance(connection, KubernetesServiceConnection): | ||||||
|  |         cls = KubernetesClient | ||||||
|  |     with cls(connection) as client: | ||||||
|  |         state = client.fetch_state() | ||||||
|     cache.set(connection.state_key, state, timeout=None) |     cache.set(connection.state_key, state, timeout=None) | ||||||
|  |  | ||||||
|  |  | ||||||
| @ -105,20 +112,24 @@ def outpost_controller( | |||||||
|     logs = [] |     logs = [] | ||||||
|     if from_cache: |     if from_cache: | ||||||
|         outpost: Outpost = cache.get(CACHE_KEY_OUTPOST_DOWN % outpost_pk) |         outpost: Outpost = cache.get(CACHE_KEY_OUTPOST_DOWN % outpost_pk) | ||||||
|  |         LOGGER.debug("Getting outpost from cache to delete") | ||||||
|     else: |     else: | ||||||
|         outpost: Outpost = Outpost.objects.filter(pk=outpost_pk).first() |         outpost: Outpost = Outpost.objects.filter(pk=outpost_pk).first() | ||||||
|  |         LOGGER.debug("Getting outpost from DB") | ||||||
|     if not outpost: |     if not outpost: | ||||||
|  |         LOGGER.warning("No outpost") | ||||||
|         return |         return | ||||||
|     self.set_uid(slugify(outpost.name)) |     self.set_uid(slugify(outpost.name)) | ||||||
|     try: |     try: | ||||||
|         controller = controller_for_outpost(outpost) |         controller_type = controller_for_outpost(outpost) | ||||||
|         if not controller: |         if not controller_type: | ||||||
|             return |             return | ||||||
|         logs = getattr(controller, f"{action}_with_logs")() |         with controller_type(outpost, outpost.service_connection) as controller: | ||||||
|         LOGGER.debug("---------------Outpost Controller logs starting----------------") |             logs = getattr(controller, f"{action}_with_logs")() | ||||||
|         for log in logs: |             LOGGER.debug("---------------Outpost Controller logs starting----------------") | ||||||
|             LOGGER.debug(log) |             for log in logs: | ||||||
|         LOGGER.debug("-----------------Outpost Controller logs end-------------------") |                 LOGGER.debug(log) | ||||||
|  |             LOGGER.debug("-----------------Outpost Controller logs end-------------------") | ||||||
|     except (ControllerException, ServiceConnectionInvalid) as exc: |     except (ControllerException, ServiceConnectionInvalid) as exc: | ||||||
|         self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc)) |         self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc)) | ||||||
|     else: |     else: | ||||||
|  | |||||||
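Editor's note: the task changes above assume `DockerClient` and `KubernetesClient` (imported from `authentik.outposts.controllers.*`) behave as context managers that expose `fetch_state()`; their implementations are in files not shown in this excerpt. A hedged sketch of that shape, with placeholder behaviour only:

```python
# Illustrative only: authentik's real client wrappers live in
# authentik/outposts/controllers/docker.py and kubernetes.py.
class SketchClient:
    """Minimal context-manager shape the state-fetch task relies on."""

    def __init__(self, connection):
        self.connection = connection  # assumed service-connection object

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        # Real clients would close sockets / remove temp TLS files here.
        return False

    def fetch_state(self):
        # Real clients query the Docker or Kubernetes API for a version string.
        return {"version": "unknown", "healthy": False}

with SketchClient(connection=None) as client:
    state = client.fetch_state()
```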
							
								
								
									
authentik/outposts/tests/test_controller_docker.py (new file, 124 lines)
							| @ -0,0 +1,124 @@ | |||||||
|  | """Docker controller tests""" | ||||||
|  | from django.test import TestCase | ||||||
|  | from docker.models.containers import Container | ||||||
|  |  | ||||||
|  | from authentik.managed.manager import ObjectManager | ||||||
|  | from authentik.outposts.controllers.base import ControllerException | ||||||
|  | from authentik.outposts.controllers.docker import DockerController | ||||||
|  | from authentik.outposts.managed import MANAGED_OUTPOST | ||||||
|  | from authentik.outposts.models import DockerServiceConnection, Outpost, OutpostType | ||||||
|  | from authentik.providers.proxy.controllers.docker import ProxyDockerController | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DockerControllerTests(TestCase): | ||||||
|  |     """Docker controller tests""" | ||||||
|  |  | ||||||
|  |     def setUp(self) -> None: | ||||||
|  |         self.outpost = Outpost.objects.create( | ||||||
|  |             name="test", | ||||||
|  |             type=OutpostType.PROXY, | ||||||
|  |         ) | ||||||
|  |         self.integration = DockerServiceConnection(name="test") | ||||||
|  |         ObjectManager().run() | ||||||
|  |  | ||||||
|  |     def test_init_managed(self): | ||||||
|  |         """Docker controller shouldn't do anything for managed outpost""" | ||||||
|  |         controller = DockerController( | ||||||
|  |             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), self.integration | ||||||
|  |         ) | ||||||
|  |         self.assertIsNone(controller.up()) | ||||||
|  |         self.assertIsNone(controller.down()) | ||||||
|  |  | ||||||
|  |     def test_init_invalid(self): | ||||||
|  |         """Ensure init fails with invalid client""" | ||||||
|  |         with self.assertRaises(ControllerException): | ||||||
|  |             DockerController(self.outpost, self.integration) | ||||||
|  |  | ||||||
|  |     def test_env_valid(self): | ||||||
|  |         """Test environment check""" | ||||||
|  |         controller = DockerController( | ||||||
|  |             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), self.integration | ||||||
|  |         ) | ||||||
|  |         env = [f"{key}={value}" for key, value in controller._get_env().items()] | ||||||
|  |         container = Container(attrs={"Config": {"Env": env}}) | ||||||
|  |         self.assertFalse(controller._comp_env(container)) | ||||||
|  |  | ||||||
|  |     def test_env_invalid(self): | ||||||
|  |         """Test environment check""" | ||||||
|  |         controller = DockerController( | ||||||
|  |             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), self.integration | ||||||
|  |         ) | ||||||
|  |         container = Container(attrs={"Config": {"Env": []}}) | ||||||
|  |         self.assertTrue(controller._comp_env(container)) | ||||||
|  |  | ||||||
|  |     def test_label_valid(self): | ||||||
|  |         """Test label check""" | ||||||
|  |         controller = DockerController( | ||||||
|  |             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), self.integration | ||||||
|  |         ) | ||||||
|  |         container = Container(attrs={"Config": {"Labels": controller._get_labels()}}) | ||||||
|  |         self.assertFalse(controller._comp_labels(container)) | ||||||
|  |  | ||||||
|  |     def test_label_invalid(self): | ||||||
|  |         """Test label check""" | ||||||
|  |         controller = DockerController( | ||||||
|  |             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), self.integration | ||||||
|  |         ) | ||||||
|  |         container = Container(attrs={"Config": {"Labels": {}}}) | ||||||
|  |         self.assertTrue(controller._comp_labels(container)) | ||||||
|  |         container = Container(attrs={"Config": {"Labels": {"io.goauthentik.outpost-uuid": "foo"}}}) | ||||||
|  |         self.assertTrue(controller._comp_labels(container)) | ||||||
|  |  | ||||||
|  |     def test_port_valid(self): | ||||||
|  |         """Test port check""" | ||||||
|  |         controller = ProxyDockerController( | ||||||
|  |             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), self.integration | ||||||
|  |         ) | ||||||
|  |         container = Container( | ||||||
|  |             attrs={ | ||||||
|  |                 "NetworkSettings": { | ||||||
|  |                     "Ports": { | ||||||
|  |                         "9000/tcp": [{"HostIp": "", "HostPort": "9000"}], | ||||||
|  |                         "9443/tcp": [{"HostIp": "", "HostPort": "9443"}], | ||||||
|  |                     } | ||||||
|  |                 }, | ||||||
|  |                 "State": "", | ||||||
|  |             } | ||||||
|  |         ) | ||||||
|  |         with self.settings(TEST=False): | ||||||
|  |             self.assertFalse(controller._comp_ports(container)) | ||||||
|  |             container.attrs["State"] = "running" | ||||||
|  |             self.assertFalse(controller._comp_ports(container)) | ||||||
|  |  | ||||||
|  |     def test_port_invalid(self): | ||||||
|  |         """Test port check""" | ||||||
|  |         controller = ProxyDockerController( | ||||||
|  |             Outpost.objects.filter(managed=MANAGED_OUTPOST).first(), self.integration | ||||||
|  |         ) | ||||||
|  |         container_no_ports = Container( | ||||||
|  |             attrs={"NetworkSettings": {"Ports": None}, "State": "running"} | ||||||
|  |         ) | ||||||
|  |         container_missing_port = Container( | ||||||
|  |             attrs={ | ||||||
|  |                 "NetworkSettings": { | ||||||
|  |                     "Ports": { | ||||||
|  |                         "9443/tcp": [{"HostIp": "", "HostPort": "9443"}], | ||||||
|  |                     } | ||||||
|  |                 }, | ||||||
|  |                 "State": "running", | ||||||
|  |             } | ||||||
|  |         ) | ||||||
|  |         container_mismatched_host = Container( | ||||||
|  |             attrs={ | ||||||
|  |                 "NetworkSettings": { | ||||||
|  |                     "Ports": { | ||||||
|  |                         "9443/tcp": [{"HostIp": "", "HostPort": "123"}], | ||||||
|  |                     } | ||||||
|  |                 }, | ||||||
|  |                 "State": "running", | ||||||
|  |             } | ||||||
|  |         ) | ||||||
|  |         with self.settings(TEST=False): | ||||||
|  |             self.assertFalse(controller._comp_ports(container_no_ports)) | ||||||
|  |             self.assertTrue(controller._comp_ports(container_missing_port)) | ||||||
|  |             self.assertTrue(controller._comp_ports(container_mismatched_host)) | ||||||
| @ -90,7 +90,8 @@ class PolicyEngine: | |||||||
|     def build(self) -> "PolicyEngine": |     def build(self) -> "PolicyEngine": | ||||||
|         """Build wrapper which monitors performance""" |         """Build wrapper which monitors performance""" | ||||||
|         with Hub.current.start_span( |         with Hub.current.start_span( | ||||||
|             op="policy.engine.build" |             op="authentik.policy.engine.build", | ||||||
|  |             description=self.__pbm, | ||||||
|         ) as span, HIST_POLICIES_BUILD_TIME.labels( |         ) as span, HIST_POLICIES_BUILD_TIME.labels( | ||||||
|             object_name=self.__pbm, |             object_name=self.__pbm, | ||||||
|             object_type=f"{self.__pbm._meta.app_label}.{self.__pbm._meta.model_name}", |             object_type=f"{self.__pbm._meta.app_label}.{self.__pbm._meta.model_name}", | ||||||
|  | |||||||
| @ -66,6 +66,7 @@ class Migration(migrations.Migration): | |||||||
|                             ("source_linked", "Source Linked"), |                             ("source_linked", "Source Linked"), | ||||||
|                             ("impersonation_started", "Impersonation Started"), |                             ("impersonation_started", "Impersonation Started"), | ||||||
|                             ("impersonation_ended", "Impersonation Ended"), |                             ("impersonation_ended", "Impersonation Ended"), | ||||||
|  |                             ("flow_execution", "Flow Execution"), | ||||||
|                             ("policy_execution", "Policy Execution"), |                             ("policy_execution", "Policy Execution"), | ||||||
|                             ("policy_exception", "Policy Exception"), |                             ("policy_exception", "Policy Exception"), | ||||||
|                             ("property_mapping_exception", "Property Mapping Exception"), |                             ("property_mapping_exception", "Property Mapping Exception"), | ||||||
|  | |||||||
| @ -74,4 +74,4 @@ class TestExpressionPolicyAPI(APITestCase): | |||||||
|         expr = "return True" |         expr = "return True" | ||||||
|         self.assertEqual(ExpressionPolicySerializer().validate_expression(expr), expr) |         self.assertEqual(ExpressionPolicySerializer().validate_expression(expr), expr) | ||||||
|         with self.assertRaises(ValidationError): |         with self.assertRaises(ValidationError): | ||||||
|             print(ExpressionPolicySerializer().validate_expression("/")) |             ExpressionPolicySerializer().validate_expression("/") | ||||||
|  | |||||||
| @ -13,6 +13,7 @@ class PasswordPolicySerializer(PolicySerializer): | |||||||
|         model = PasswordPolicy |         model = PasswordPolicy | ||||||
|         fields = PolicySerializer.Meta.fields + [ |         fields = PolicySerializer.Meta.fields + [ | ||||||
|             "password_field", |             "password_field", | ||||||
|  |             "amount_digits", | ||||||
|             "amount_uppercase", |             "amount_uppercase", | ||||||
|             "amount_lowercase", |             "amount_lowercase", | ||||||
|             "amount_symbols", |             "amount_symbols", | ||||||
|  | |||||||
| @ -0,0 +1,38 @@ | |||||||
|  | # Generated by Django 4.0 on 2021-12-18 14:54 | ||||||
|  |  | ||||||
|  | from django.db import migrations, models | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|  |     dependencies = [ | ||||||
|  |         ("authentik_policies_password", "0002_passwordpolicy_password_field"), | ||||||
|  |     ] | ||||||
|  |  | ||||||
|  |     operations = [ | ||||||
|  |         migrations.AddField( | ||||||
|  |             model_name="passwordpolicy", | ||||||
|  |             name="amount_digits", | ||||||
|  |             field=models.PositiveIntegerField(default=0), | ||||||
|  |         ), | ||||||
|  |         migrations.AlterField( | ||||||
|  |             model_name="passwordpolicy", | ||||||
|  |             name="amount_lowercase", | ||||||
|  |             field=models.PositiveIntegerField(default=0), | ||||||
|  |         ), | ||||||
|  |         migrations.AlterField( | ||||||
|  |             model_name="passwordpolicy", | ||||||
|  |             name="amount_symbols", | ||||||
|  |             field=models.PositiveIntegerField(default=0), | ||||||
|  |         ), | ||||||
|  |         migrations.AlterField( | ||||||
|  |             model_name="passwordpolicy", | ||||||
|  |             name="amount_uppercase", | ||||||
|  |             field=models.PositiveIntegerField(default=0), | ||||||
|  |         ), | ||||||
|  |         migrations.AlterField( | ||||||
|  |             model_name="passwordpolicy", | ||||||
|  |             name="length_min", | ||||||
|  |             field=models.PositiveIntegerField(default=0), | ||||||
|  |         ), | ||||||
|  |     ] | ||||||
| @ -13,6 +13,7 @@ from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT | |||||||
| LOGGER = get_logger() | LOGGER = get_logger() | ||||||
| RE_LOWER = re.compile("[a-z]") | RE_LOWER = re.compile("[a-z]") | ||||||
| RE_UPPER = re.compile("[A-Z]") | RE_UPPER = re.compile("[A-Z]") | ||||||
|  | RE_DIGITS = re.compile("[0-9]") | ||||||
|  |  | ||||||
|  |  | ||||||
| class PasswordPolicy(Policy): | class PasswordPolicy(Policy): | ||||||
| @ -23,10 +24,11 @@ class PasswordPolicy(Policy): | |||||||
|         help_text=_("Field key to check, field keys defined in Prompt stages are available."), |         help_text=_("Field key to check, field keys defined in Prompt stages are available."), | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     amount_uppercase = models.IntegerField(default=0) |     amount_digits = models.PositiveIntegerField(default=0) | ||||||
|     amount_lowercase = models.IntegerField(default=0) |     amount_uppercase = models.PositiveIntegerField(default=0) | ||||||
|     amount_symbols = models.IntegerField(default=0) |     amount_lowercase = models.PositiveIntegerField(default=0) | ||||||
|     length_min = models.IntegerField(default=0) |     amount_symbols = models.PositiveIntegerField(default=0) | ||||||
|  |     length_min = models.PositiveIntegerField(default=0) | ||||||
|     symbol_charset = models.TextField(default=r"!\"#$%&'()*+,-./:;<=>?@[\]^_`{|}~ ") |     symbol_charset = models.TextField(default=r"!\"#$%&'()*+,-./:;<=>?@[\]^_`{|}~ ") | ||||||
|     error_message = models.TextField() |     error_message = models.TextField() | ||||||
|  |  | ||||||
| @ -40,6 +42,7 @@ class PasswordPolicy(Policy): | |||||||
|     def component(self) -> str: |     def component(self) -> str: | ||||||
|         return "ak-policy-password-form" |         return "ak-policy-password-form" | ||||||
|  |  | ||||||
|  |     # pylint: disable=too-many-return-statements | ||||||
|     def passes(self, request: PolicyRequest) -> PolicyResult: |     def passes(self, request: PolicyRequest) -> PolicyResult: | ||||||
|         if ( |         if ( | ||||||
|             self.password_field not in request.context |             self.password_field not in request.context | ||||||
| @ -62,6 +65,9 @@ class PasswordPolicy(Policy): | |||||||
|             LOGGER.debug("password failed", reason="length") |             LOGGER.debug("password failed", reason="length") | ||||||
|             return PolicyResult(False, self.error_message) |             return PolicyResult(False, self.error_message) | ||||||
|  |  | ||||||
|  |         if self.amount_digits > 0 and len(RE_DIGITS.findall(password)) < self.amount_digits: | ||||||
|  |             LOGGER.debug("password failed", reason="amount_digits") | ||||||
|  |             return PolicyResult(False, self.error_message) | ||||||
|         if self.amount_lowercase > 0 and len(RE_LOWER.findall(password)) < self.amount_lowercase: |         if self.amount_lowercase > 0 and len(RE_LOWER.findall(password)) < self.amount_lowercase: | ||||||
|             LOGGER.debug("password failed", reason="amount_lowercase") |             LOGGER.debug("password failed", reason="amount_lowercase") | ||||||
|             return PolicyResult(False, self.error_message) |             return PolicyResult(False, self.error_message) | ||||||
|  | |||||||
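Editor's note: the new `amount_digits` rule counts digits with `RE_DIGITS` and fails the request when the count falls below the configured minimum, matching the existing lowercase/uppercase checks. A self-contained sketch of just that check (the policy class itself is not reproduced here):

```python
import re

RE_DIGITS = re.compile("[0-9]")

def has_enough_digits(password: str, amount_digits: int) -> bool:
    # Mirrors the check added above: only enforced when amount_digits > 0.
    return amount_digits <= 0 or len(RE_DIGITS.findall(password)) >= amount_digits

assert has_enough_digits("T3t3!pass", 2)
assert not has_enough_digits("Tt!pass", 1)
```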
| @ -1,16 +1,14 @@ | |||||||
| """Password flow tests""" | """Password flow tests""" | ||||||
| from django.urls.base import reverse | from django.urls.base import reverse | ||||||
| from django.utils.encoding import force_str |  | ||||||
| from rest_framework.test import APITestCase |  | ||||||
|  |  | ||||||
| from authentik.core.models import User | from authentik.core.models import User | ||||||
| from authentik.flows.challenge import ChallengeTypes |  | ||||||
| from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding | from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding | ||||||
|  | from authentik.flows.tests import FlowTestCase | ||||||
| from authentik.policies.password.models import PasswordPolicy | from authentik.policies.password.models import PasswordPolicy | ||||||
| from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage | from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestPasswordPolicyFlow(APITestCase): | class TestPasswordPolicyFlow(FlowTestCase): | ||||||
|     """Test Password Policy""" |     """Test Password Policy""" | ||||||
|  |  | ||||||
|     def setUp(self) -> None: |     def setUp(self) -> None: | ||||||
| @ -53,29 +51,22 @@ class TestPasswordPolicyFlow(APITestCase): | |||||||
|             {"password": "akadmin"}, |             {"password": "akadmin"}, | ||||||
|         ) |         ) | ||||||
|         self.assertEqual(response.status_code, 200) |         self.assertEqual(response.status_code, 200) | ||||||
|         self.assertJSONEqual( |         self.assertStageResponse( | ||||||
|             force_str(response.content), |             response, | ||||||
|             { |             self.flow, | ||||||
|                 "component": "ak-stage-prompt", |             component="ak-stage-prompt", | ||||||
|                 "fields": [ |             fields=[ | ||||||
|                     { |                 { | ||||||
|                         "field_key": "password", |                     "field_key": "password", | ||||||
|                         "label": "PASSWORD_LABEL", |                     "label": "PASSWORD_LABEL", | ||||||
|                         "order": 0, |                     "order": 0, | ||||||
|                         "placeholder": "PASSWORD_PLACEHOLDER", |                     "placeholder": "PASSWORD_PLACEHOLDER", | ||||||
|                         "required": True, |                     "required": True, | ||||||
|                         "type": "password", |                     "type": "password", | ||||||
|                         "sub_text": "", |                     "sub_text": "", | ||||||
|                     } |                 } | ||||||
|                 ], |             ], | ||||||
|                 "flow_info": { |             response_errors={ | ||||||
|                     "background": self.flow.background_url, |                 "non_field_errors": [{"code": "invalid", "string": self.policy.error_message}] | ||||||
|                     "cancel_url": reverse("authentik_flows:cancel"), |  | ||||||
|                     "title": "", |  | ||||||
|                 }, |  | ||||||
|                 "response_errors": { |  | ||||||
|                     "non_field_errors": [{"code": "invalid", "string": self.policy.error_message}] |  | ||||||
|                 }, |  | ||||||
|                 "type": ChallengeTypes.NATIVE.value, |  | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|  | |||||||
| @ -13,6 +13,7 @@ class TestPasswordPolicy(TestCase): | |||||||
|     def setUp(self) -> None: |     def setUp(self) -> None: | ||||||
|         self.policy = PasswordPolicy.objects.create( |         self.policy = PasswordPolicy.objects.create( | ||||||
|             name="test_false", |             name="test_false", | ||||||
|  |             amount_digits=1, | ||||||
|             amount_uppercase=1, |             amount_uppercase=1, | ||||||
|             amount_lowercase=2, |             amount_lowercase=2, | ||||||
|             amount_symbols=3, |             amount_symbols=3, | ||||||
| @ -38,7 +39,7 @@ class TestPasswordPolicy(TestCase): | |||||||
|     def test_failed_lowercase(self): |     def test_failed_lowercase(self): | ||||||
|         """not enough lowercase""" |         """not enough lowercase""" | ||||||
|         request = PolicyRequest(get_anonymous_user()) |         request = PolicyRequest(get_anonymous_user()) | ||||||
|         request.context["password"] = "TTTTTTTTTTTTTTTTTTTTTTTe"  # nosec |         request.context["password"] = "1TTTTTTTTTTTTTTTTTTTTTTe"  # nosec | ||||||
|         result: PolicyResult = self.policy.passes(request) |         result: PolicyResult = self.policy.passes(request) | ||||||
|         self.assertFalse(result.passing) |         self.assertFalse(result.passing) | ||||||
|         self.assertEqual(result.messages, ("test message",)) |         self.assertEqual(result.messages, ("test message",)) | ||||||
| @ -46,15 +47,23 @@ class TestPasswordPolicy(TestCase): | |||||||
|     def test_failed_uppercase(self): |     def test_failed_uppercase(self): | ||||||
|         """not enough uppercase""" |         """not enough uppercase""" | ||||||
|         request = PolicyRequest(get_anonymous_user()) |         request = PolicyRequest(get_anonymous_user()) | ||||||
|         request.context["password"] = "tttttttttttttttttttttttE"  # nosec |         request.context["password"] = "1tttttttttttttttttttttE"  # nosec | ||||||
|         result: PolicyResult = self.policy.passes(request) |         result: PolicyResult = self.policy.passes(request) | ||||||
|         self.assertFalse(result.passing) |         self.assertFalse(result.passing) | ||||||
|         self.assertEqual(result.messages, ("test message",)) |         self.assertEqual(result.messages, ("test message",)) | ||||||
|  |  | ||||||
|     def test_failed_symbols(self): |     def test_failed_symbols(self): | ||||||
|         """not enough uppercase""" |         """not enough symbols""" | ||||||
|         request = PolicyRequest(get_anonymous_user()) |         request = PolicyRequest(get_anonymous_user()) | ||||||
|         request.context["password"] = "TETETETETETETETETETETETETe!!!"  # nosec |         request.context["password"] = "1ETETETETETETETETETETETETe!!!"  # nosec | ||||||
|  |         result: PolicyResult = self.policy.passes(request) | ||||||
|  |         self.assertFalse(result.passing) | ||||||
|  |         self.assertEqual(result.messages, ("test message",)) | ||||||
|  |  | ||||||
|  |     def test_failed_digits(self): | ||||||
|  |         """not enough digits""" | ||||||
|  |         request = PolicyRequest(get_anonymous_user()) | ||||||
|  |         request.context["password"] = "TETETETETETETETETETETE1e!!!"  # nosec | ||||||
|         result: PolicyResult = self.policy.passes(request) |         result: PolicyResult = self.policy.passes(request) | ||||||
|         self.assertFalse(result.passing) |         self.assertFalse(result.passing) | ||||||
|         self.assertEqual(result.messages, ("test message",)) |         self.assertEqual(result.messages, ("test message",)) | ||||||
| @ -62,7 +71,7 @@ class TestPasswordPolicy(TestCase): | |||||||
|     def test_true(self): |     def test_true(self): | ||||||
|         """Positive password case""" |         """Positive password case""" | ||||||
|         request = PolicyRequest(get_anonymous_user()) |         request = PolicyRequest(get_anonymous_user()) | ||||||
|         request.context["password"] = generate_key() + "ee!!!"  # nosec |         request.context["password"] = generate_key() + "1ee!!!"  # nosec | ||||||
|         result: PolicyResult = self.policy.passes(request) |         result: PolicyResult = self.policy.passes(request) | ||||||
|         self.assertTrue(result.passing) |         self.assertTrue(result.passing) | ||||||
|         self.assertEqual(result.messages, tuple()) |         self.assertEqual(result.messages, tuple()) | ||||||
|  | |||||||
| @ -130,7 +130,7 @@ class PolicyProcess(PROCESS_CLASS): | |||||||
|     def profiling_wrapper(self): |     def profiling_wrapper(self): | ||||||
|         """Run with profiling enabled""" |         """Run with profiling enabled""" | ||||||
|         with Hub.current.start_span( |         with Hub.current.start_span( | ||||||
|             op="policy.process.execute", |             op="authentik.policy.process.execute", | ||||||
|         ) as span, HIST_POLICIES_EXECUTION_TIME.labels( |         ) as span, HIST_POLICIES_EXECUTION_TIME.labels( | ||||||
|             binding_order=self.binding.order, |             binding_order=self.binding.order, | ||||||
|             binding_target_type=self.binding.target_type, |             binding_target_type=self.binding.target_type, | ||||||
|  | |||||||
| @ -1,11 +1,11 @@ | |||||||
| """Source API Views""" | """Reputation policy API Views""" | ||||||
| from rest_framework import mixins | from rest_framework import mixins | ||||||
| from rest_framework.serializers import ModelSerializer | from rest_framework.serializers import ModelSerializer | ||||||
| from rest_framework.viewsets import GenericViewSet, ModelViewSet | from rest_framework.viewsets import GenericViewSet, ModelViewSet | ||||||
|  |  | ||||||
| from authentik.core.api.used_by import UsedByMixin | from authentik.core.api.used_by import UsedByMixin | ||||||
| from authentik.policies.api.policies import PolicySerializer | from authentik.policies.api.policies import PolicySerializer | ||||||
| from authentik.policies.reputation.models import IPReputation, ReputationPolicy, UserReputation | from authentik.policies.reputation.models import Reputation, ReputationPolicy | ||||||
|  |  | ||||||
|  |  | ||||||
| class ReputationPolicySerializer(PolicySerializer): | class ReputationPolicySerializer(PolicySerializer): | ||||||
| @ -29,59 +29,32 @@ class ReputationPolicyViewSet(UsedByMixin, ModelViewSet): | |||||||
|     ordering = ["name"] |     ordering = ["name"] | ||||||
|  |  | ||||||
|  |  | ||||||
| class IPReputationSerializer(ModelSerializer): | class ReputationSerializer(ModelSerializer): | ||||||
|     """IPReputation Serializer""" |     """Reputation Serializer""" | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         model = IPReputation |         model = Reputation | ||||||
|         fields = [ |         fields = [ | ||||||
|             "pk", |             "pk", | ||||||
|  |             "identifier", | ||||||
|             "ip", |             "ip", | ||||||
|  |             "ip_geo_data", | ||||||
|             "score", |             "score", | ||||||
|             "updated", |             "updated", | ||||||
|         ] |         ] | ||||||
|  |  | ||||||
|  |  | ||||||
| class IPReputationViewSet( | class ReputationViewSet( | ||||||
|     mixins.RetrieveModelMixin, |     mixins.RetrieveModelMixin, | ||||||
|     mixins.DestroyModelMixin, |     mixins.DestroyModelMixin, | ||||||
|     UsedByMixin, |     UsedByMixin, | ||||||
|     mixins.ListModelMixin, |     mixins.ListModelMixin, | ||||||
|     GenericViewSet, |     GenericViewSet, | ||||||
| ): | ): | ||||||
|     """IPReputation Viewset""" |     """Reputation Viewset""" | ||||||
|  |  | ||||||
|     queryset = IPReputation.objects.all() |     queryset = Reputation.objects.all() | ||||||
|     serializer_class = IPReputationSerializer |     serializer_class = ReputationSerializer | ||||||
|     search_fields = ["ip", "score"] |     search_fields = ["identifier", "ip", "score"] | ||||||
|     filterset_fields = ["ip", "score"] |     filterset_fields = ["identifier", "ip", "score"] | ||||||
|     ordering = ["ip"] |     ordering = ["ip"] | ||||||
|  |  | ||||||
|  |  | ||||||
| class UserReputationSerializer(ModelSerializer): |  | ||||||
|     """UserReputation Serializer""" |  | ||||||
|  |  | ||||||
|     class Meta: |  | ||||||
|         model = UserReputation |  | ||||||
|         fields = [ |  | ||||||
|             "pk", |  | ||||||
|             "username", |  | ||||||
|             "score", |  | ||||||
|             "updated", |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class UserReputationViewSet( |  | ||||||
|     mixins.RetrieveModelMixin, |  | ||||||
|     mixins.DestroyModelMixin, |  | ||||||
|     UsedByMixin, |  | ||||||
|     mixins.ListModelMixin, |  | ||||||
|     GenericViewSet, |  | ||||||
| ): |  | ||||||
|     """UserReputation Viewset""" |  | ||||||
|  |  | ||||||
|     queryset = UserReputation.objects.all() |  | ||||||
|     serializer_class = UserReputationSerializer |  | ||||||
|     search_fields = ["username", "score"] |  | ||||||
|     filterset_fields = ["username", "score"] |  | ||||||
|     ordering = ["username"] |  | ||||||
|  | |||||||
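Editor's note: the API change above replaces the separate `IPReputation` and `UserReputation` endpoints with a single `Reputation` viewset keyed by `identifier` and `ip`. A hedged usage sketch, intended to run inside the project (for example in a Django shell); only the field names visible in the serializer above are taken from the diff, the queries themselves are illustrative.

```python
# Illustrative ORM queries against the merged model.
from authentik.policies.reputation.models import Reputation

# Entries with a negative score, worst first, covering what IPReputation served.
worst_ips = (
    Reputation.objects.filter(score__lt=0)
    .order_by("score")
    .values("ip", "identifier", "score")
)

# Per-user view, covering what UserReputation served; "akadmin" is an assumed identifier.
user_entries = Reputation.objects.filter(identifier="akadmin")
```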
Some files were not shown because too many files have changed in this diff.