Compare commits

167 Commits

version/20 ... version/20
| SHA1 | Author | Date | |
|---|---|---|---|
| 2d5c45543b | |||
| 9d476a42d1 | |||
| 2c816e6162 | |||
| dbcb4d46ba | |||
| 6600da7d98 | |||
| a265dd54cc | |||
| a603f42cc0 | |||
| d9a788aac8 | |||
| 7c6185b581 | |||
| 41a1305555 | |||
| 75f252b530 | |||
| a9519a4a68 | |||
| bf4cbb25fe | |||
| a925418f60 | |||
| ffd61d0e60 | |||
| 71d112bdcf | |||
| c58fe18b97 | |||
| 590c7f4c9d | |||
| 56f1204c9b | |||
| 349a5b2d00 | |||
| 63e3667e82 | |||
| 92f2a82c03 | |||
| dcf074650e | |||
| 5a465fbc36 | |||
| 7cd80a903a | |||
| dd00351bc7 | |||
| 5fca7d11b8 | |||
| 0ff59636f7 | |||
| e5ebe390d2 | |||
| b66626f9c4 | |||
| 23123c43ee | |||
| 8ce918d527 | |||
| 45c1a603e7 | |||
| 583271d5ed | |||
| 176360fdd7 | |||
| 8d2a3b67b9 | |||
| d0d3072c50 | |||
| 34e2bbc41d | |||
| ea2dbb2f33 | |||
| c55f2ad10a | |||
| 2cde40aeee | |||
| a30b32fbbf | |||
| 1745306cc6 | |||
| 8925787a13 | |||
| 968b7ec17a | |||
| 6600d5bf69 | |||
| a4278833d8 | |||
| 942905b9b1 | |||
| 81056c3889 | |||
| 36b694fc41 | |||
| 2d9f216658 | |||
| 8d7bb7da17 | |||
| 965db6eaf5 | |||
| 9bdd6f23a4 | |||
| 675ad7710c | |||
| 9939db13c3 | |||
| 03e134b296 | |||
| 465750276c | |||
| 9b13191646 | |||
| 634ea61b50 | |||
| 0fcb4936a2 | |||
| 934e62d5be | |||
| c5e9197b19 | |||
| 0b7ebf0e07 | |||
| ddca8ef3ca | |||
| 709581f5a8 | |||
| 72e41c03f5 | |||
| 40503d06b7 | |||
| 1df8790050 | |||
| 3c23ad340f | |||
| f9f2e00913 | |||
| 8362507bdf | |||
| a2181c3bf0 | |||
| a07ded0dae | |||
| 3b0b9301ee | |||
| 919f293fc7 | |||
| c4df2e5a50 | |||
| 4d1500e0f3 | |||
| 281bd4c69a | |||
| e4678aa032 | |||
| ff1c4d555a | |||
| 4a3e34d40a | |||
| 6939898bbe | |||
| 549607c5ed | |||
| f61acdfbfd | |||
| e3572bad76 | |||
| 8f99891a9d | |||
| 99d5262d41 | |||
| 97a3c2d88b | |||
| e91ff4566d | |||
| dc942b2f4c | |||
| a3fccbdaff | |||
| bdf9f26d07 | |||
| 901cea1453 | |||
| 37b57ac28f | |||
| e9aa37ba67 | |||
| 9a0aa4c79b | |||
| 34ab68a169 | |||
| 52cf4890cf | |||
| 8e5d03cb86 | |||
| 2190fa555b | |||
| ae1edde17b | |||
| 3ad1c3f212 | |||
| 3665e2fefa | |||
| 3dbe35cf9e | |||
| 65ec444e52 | |||
| c7f0ea8a4b | |||
| 0620324702 | |||
| 5a802bcf83 | |||
| 00c8054893 | |||
| dc2538f59d | |||
| 5a0e78c698 | |||
| fd4e8a59f4 | |||
| dd1a6a81c8 | |||
| 84dfbcaaae | |||
| e649e9fb03 | |||
| 266ef66a6f | |||
| 842fdb0b0c | |||
| a270a84aae | |||
| 36f7cad23b | |||
| e441ac1e43 | |||
| 24f2932777 | |||
| a6c6f22221 | |||
| abd5db8ad4 | |||
| 124ce80694 | |||
| 4352960f83 | |||
| 4e2443d60b | |||
| 34a8408a4f | |||
| 17b65adcc5 | |||
| 6f8d129dea | |||
| 59f339beda | |||
| ce1c400022 | |||
| c99afe0ad4 | |||
| ff9ff18c11 | |||
| 4d11d82c6e | |||
| b4d750174f | |||
| fd44765ff4 | |||
| 190ebb27e4 | |||
| fb3c04d0c7 | |||
| 3ba8de61e0 | |||
| d4d2be84a3 | |||
| 96ea7ae09c | |||
| 172bfceb31 | |||
| 932b19999e | |||
| 0f1cc86e71 | |||
| 788fd00390 | |||
| f602e202b8 | |||
| 9b60fcb08b | |||
| a293a14f2a | |||
| 65bfa589eb | |||
| defca51d24 | |||
| d862028134 | |||
| c19d7c37aa | |||
| 6fb3102d25 | |||
| 51e3453dca | |||
| 6f58fdf158 | |||
| 5d4051f547 | |||
| 219b8d1a57 | |||
| c7d4e69669 | |||
| cd629dfbaa | |||
| 8eaaaae2a7 | |||
| 3d0a853449 | |||
| c2f8ff55cf | |||
| 4b52697cfe | |||
| 80fae44f47 | |||
| afd7af557d | |||
| d4493c0ee9 | |||
| @ -1,5 +1,5 @@ | ||||
| [bumpversion] | ||||
| current_version = 2021.5.1-rc6 | ||||
| current_version = 2021.5.4 | ||||
| tag = True | ||||
| commit = True | ||||
| parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*) | ||||
| @ -31,8 +31,6 @@ values = | ||||
|  | ||||
| [bumpversion:file:web/src/constants.ts] | ||||
|  | ||||
| [bumpversion:file:web/nginx.conf] | ||||
|  | ||||
| [bumpversion:file:website/docs/outposts/manual-deploy-docker-compose.md] | ||||
|  | ||||
| [bumpversion:file:website/docs/outposts/manual-deploy-kubernetes.md] | ||||
|  | ||||

.github/workflows/release.yml (34 changes, vendored)

							| @ -22,6 +22,12 @@ jobs: | ||||
|         with: | ||||
|           username: ${{ secrets.DOCKER_USERNAME }} | ||||
|           password: ${{ secrets.DOCKER_PASSWORD }} | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@v1 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: prepare ts api client | ||||
|         run: | | ||||
|           docker run --rm -v $(pwd):/local openapitools/openapi-generator-cli generate -i /local/swagger.yaml -g typescript-fetch -o /local/web/api --additional-properties=typescriptThreePlus=true,supportsES6=true,npmName=authentik-api,npmVersion=1.0.0 | ||||
| @ -30,9 +36,9 @@ jobs: | ||||
|         with: | ||||
|           push: ${{ github.event_name == 'release' }} | ||||
|           tags: | | ||||
|             beryju/authentik:2021.5.1-rc6, | ||||
|             beryju/authentik:2021.5.4, | ||||
|             beryju/authentik:latest, | ||||
|             ghcr.io/goauthentik/server:2021.5.1-rc6, | ||||
|             ghcr.io/goauthentik/server:2021.5.4, | ||||
|             ghcr.io/goauthentik/server:latest | ||||
|           platforms: linux/amd64,linux/arm64 | ||||
|           context: . | ||||
| @ -58,14 +64,20 @@ jobs: | ||||
|         with: | ||||
|           username: ${{ secrets.DOCKER_USERNAME }} | ||||
|           password: ${{ secrets.DOCKER_PASSWORD }} | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@v1 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: Building Docker Image | ||||
|         uses: docker/build-push-action@v2 | ||||
|         with: | ||||
|           push: ${{ github.event_name == 'release' }} | ||||
|           tags: | | ||||
|             beryju/authentik-proxy:2021.5.1-rc6, | ||||
|             beryju/authentik-proxy:2021.5.4, | ||||
|             beryju/authentik-proxy:latest, | ||||
|             ghcr.io/goauthentik/proxy:2021.5.1-rc6, | ||||
|             ghcr.io/goauthentik/proxy:2021.5.4, | ||||
|             ghcr.io/goauthentik/proxy:latest | ||||
|           context: outpost/ | ||||
|           file: outpost/proxy.Dockerfile | ||||
| @ -92,14 +104,20 @@ jobs: | ||||
|         with: | ||||
|           username: ${{ secrets.DOCKER_USERNAME }} | ||||
|           password: ${{ secrets.DOCKER_PASSWORD }} | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@v1 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|           password: ${{ secrets.GITHUB_TOKEN }} | ||||
|       - name: Building Docker Image | ||||
|         uses: docker/build-push-action@v2 | ||||
|         with: | ||||
|           push: ${{ github.event_name == 'release' }} | ||||
|           tags: | | ||||
|             beryju/authentik-ldap:2021.5.1-rc6, | ||||
|             beryju/authentik-ldap:2021.5.4, | ||||
|             beryju/authentik-ldap:latest, | ||||
|             ghcr.io/goauthentik/ldap:2021.5.1-rc6, | ||||
|             ghcr.io/goauthentik/ldap:2021.5.4, | ||||
|             ghcr.io/goauthentik/ldap:latest | ||||
|           context: outpost/ | ||||
|           file: outpost/ldap.Dockerfile | ||||
| @ -121,7 +139,7 @@ jobs: | ||||
|           docker-compose pull -q | ||||
|           docker-compose up --no-start | ||||
|           docker-compose start postgresql redis | ||||
|           docker-compose run -u root --entrypoint /bin/bash server -c "pip install --no-cache -r requirements-dev.txt && ./manage.py test authentik" | ||||
|           docker-compose run -u root --entrypoint /bin/bash server -c "apt-get update && apt-get install -y --no-install-recommends git && pip install --no-cache -r requirements-dev.txt && ./manage.py test authentik" | ||||
|   sentry-release: | ||||
|     if: ${{ github.event_name == 'release' }} | ||||
|     needs: | ||||
| @ -137,5 +155,5 @@ jobs: | ||||
|           SENTRY_PROJECT: authentik | ||||
|           SENTRY_URL: https://sentry.beryju.org | ||||
|         with: | ||||
|           version: authentik@2021.5.1-rc6 | ||||
|           version: authentik@2021.5.4 | ||||
|           environment: beryjuorg-prod | ||||
|  | ||||

.github/workflows/tag.yml (6 changes, vendored)

							| @ -27,17 +27,17 @@ jobs: | ||||
|             -f Dockerfile . | ||||
|           docker-compose up --no-start | ||||
|           docker-compose start postgresql redis | ||||
|           docker-compose run -u root --entrypoint /bin/bash server -c "pip install --no-cache -r requirements-dev.txt && ./manage.py test authentik" | ||||
|           docker-compose run -u root --entrypoint /bin/bash server -c "apt-get update && apt-get install -y --no-install-recommends git && pip install --no-cache -r requirements-dev.txt && ./manage.py test authentik" | ||||
|       - name: Extract version number | ||||
|         id: get_version | ||||
|         uses: actions/github-script@0.2.0 | ||||
|         uses: actions/github-script@v4.0.2 | ||||
|         with: | ||||
|           github-token: ${{ secrets.GITHUB_TOKEN }} | ||||
|           script: | | ||||
|             return context.payload.ref.replace(/\/refs\/tags\/version\//, ''); | ||||
|       - name: Create Release | ||||
|         id: create_release | ||||
|         uses: actions/create-release@v1.0.0 | ||||
|         uses: actions/create-release@v1.1.4 | ||||
|         env: | ||||
|           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||||
|         with: | ||||
|  | ||||

Dockerfile (14 changes)

							| @ -48,24 +48,17 @@ ARG GIT_BUILD_HASH | ||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||
|  | ||||
| RUN apt-get update && \ | ||||
|     apt-get install -y --no-install-recommends curl ca-certificates gnupg && \ | ||||
|     apt-get install -y --no-install-recommends curl ca-certificates gnupg git runit && \ | ||||
|     curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \ | ||||
|     echo "deb http://apt.postgresql.org/pub/repos/apt buster-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \ | ||||
|     apt-get update && \ | ||||
|     apt-get install -y --no-install-recommends libpq-dev postgresql-client-12 postgresql-client-11 build-essential libxmlsec1-dev pkg-config libmaxminddb0 && \ | ||||
|     apt-get install -y --no-install-recommends libpq-dev postgresql-client build-essential libxmlsec1-dev pkg-config libmaxminddb0 && \ | ||||
|     pip install -r /requirements.txt --no-cache-dir && \ | ||||
|     apt-get remove --purge -y build-essential && \ | ||||
|     apt-get remove --purge -y build-essential git && \ | ||||
|     apt-get autoremove --purge -y && \ | ||||
|     apt-get clean && \ | ||||
|     rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \ | ||||
|     # This is quite hacky, but docker has no guaranteed Group ID | ||||
|     # we could instead check for the GID of the socket and add the user dynamically, | ||||
|     # but then we have to drop permmissions later | ||||
|     groupadd -g 998 docker_998 && \ | ||||
|     groupadd -g 999 docker_999 && \ | ||||
|     adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \ | ||||
|     usermod -a -G docker_998 authentik && \ | ||||
|     usermod -a -G docker_999 authentik && \ | ||||
|     mkdir /backups && \ | ||||
|     chown authentik:authentik /backups | ||||
|  | ||||
| @ -77,7 +70,6 @@ COPY ./lifecycle/ /lifecycle | ||||
| COPY --from=builder /work/authentik /authentik-proxy | ||||
|  | ||||
| USER authentik | ||||
| STOPSIGNAL SIGINT | ||||
| ENV TMPDIR /dev/shm/ | ||||
| ENV PYTHONUBUFFERED 1 | ||||
| ENTRYPOINT [ "/lifecycle/bootstrap.sh" ] | ||||
|  | ||||

Makefile (17 changes)

							| @ -1,3 +1,6 @@ | ||||
| .SHELLFLAGS += -x -e | ||||
| PWD = $(shell pwd) | ||||
|  | ||||
| all: lint-fix lint test gen | ||||
|  | ||||
| test-integration: | ||||
| @ -24,12 +27,14 @@ lint: | ||||
|  | ||||
| gen: | ||||
| 	./manage.py generate_swagger -o swagger.yaml -f yaml | ||||
|  | ||||
| local-stack: | ||||
| 	export AUTHENTIK_TAG=testing | ||||
| 	docker build -t beryju/authentik:testng . | ||||
| 	docker-compose up -d | ||||
| 	docker-compose run --rm server migrate | ||||
| 	docker run \ | ||||
| 		--rm -v ${PWD}:/local \ | ||||
| 		openapitools/openapi-generator-cli generate \ | ||||
| 		-i /local/swagger.yaml \ | ||||
| 		-g typescript-fetch \ | ||||
| 		-o /local/web/api \ | ||||
| 		--additional-properties=typescriptThreePlus=true,supportsES6=true,npmName=authentik-api,npmVersion=1.0.0 | ||||
| 	cd web/api && npx tsc | ||||
|  | ||||
| run: | ||||
| 	go run -v cmd/server/main.go | ||||
|  | ||||

Pipfile (4 changes)

							| @ -11,7 +11,7 @@ channels-redis = "*" | ||||
| dacite = "*" | ||||
| defusedxml = "*" | ||||
| django = "*" | ||||
| django-dbbackup = "*" | ||||
| django-dbbackup = { git = 'https://github.com/django-dbbackup/django-dbbackup.git', ref = '9d1909c30a3271c8c9c8450add30d6e0b996e145' } | ||||
| django-filter = "*" | ||||
| django-guardian = "*" | ||||
| django-model-utils = "*" | ||||
| @ -50,7 +50,7 @@ python_version = "3.9" | ||||
|  | ||||
| [dev-packages] | ||||
| bandit = "*" | ||||
| black = "==20.8b1" | ||||
| black = "==21.5b1" | ||||
| bump2version = "*" | ||||
| colorama = "*" | ||||
| coverage = "*" | ||||
|  | ||||

Pipfile.lock (193 changes, generated)

							| @ -1,7 +1,7 @@ | ||||
| { | ||||
|     "_meta": { | ||||
|         "hash": { | ||||
|             "sha256": "17be2923cf8d281e430ec1467aea723806ac6f7c58fc6553ede92317e43f4d14" | ||||
|             "sha256": "8a32708c1c04f8da03c817df973de28c37c97ee773f571ce0b3f3f834e1b7094" | ||||
|         }, | ||||
|         "pipfile-spec": 6, | ||||
|         "requires": { | ||||
| @ -116,18 +116,18 @@ | ||||
|         }, | ||||
|         "boto3": { | ||||
|             "hashes": [ | ||||
|                 "sha256:2f0d76660d484ff4c8c2efe9171c1281b38681e6806f87cf100e822432eda11e", | ||||
|                 "sha256:cbaa8df5faf81730f117bfa0e3fcda68ec3fa9449a05847aa6140a3f4c087765" | ||||
|                 "sha256:13cfe0e3ae1bdc7baf4272b1814a7e760fbb508b19d6ac3f472a6bbd64baad61", | ||||
|                 "sha256:ce08b88a2d7a0ad8edb385f84ea4914296fee6813c66ebf0def956d5278de793" | ||||
|             ], | ||||
|             "index": "pypi", | ||||
|             "version": "==1.17.69" | ||||
|             "version": "==1.17.73" | ||||
|         }, | ||||
|         "botocore": { | ||||
|             "hashes": [ | ||||
|                 "sha256:7e94d3777763ece33d282b437e3b05b5567b9af816bd7819dbe4eb9bc6db6082", | ||||
|                 "sha256:f755b19ddebda0f8ab7afc75ebcb5412dd802eca0a7e670f5fff8c5e58bc88b1" | ||||
|                 "sha256:4b4aa58c61d4b125bc6ec1597924b2749e19de8f2c9a374ac087aa2561e71828", | ||||
|                 "sha256:69dc0b6fdc0855f5a4f8b1d29c96b9cec44e71054fea0f968e5904d6ccfd4fd9" | ||||
|             ], | ||||
|             "version": "==1.20.69" | ||||
|             "version": "==1.20.73" | ||||
|         }, | ||||
|         "cachetools": { | ||||
|             "hashes": [ | ||||
| @ -312,18 +312,15 @@ | ||||
|         }, | ||||
|         "django": { | ||||
|             "hashes": [ | ||||
|                 "sha256:0a1d195ad65c52bf275b8277b3d49680bd1137a5f55039a806f25f6b9752ce3d", | ||||
|                 "sha256:18dd3145ddbd04bf189ff79b9954d08fda5171ea7b57bf705789fea766a07d50" | ||||
|                 "sha256:13ac78dbfd189532cad8f383a27e58e18b3d33f80009ceb476d7fcbfc5dcebd8", | ||||
|                 "sha256:7e0a1393d18c16b503663752a8b6790880c5084412618990ce8a81cc908b4962" | ||||
|             ], | ||||
|             "index": "pypi", | ||||
|             "version": "==3.2.2" | ||||
|             "version": "==3.2.3" | ||||
|         }, | ||||
|         "django-dbbackup": { | ||||
|             "hashes": [ | ||||
|                 "sha256:bb109735cae98b64ad084e5b461b7aca2d7b39992f10c9ed9435e3ebb6fb76c8" | ||||
|             ], | ||||
|             "index": "pypi", | ||||
|             "version": "==3.3.0" | ||||
|             "git": "https://github.com/django-dbbackup/django-dbbackup.git", | ||||
|             "ref": "9d1909c30a3271c8c9c8450add30d6e0b996e145" | ||||
|         }, | ||||
|         "django-filter": { | ||||
|             "hashes": [ | ||||
| @ -429,11 +426,11 @@ | ||||
|         }, | ||||
|         "geoip2": { | ||||
|             "hashes": [ | ||||
|                 "sha256:57d8d15de2527e0697bbef44fc16812bba709f03a07ef99297bd56c1df3b1efd", | ||||
|                 "sha256:707025542ef076bd8fd80e97138bebdb7812527b2a007d141a27ad98b0370fff" | ||||
|                 "sha256:906a1dbf15a179a1af3522970e8420ab15bb3e0afc526942cc179e12146d9c1d", | ||||
|                 "sha256:b97b44031fdc463e84eb1316b4f19edd978cb1d78703465fcb1e36dc5a822ba6" | ||||
|             ], | ||||
|             "index": "pypi", | ||||
|             "version": "==4.1.0" | ||||
|             "version": "==4.2.0" | ||||
|         }, | ||||
|         "google-auth": { | ||||
|             "hashes": [ | ||||
| @ -560,10 +557,10 @@ | ||||
|         }, | ||||
|         "jinja2": { | ||||
|             "hashes": [ | ||||
|                 "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", | ||||
|                 "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" | ||||
|                 "sha256:2f2de5285cf37f33d33ecd4a9080b75c87cd0c1994d5a9c6df17131ea1f049c6", | ||||
|                 "sha256:ea8d7dd814ce9df6de6a761ec7f1cac98afe305b8cdc4aaae4e114b8d8ce24c5" | ||||
|             ], | ||||
|             "version": "==2.11.3" | ||||
|             "version": "==3.0.0" | ||||
|         }, | ||||
|         "jmespath": { | ||||
|             "hashes": [ | ||||
| @ -588,11 +585,11 @@ | ||||
|         }, | ||||
|         "kubernetes": { | ||||
|             "hashes": [ | ||||
|                 "sha256:23c85d8571df8f56e773f1a413bc081537536dc47e2b5e8dc2e6262edb2c57ca", | ||||
|                 "sha256:ec52ea01d52e2ec3da255992f7e859f3a76f2bdb51cf65ba8cd71dfc309d8daa" | ||||
|                 "sha256:225a95a0aadbd5b645ab389d941a7980db8cdad2a776fde64d1b43fc3299bde9", | ||||
|                 "sha256:c69b318696ba797dcf63eb928a8d4370c52319f4140023c502d7dfdf2080eb79" | ||||
|             ], | ||||
|             "index": "pypi", | ||||
|             "version": "==12.0.1" | ||||
|             "version": "==17.17.0" | ||||
|         }, | ||||
|         "ldap3": { | ||||
|             "hashes": [ | ||||
| @ -656,60 +653,42 @@ | ||||
|         }, | ||||
|         "markupsafe": { | ||||
|             "hashes": [ | ||||
|                 "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", | ||||
|                 "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", | ||||
|                 "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", | ||||
|                 "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", | ||||
|                 "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", | ||||
|                 "sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f", | ||||
|                 "sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39", | ||||
|                 "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", | ||||
|                 "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", | ||||
|                 "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014", | ||||
|                 "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f", | ||||
|                 "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", | ||||
|                 "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", | ||||
|                 "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", | ||||
|                 "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", | ||||
|                 "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b", | ||||
|                 "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", | ||||
|                 "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", | ||||
|                 "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", | ||||
|                 "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85", | ||||
|                 "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1", | ||||
|                 "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", | ||||
|                 "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", | ||||
|                 "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", | ||||
|                 "sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850", | ||||
|                 "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0", | ||||
|                 "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", | ||||
|                 "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", | ||||
|                 "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb", | ||||
|                 "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", | ||||
|                 "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", | ||||
|                 "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", | ||||
|                 "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1", | ||||
|                 "sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2", | ||||
|                 "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", | ||||
|                 "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", | ||||
|                 "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", | ||||
|                 "sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7", | ||||
|                 "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", | ||||
|                 "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8", | ||||
|                 "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", | ||||
|                 "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193", | ||||
|                 "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", | ||||
|                 "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b", | ||||
|                 "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", | ||||
|                 "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", | ||||
|                 "sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5", | ||||
|                 "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c", | ||||
|                 "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032", | ||||
|                 "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", | ||||
|                 "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", | ||||
|                 "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" | ||||
|                 "sha256:007dc055dbce5b1104876acee177dbfd18757e19d562cd440182e1f492e96b95", | ||||
|                 "sha256:031bf79a27d1c42f69c276d6221172417b47cb4b31cdc73d362a9bf5a1889b9f", | ||||
|                 "sha256:161d575fa49395860b75da5135162481768b11208490d5a2143ae6785123e77d", | ||||
|                 "sha256:24bbc3507fb6dfff663af7900a631f2aca90d5a445f272db5fc84999fa5718bc", | ||||
|                 "sha256:2efaeb1baff547063bad2b2893a8f5e9c459c4624e1a96644bbba08910ae34e0", | ||||
|                 "sha256:32200f562daaab472921a11cbb63780f1654552ae49518196fc361ed8e12e901", | ||||
|                 "sha256:3261fae28155e5c8634dd7710635fe540a05b58f160cef7713c7700cb9980e66", | ||||
|                 "sha256:3b54a9c68995ef4164567e2cd1a5e16db5dac30b2a50c39c82db8d4afaf14f63", | ||||
|                 "sha256:3c352ff634e289061711608f5e474ec38dbaa21e3e168820d53d5f4015e5b91b", | ||||
|                 "sha256:3fb47f97f1d338b943126e90b79cad50d4fcfa0b80637b5a9f468941dbbd9ce5", | ||||
|                 "sha256:441ce2a8c17683d97e06447fcbccbdb057cbf587c78eb75ae43ea7858042fe2c", | ||||
|                 "sha256:45535241baa0fc0ba2a43961a1ac7562ca3257f46c4c3e9c0de38b722be41bd1", | ||||
|                 "sha256:4aca81a687975b35e3e80bcf9aa93fe10cd57fac37bf18b2314c186095f57e05", | ||||
|                 "sha256:4cc563836f13c57f1473bc02d1e01fc37bab70ad4ee6be297d58c1d66bc819bf", | ||||
|                 "sha256:4fae0677f712ee090721d8b17f412f1cbceefbf0dc180fe91bab3232f38b4527", | ||||
|                 "sha256:58bc9fce3e1557d463ef5cee05391a05745fd95ed660f23c1742c711712c0abb", | ||||
|                 "sha256:664832fb88b8162268928df233f4b12a144a0c78b01d38b81bdcf0fc96668ecb", | ||||
|                 "sha256:70820a1c96311e02449591cbdf5cd1c6a34d5194d5b55094ab725364375c9eb2", | ||||
|                 "sha256:79b2ae94fa991be023832e6bcc00f41dbc8e5fe9d997a02db965831402551730", | ||||
|                 "sha256:83cf0228b2f694dcdba1374d5312f2277269d798e65f40344964f642935feac1", | ||||
|                 "sha256:87de598edfa2230ff274c4de7fcf24c73ffd96208c8e1912d5d0fee459767d75", | ||||
|                 "sha256:8f806bfd0f218477d7c46a11d3e52dc7f5fdfaa981b18202b7dc84bbc287463b", | ||||
|                 "sha256:90053234a6479738fd40d155268af631c7fca33365f964f2208867da1349294b", | ||||
|                 "sha256:a00dce2d96587651ef4fa192c17e039e8cfab63087c67e7d263a5533c7dad715", | ||||
|                 "sha256:a08cd07d3c3c17cd33d9e66ea9dee8f8fc1c48e2d11bd88fd2dc515a602c709b", | ||||
|                 "sha256:a19d39b02a24d3082856a5b06490b714a9d4179321225bbf22809ff1e1887cc8", | ||||
|                 "sha256:d00a669e4a5bec3ee6dbeeeedd82a405ced19f8aeefb109a012ea88a45afff96", | ||||
|                 "sha256:dab0c685f21f4a6c95bfc2afd1e7eae0033b403dd3d8c1b6d13a652ada75b348", | ||||
|                 "sha256:df561f65049ed3556e5b52541669310e88713fdae2934845ec3606f283337958", | ||||
|                 "sha256:e4570d16f88c7f3032ed909dc9e905a17da14a1c4cfd92608e3fda4cb1208bbd", | ||||
|                 "sha256:e77e4b983e2441aff0c0d07ee711110c106b625f440292dfe02a2f60c8218bd6", | ||||
|                 "sha256:e79212d09fc0e224d20b43ad44bb0a0a3416d1e04cf6b45fed265114a5d43d20", | ||||
|                 "sha256:f58b5ba13a5689ca8317b98439fccfbcc673acaaf8241c1869ceea40f5d585bf", | ||||
|                 "sha256:fef86115fdad7ae774720d7103aa776144cf9b66673b4afa9bcaa7af990ed07b" | ||||
|             ], | ||||
|             "version": "==1.1.1" | ||||
|             "version": "==2.0.0" | ||||
|         }, | ||||
|         "maxminddb": { | ||||
|             "hashes": [ | ||||
| @ -1439,10 +1418,11 @@ | ||||
|         }, | ||||
|         "black": { | ||||
|             "hashes": [ | ||||
|                 "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea" | ||||
|                 "sha256:23695358dbcb3deafe7f0a3ad89feee5999a46be5fec21f4f1d108be0bcdb3b1", | ||||
|                 "sha256:8a60071a0043876a4ae96e6c69bd3a127dad2c1ca7c8083573eb82f92705d008" | ||||
|             ], | ||||
|             "index": "pypi", | ||||
|             "version": "==20.8b1" | ||||
|             "version": "==21.5b1" | ||||
|         }, | ||||
|         "bump2version": { | ||||
|             "hashes": [ | ||||
| @ -1548,10 +1528,10 @@ | ||||
|         }, | ||||
|         "gitpython": { | ||||
|             "hashes": [ | ||||
|                 "sha256:05af150f47a5cca3f4b0af289b73aef8cf3c4fe2385015b06220cbcdee48bb6e", | ||||
|                 "sha256:a77824e516d3298b04fb36ec7845e92747df8fcfee9cacc32dd6239f9652f867" | ||||
|                 "sha256:29fe82050709760081f588dd50ce83504feddbebdc4da6956d02351552b1c135", | ||||
|                 "sha256:ee24bdc93dce357630764db659edaf6b8d664d4ff5447ccfeedd2dc5c253f41e" | ||||
|             ], | ||||
|             "version": "==3.1.15" | ||||
|             "version": "==3.1.17" | ||||
|         }, | ||||
|         "idna": { | ||||
|             "hashes": [ | ||||
| @ -1691,11 +1671,11 @@ | ||||
|         }, | ||||
|         "pytest-django": { | ||||
|             "hashes": [ | ||||
|                 "sha256:80f8875226ec4dc0b205f0578072034563879d98d9b1bec143a80b9045716cb0", | ||||
|                 "sha256:a51150d8962200250e850c6adcab670779b9c2aa07271471059d1fb92a843fa9" | ||||
|                 "sha256:d1c6758a592fb0ef8abaa2fe12dd28858c1dcfc3d466102ffe52aa8934733dca", | ||||
|                 "sha256:f96c4556f4e7b15d987dd1dcc1d1526df81d40c1548d31ce840d597ed2be8c46" | ||||
|             ], | ||||
|             "index": "pypi", | ||||
|             "version": "==4.2.0" | ||||
|             "version": "==4.3.0" | ||||
|         }, | ||||
|         "pyyaml": { | ||||
|             "hashes": [ | ||||
| @ -1829,49 +1809,6 @@ | ||||
|             ], | ||||
|             "version": "==0.10.2" | ||||
|         }, | ||||
|         "typed-ast": { | ||||
|             "hashes": [ | ||||
|                 "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace", | ||||
|                 "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff", | ||||
|                 "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266", | ||||
|                 "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528", | ||||
|                 "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6", | ||||
|                 "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808", | ||||
|                 "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4", | ||||
|                 "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363", | ||||
|                 "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341", | ||||
|                 "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04", | ||||
|                 "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41", | ||||
|                 "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e", | ||||
|                 "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3", | ||||
|                 "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899", | ||||
|                 "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805", | ||||
|                 "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c", | ||||
|                 "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c", | ||||
|                 "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39", | ||||
|                 "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a", | ||||
|                 "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3", | ||||
|                 "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7", | ||||
|                 "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f", | ||||
|                 "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075", | ||||
|                 "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0", | ||||
|                 "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40", | ||||
|                 "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428", | ||||
|                 "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927", | ||||
|                 "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3", | ||||
|                 "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f", | ||||
|                 "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65" | ||||
|             ], | ||||
|             "version": "==1.4.3" | ||||
|         }, | ||||
|         "typing-extensions": { | ||||
|             "hashes": [ | ||||
|                 "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497", | ||||
|                 "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342", | ||||
|                 "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84" | ||||
|             ], | ||||
|             "version": "==3.10.0.0" | ||||
|         }, | ||||
|         "urllib3": { | ||||
|             "extras": [ | ||||
|                 "secure" | ||||
|  | ||||
| @ -1,3 +1,3 @@ | ||||
| """authentik""" | ||||
| __version__ = "2021.5.1-rc6" | ||||
| __version__ = "2021.5.4" | ||||
| ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" | ||||
|  | ||||
| @ -7,6 +7,7 @@ from django.urls import reverse | ||||
| from authentik import __version__ | ||||
| from authentik.core.models import Group, User | ||||
| from authentik.core.tasks import clean_expired_models | ||||
| from authentik.events.monitored_tasks import TaskResultStatus | ||||
|  | ||||
|  | ||||
| class TestAdminAPI(TestCase): | ||||
| @ -30,6 +31,26 @@ class TestAdminAPI(TestCase): | ||||
|             any(task["task_name"] == "clean_expired_models" for task in body) | ||||
|         ) | ||||
|  | ||||
|     def test_tasks_single(self): | ||||
|         """Test Task API (read single)""" | ||||
|         clean_expired_models.delay() | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|                 "authentik_api:admin_system_tasks-detail", | ||||
|                 kwargs={"pk": "clean_expired_models"}, | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|         body = loads(response.content) | ||||
|         self.assertEqual(body["status"], TaskResultStatus.SUCCESSFUL.name) | ||||
|         self.assertEqual(body["task_name"], "clean_expired_models") | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|                 "authentik_api:admin_system_tasks-detail", kwargs={"pk": "qwerqwer"} | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 404) | ||||
|  | ||||
|     def test_tasks_retry(self): | ||||
|         """Test Task API (retry)""" | ||||
|         clean_expired_models.delay() | ||||
|  | ||||
| @ -42,7 +42,7 @@ def token_from_header(raw_header: bytes) -> Optional[Token]: | ||||
|     return tokens.first() | ||||
|  | ||||
|  | ||||
| class AuthentikTokenAuthentication(BaseAuthentication): | ||||
| class TokenAuthentication(BaseAuthentication): | ||||
|     """Token-based authentication using HTTP Bearer authentication""" | ||||
|  | ||||
|     def authenticate(self, request: Request) -> Union[tuple[User, Any], None]: | ||||
| @ -54,4 +54,4 @@ class AuthentikTokenAuthentication(BaseAuthentication): | ||||
|         if not token: | ||||
|             return None | ||||
|  | ||||
|         return (token.user, None) | ||||
|         return (token.user, None)  # pragma: no cover | ||||

authentik/api/authorization.py (35 changes, new file)

							| @ -0,0 +1,35 @@ | ||||
| """API Authorization""" | ||||
| from django.db.models import Model | ||||
| from django.db.models.query import QuerySet | ||||
| from rest_framework.filters import BaseFilterBackend | ||||
| from rest_framework.permissions import BasePermission | ||||
| from rest_framework.request import Request | ||||
|  | ||||
|  | ||||
| class OwnerFilter(BaseFilterBackend): | ||||
|     """Filter objects by their owner""" | ||||
|  | ||||
|     owner_key = "user" | ||||
|  | ||||
|     def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet: | ||||
|         return queryset.filter(**{self.owner_key: request.user}) | ||||
|  | ||||
|  | ||||
| class OwnerPermissions(BasePermission): | ||||
|     """Authorize requests by an object's owner matching the requesting user""" | ||||
|  | ||||
|     owner_key = "user" | ||||
|  | ||||
|     def has_permission(self, request: Request, view) -> bool: | ||||
|         """If the user is authenticated, we allow all requests here. For listing, the | ||||
|         object-level permissions are done by the filter backend""" | ||||
|         return request.user.is_authenticated | ||||
|  | ||||
|     def has_object_permission(self, request: Request, view, obj: Model) -> bool: | ||||
|         """Check if the object's owner matches the currently logged in user""" | ||||
|         if not hasattr(obj, self.owner_key): | ||||
|             return False | ||||
|         owner = getattr(obj, self.owner_key) | ||||
|         if owner != request.user: | ||||
|             return False | ||||
|         return True | ||||
| @ -5,7 +5,7 @@ from django.test import TestCase | ||||
| from guardian.shortcuts import get_anonymous_user | ||||
| from rest_framework.exceptions import AuthenticationFailed | ||||
|  | ||||
| from authentik.api.auth import token_from_header | ||||
| from authentik.api.authentication import token_from_header | ||||
| from authentik.core.models import Token, TokenIntents | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -22,3 +22,10 @@ class TestSwaggerGeneration(APITestCase): | ||||
|             reverse("authentik_api:schema-json", kwargs={"format": ".json"}), | ||||
|         ) | ||||
|         self.assertTrue(loads(response.content.decode())) | ||||
|  | ||||
|     def test_browser(self): | ||||
|         """Test API Browser""" | ||||
|         response = self.client.get( | ||||
|             reverse("authentik_api:swagger"), | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
| @ -169,9 +169,19 @@ router.register("propertymappings/scope", ScopeMappingViewSet) | ||||
| router.register("authenticators/static", StaticDeviceViewSet) | ||||
| router.register("authenticators/totp", TOTPDeviceViewSet) | ||||
| router.register("authenticators/webauthn", WebAuthnDeviceViewSet) | ||||
| router.register("authenticators/admin/static", StaticAdminDeviceViewSet) | ||||
| router.register("authenticators/admin/totp", TOTPAdminDeviceViewSet) | ||||
| router.register("authenticators/admin/webauthn", WebAuthnAdminDeviceViewSet) | ||||
| router.register( | ||||
|     "authenticators/admin/static", | ||||
|     StaticAdminDeviceViewSet, | ||||
|     basename="admin-staticdevice", | ||||
| ) | ||||
| router.register( | ||||
|     "authenticators/admin/totp", TOTPAdminDeviceViewSet, basename="admin-totpdevice" | ||||
| ) | ||||
| router.register( | ||||
|     "authenticators/admin/webauthn", | ||||
|     WebAuthnAdminDeviceViewSet, | ||||
|     basename="admin-webauthndevice", | ||||
| ) | ||||
|  | ||||
| router.register("stages/all", StageViewSet) | ||||
| router.register("stages/authenticator/static", AuthenticatorStaticStageViewSet) | ||||
|  | ||||
| @ -4,6 +4,7 @@ from typing import Optional | ||||
| from django.core.cache import cache | ||||
| from django.db.models import QuerySet | ||||
| from django.http.response import HttpResponseBadRequest | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from drf_yasg import openapi | ||||
| from drf_yasg.utils import no_body, swagger_auto_schema | ||||
| from rest_framework.decorators import action | ||||
| @ -22,6 +23,7 @@ from authentik.core.api.providers import ProviderSerializer | ||||
| from authentik.core.models import Application | ||||
| from authentik.events.models import EventAction | ||||
| from authentik.policies.engine import PolicyEngine | ||||
| from authentik.stages.user_login.stage import USER_LOGIN_AUTHENTICATED | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
| @ -101,7 +103,9 @@ class ApplicationViewSet(ModelViewSet): | ||||
|     # pylint: disable=unused-argument | ||||
|     def check_access(self, request: Request, slug: str) -> Response: | ||||
|         """Check access to a single application by slug""" | ||||
|         application = self.get_object() | ||||
|         # Don't use self.get_object as that checks for view_application permission | ||||
|         # which the user might not have, even if they have access | ||||
|         application = get_object_or_404(Application, slug=slug) | ||||
|         engine = PolicyEngine(application, self.request.user, self.request) | ||||
|         engine.build() | ||||
|         if engine.passing: | ||||
| @ -119,6 +123,7 @@ class ApplicationViewSet(ModelViewSet): | ||||
|     ) | ||||
|     def list(self, request: Request) -> Response: | ||||
|         """Custom list method that checks Policy based access instead of guardian""" | ||||
|         self.request.session.pop(USER_LOGIN_AUTHENTICATED, None) | ||||
|         queryset = self._filter_queryset_for_list(self.get_queryset()) | ||||
|         self.paginate_queryset(queryset) | ||||
|  | ||||
|  | ||||
| @ -78,7 +78,7 @@ class PropertyMappingViewSet( | ||||
|     filterset_fields = {"managed": ["isnull"]} | ||||
|     ordering = ["name"] | ||||
|  | ||||
|     def get_queryset(self): | ||||
|     def get_queryset(self):  # pragma: no cover | ||||
|         return PropertyMapping.objects.select_subclasses() | ||||
|  | ||||
|     @swagger_auto_schema(responses={200: TypeCreateSerializer(many=True)}) | ||||
|  | ||||
| @ -63,7 +63,7 @@ class ProviderViewSet( | ||||
|         "application__name", | ||||
|     ] | ||||
|  | ||||
|     def get_queryset(self): | ||||
|     def get_queryset(self):  # pragma: no cover | ||||
|         return Provider.objects.select_subclasses() | ||||
|  | ||||
|     @swagger_auto_schema(responses={200: TypeCreateSerializer(many=True)}) | ||||
|  | ||||
| @ -61,7 +61,7 @@ class SourceViewSet( | ||||
|     serializer_class = SourceSerializer | ||||
|     lookup_field = "slug" | ||||
|  | ||||
|     def get_queryset(self): | ||||
|     def get_queryset(self):  # pragma: no cover | ||||
|         return Source.objects.select_subclasses() | ||||
|  | ||||
|     @swagger_auto_schema(responses={200: TypeCreateSerializer(many=True)}) | ||||
|  | ||||
| @ -139,7 +139,7 @@ class UserViewSet(ModelViewSet): | ||||
|     search_fields = ["username", "name", "is_active"] | ||||
|     filterset_class = UsersFilter | ||||
|  | ||||
|     def get_queryset(self): | ||||
|     def get_queryset(self):  # pragma: no cover | ||||
|         return User.objects.all().exclude(pk=get_anonymous_user().pk) | ||||
|  | ||||
|     @swagger_auto_schema(responses={200: SessionUserSerializer(many=False)}) | ||||
|  | ||||
| @ -4,7 +4,7 @@ from channels.generic.websocket import JsonWebsocketConsumer | ||||
| from rest_framework.exceptions import AuthenticationFailed | ||||
| from structlog.stdlib import get_logger | ||||
|  | ||||
| from authentik.api.auth import token_from_header | ||||
| from authentik.api.authentication import token_from_header | ||||
| from authentik.core.models import User | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||

authentik/core/migrations/0021_alter_application_slug.py (20 changes, new file)

							| @ -0,0 +1,20 @@ | ||||
| # Generated by Django 3.2.3 on 2021-05-14 08:48 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_core", "0020_source_user_matching_mode"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name="application", | ||||
|             name="slug", | ||||
|             field=models.SlugField( | ||||
|                 help_text="Internal application name, used in URLs.", unique=True | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @ -207,7 +207,9 @@ class Application(PolicyBindingModel): | ||||
|     add custom fields and other properties""" | ||||
|  | ||||
|     name = models.TextField(help_text=_("Application's display Name.")) | ||||
|     slug = models.SlugField(help_text=_("Internal application name, used in URLs.")) | ||||
|     slug = models.SlugField( | ||||
|         help_text=_("Internal application name, used in URLs."), unique=True | ||||
|     ) | ||||
|     provider = models.OneToOneField( | ||||
|         "Provider", null=True, blank=True, default=None, on_delete=models.SET_DEFAULT | ||||
|     ) | ||||
|  | ||||
| @ -75,5 +75,6 @@ def backup_database(self: MonitoredTask):  # pragma: no cover | ||||
|         Boto3Error, | ||||
|         PermissionError, | ||||
|         CommandConnectorError, | ||||
|         ValueError, | ||||
|     ) as exc: | ||||
|         self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc)) | ||||
|  | ||||
| @ -24,7 +24,7 @@ class TestApplicationsAPI(APITestCase): | ||||
|         ) | ||||
|  | ||||
|     def test_check_access(self): | ||||
|         """Test check_access operation """ | ||||
|         """Test check_access operation""" | ||||
|         self.client.force_login(self.user) | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|  | ||||
| @ -21,7 +21,7 @@ class TestModels(TestCase): | ||||
|         self.assertTrue(token.is_expired) | ||||
|  | ||||
|     def test_token_expire_no_expire(self): | ||||
|         """Test token expiring with "expiring" set """ | ||||
|         """Test token expiring with "expiring" set""" | ||||
|         token = Token.objects.create( | ||||
|             expires=now(), user=get_anonymous_user(), expiring=False | ||||
|         ) | ||||
|  | ||||
| @ -3,7 +3,9 @@ import django_filters | ||||
| from cryptography.hazmat.backends import default_backend | ||||
| from cryptography.hazmat.primitives.serialization import load_pem_private_key | ||||
| from cryptography.x509 import load_pem_x509_certificate | ||||
| from django.http.response import HttpResponse | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from drf_yasg import openapi | ||||
| from drf_yasg.utils import swagger_auto_schema | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.fields import ( | ||||
| @ -145,7 +147,16 @@ class CertificateKeyPairViewSet(ModelViewSet): | ||||
|         serializer = self.get_serializer(instance) | ||||
|         return Response(serializer.data) | ||||
|  | ||||
|     @swagger_auto_schema(responses={200: CertificateDataSerializer(many=False)}) | ||||
|     @swagger_auto_schema( | ||||
|         manual_parameters=[ | ||||
|             openapi.Parameter( | ||||
|                 name="download", | ||||
|                 in_=openapi.IN_QUERY, | ||||
|                 type=openapi.TYPE_BOOLEAN, | ||||
|             ) | ||||
|         ], | ||||
|         responses={200: CertificateDataSerializer(many=False)}, | ||||
|     ) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||
|     # pylint: disable=invalid-name, unused-argument | ||||
|     def view_certificate(self, request: Request, pk: str) -> Response: | ||||
| @ -156,11 +167,29 @@ class CertificateKeyPairViewSet(ModelViewSet): | ||||
|             secret=certificate, | ||||
|             type="certificate", | ||||
|         ).from_http(request) | ||||
|         if "download" in request._request.GET: | ||||
|             # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html | ||||
|             response = HttpResponse( | ||||
|                 certificate.certificate_data, content_type="application/x-pem-file" | ||||
|             ) | ||||
|             response[ | ||||
|                 "Content-Disposition" | ||||
|             ] = f'attachment; filename="{certificate.name}_certificate.pem"' | ||||
|             return response | ||||
|         return Response( | ||||
|             CertificateDataSerializer({"data": certificate.certificate_data}).data | ||||
|         ) | ||||
|  | ||||
|     @swagger_auto_schema(responses={200: CertificateDataSerializer(many=False)}) | ||||
|     @swagger_auto_schema( | ||||
|         manual_parameters=[ | ||||
|             openapi.Parameter( | ||||
|                 name="download", | ||||
|                 in_=openapi.IN_QUERY, | ||||
|                 type=openapi.TYPE_BOOLEAN, | ||||
|             ) | ||||
|         ], | ||||
|         responses={200: CertificateDataSerializer(many=False)}, | ||||
|     ) | ||||
|     @action(detail=True, pagination_class=None, filter_backends=[]) | ||||
|     # pylint: disable=invalid-name, unused-argument | ||||
|     def view_private_key(self, request: Request, pk: str) -> Response: | ||||
| @ -171,4 +200,13 @@ class CertificateKeyPairViewSet(ModelViewSet): | ||||
|             secret=certificate, | ||||
|             type="private_key", | ||||
|         ).from_http(request) | ||||
|         if "download" in request._request.GET: | ||||
|             # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html | ||||
|             response = HttpResponse( | ||||
|                 certificate.key_data, content_type="application/x-pem-file" | ||||
|             ) | ||||
|             response[ | ||||
|                 "Content-Disposition" | ||||
|             ] = f'attachment; filename="{certificate.name}_private_key.pem"' | ||||
|             return response | ||||
|         return Response(CertificateDataSerializer({"data": certificate.key_data}).data) | ||||
|  | ||||
| @ -2,7 +2,9 @@ | ||||
| import datetime | ||||
|  | ||||
| from django.test import TestCase | ||||
| from django.urls import reverse | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.crypto.api import CertificateKeyPairSerializer | ||||
| from authentik.crypto.builder import CertificateBuilder | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| @ -47,3 +49,45 @@ class TestCrypto(TestCase): | ||||
|         now = datetime.datetime.today() | ||||
|         self.assertEqual(instance.name, "test-cert") | ||||
|         self.assertEqual((instance.certificate.not_valid_after - now).days, 2) | ||||
|  | ||||
|     def test_certificate_download(self): | ||||
|         """Test certificate export (download)""" | ||||
|         self.client.force_login(User.objects.get(username="akadmin")) | ||||
|         keypair = CertificateKeyPair.objects.first() | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|                 "authentik_api:certificatekeypair-view-certificate", | ||||
|                 kwargs={"pk": keypair.pk}, | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(200, response.status_code) | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|                 "authentik_api:certificatekeypair-view-certificate", | ||||
|                 kwargs={"pk": keypair.pk}, | ||||
|             ) | ||||
|             + "?download", | ||||
|         ) | ||||
|         self.assertEqual(200, response.status_code) | ||||
|         self.assertIn("Content-Disposition", response) | ||||
|  | ||||
|     def test_private_key_download(self): | ||||
|         """Test private_key export (download)""" | ||||
|         self.client.force_login(User.objects.get(username="akadmin")) | ||||
|         keypair = CertificateKeyPair.objects.first() | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|                 "authentik_api:certificatekeypair-view-private-key", | ||||
|                 kwargs={"pk": keypair.pk}, | ||||
|             ) | ||||
|         ) | ||||
|         self.assertEqual(200, response.status_code) | ||||
|         response = self.client.get( | ||||
|             reverse( | ||||
|                 "authentik_api:certificatekeypair-view-private-key", | ||||
|                 kwargs={"pk": keypair.pk}, | ||||
|             ) | ||||
|             + "?download", | ||||
|         ) | ||||
|         self.assertEqual(200, response.status_code) | ||||
|         self.assertIn("Content-Disposition", response) | ||||
|  | ||||
| @ -1,12 +1,12 @@ | ||||
| """Notification API Views""" | ||||
| from django_filters.rest_framework import DjangoFilterBackend | ||||
| from guardian.utils import get_anonymous_user | ||||
| from rest_framework import mixins | ||||
| from rest_framework.fields import ReadOnlyField | ||||
| from rest_framework.filters import OrderingFilter, SearchFilter | ||||
| from rest_framework.serializers import ModelSerializer | ||||
| from rest_framework.viewsets import GenericViewSet | ||||
|  | ||||
| from authentik.api.authorization import OwnerFilter, OwnerPermissions | ||||
| from authentik.events.api.event import EventSerializer | ||||
| from authentik.events.models import Notification | ||||
|  | ||||
| @ -49,12 +49,5 @@ class NotificationViewSet( | ||||
|         "event", | ||||
|         "seen", | ||||
|     ] | ||||
|     filter_backends = [ | ||||
|         DjangoFilterBackend, | ||||
|         OrderingFilter, | ||||
|         SearchFilter, | ||||
|     ] | ||||
|  | ||||
|     def get_queryset(self): | ||||
|         user = self.request.user if self.request else get_anonymous_user() | ||||
|         return Notification.objects.filter(user=user.pk) | ||||
|     permission_classes = [OwnerPermissions] | ||||
|     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter] | ||||
|  | ||||
| @ -2,22 +2,25 @@ | ||||
| from rest_framework.serializers import ModelSerializer | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.core.api.groups import GroupSerializer | ||||
| from authentik.events.models import NotificationRule | ||||
|  | ||||
|  | ||||
| class NotificationRuleSerializer(ModelSerializer): | ||||
|     """NotificationRule Serializer""" | ||||
|  | ||||
|     group_obj = GroupSerializer(read_only=True, source="group") | ||||
|  | ||||
|     class Meta: | ||||
|  | ||||
|         model = NotificationRule | ||||
|         depth = 2 | ||||
|         fields = [ | ||||
|             "pk", | ||||
|             "name", | ||||
|             "transports", | ||||
|             "severity", | ||||
|             "group", | ||||
|             "group_obj", | ||||
|         ] | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -210,6 +210,7 @@ class FlowViewSet(ModelViewSet): | ||||
|                     request.user, "authentik_policies.view_policybinding" | ||||
|                 ) | ||||
|                 .filter(target=stage_binding) | ||||
|                 .exclude(policy__isnull=True) | ||||
|                 .order_by("order") | ||||
|             ): | ||||
|                 body.append( | ||||
|  | ||||
| @ -65,7 +65,7 @@ class StageViewSet( | ||||
|     search_fields = ["name"] | ||||
|     filterset_fields = ["name"] | ||||
|  | ||||
|     def get_queryset(self): | ||||
|     def get_queryset(self):  # pragma: no cover | ||||
|         return Stage.objects.select_subclasses() | ||||
|  | ||||
|     @swagger_auto_schema(responses={200: TypeCreateSerializer(many=True)}) | ||||
|  | ||||
| @ -21,7 +21,7 @@ context["user_backend"] = "django.contrib.auth.backends.ModelBackend" | ||||
| return True""" | ||||
|  | ||||
|  | ||||
| def create_default_oob_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
| def create_default_oobe_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): | ||||
|     from authentik.stages.prompt.models import FieldTypes | ||||
|  | ||||
|     User = apps.get_model("authentik_core", "User") | ||||
| @ -52,20 +52,20 @@ def create_default_oob_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) | ||||
|  | ||||
|     # Create a policy that sets the flow's user | ||||
|     prefill_policy, _ = ExpressionPolicy.objects.using(db_alias).update_or_create( | ||||
|         name="default-oob-prefill-user", | ||||
|         name="default-oobe-prefill-user", | ||||
|         defaults={"expression": PREFILL_POLICY_EXPRESSION}, | ||||
|     ) | ||||
|     password_usable_policy, _ = ExpressionPolicy.objects.using( | ||||
|         db_alias | ||||
|     ).update_or_create( | ||||
|         name="default-oob-password-usable", | ||||
|         name="default-oobe-password-usable", | ||||
|         defaults={"expression": PW_USABLE_POLICY_EXPRESSION}, | ||||
|     ) | ||||
|  | ||||
|     prompt_header, _ = Prompt.objects.using(db_alias).update_or_create( | ||||
|         field_key="oob-header-text", | ||||
|         field_key="oobe-header-text", | ||||
|         defaults={ | ||||
|             "label": "oob-header-text", | ||||
|             "label": "oobe-header-text", | ||||
|             "type": FieldTypes.STATIC, | ||||
|             "placeholder": "Welcome to authentik! Please set a password for the default admin user, akadmin.", | ||||
|             "order": 100, | ||||
| @ -84,7 +84,7 @@ def create_default_oob_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) | ||||
|     password_second = Prompt.objects.using(db_alias).get(field_key="password_repeat") | ||||
|  | ||||
|     prompt_stage, _ = PromptStage.objects.using(db_alias).update_or_create( | ||||
|         name="default-oob-password", | ||||
|         name="default-oobe-password", | ||||
|     ) | ||||
|     prompt_stage.fields.set( | ||||
|         [prompt_header, prompt_email, password_first, password_second] | ||||
| @ -102,7 +102,7 @@ def create_default_oob_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) | ||||
|         slug="initial-setup", | ||||
|         designation=FlowDesignation.STAGE_CONFIGURATION, | ||||
|         defaults={ | ||||
|             "name": "default-oob-setup", | ||||
|             "name": "default-oobe-setup", | ||||
|             "title": "Welcome to authentik!", | ||||
|         }, | ||||
|     ) | ||||
| @ -146,5 +146,5 @@ class Migration(migrations.Migration): | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RunPython(create_default_oob_flow), | ||||
|         migrations.RunPython(create_default_oobe_flow), | ||||
|     ] | ||||
|  | ||||
| @ -298,7 +298,7 @@ class CancelView(View): | ||||
|         if SESSION_KEY_PLAN in request.session: | ||||
|             del request.session[SESSION_KEY_PLAN] | ||||
|             LOGGER.debug("Canceled current plan") | ||||
|         return redirect("authentik_core:root-redirect") | ||||
|         return redirect("authentik_flows:default-invalidation") | ||||
|  | ||||
|  | ||||
| class ToDefaultFlow(View): | ||||
|  | ||||
| @ -88,10 +88,10 @@ class ConfigLoader: | ||||
|             value = os.getenv(url.netloc, url.query) | ||||
|         if url.scheme == "file": | ||||
|             try: | ||||
|                 with open(url.netloc, "r") as _file: | ||||
|                 with open(url.path, "r") as _file: | ||||
|                     value = _file.read() | ||||
|             except OSError: | ||||
|                 self._log("error", f"Failed to read config value from {url.netloc}") | ||||
|                 self._log("error", f"Failed to read config value from {url.path}") | ||||
|                 value = url.query | ||||
|         return value | ||||
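A quick sketch of why the loader now reads url.path instead of url.netloc: for a file:// URL with an absolute path, urlparse leaves netloc empty and puts the whole filename in path (the secret path below is an illustrative assumption, not a value from this repository).

# Sketch only: file:///... URLs carry the filename in url.path, not url.netloc.
from urllib.parse import urlparse

url = urlparse("file:///run/secrets/postgres_password")  # assumed example value
print(repr(url.netloc))  # '' - opening this would always fail
print(repr(url.path))    # '/run/secrets/postgres_password'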
|  | ||||
|  | ||||
| @ -3,6 +3,7 @@ postgresql: | ||||
|   host: localhost | ||||
|   name: authentik | ||||
|   user: authentik | ||||
|   port: 5432 | ||||
|   password: 'env://POSTGRES_PASSWORD' | ||||
|  | ||||
| web: | ||||
| @ -41,7 +42,8 @@ outposts: | ||||
|   # Placeholders: | ||||
|   # %(type)s: Outpost type; proxy, ldap, etc | ||||
|   # %(version)s: Current version; 2021.4.1 | ||||
|   docker_image_base: "beryju/authentik-%(type)s:%(version)s" | ||||
|   # %(build_hash)s: Build hash if you're running a beta version | ||||
|   docker_image_base: "ghcr.io/goauthentik/%(type)s:%(version)s" | ||||
|  | ||||
| authentik: | ||||
|   avatars: gravatar  # gravatar or none | ||||
|  | ||||
| @ -4,10 +4,15 @@ from typing import Optional | ||||
| from aioredis.errors import ConnectionClosedError, ReplyError | ||||
| from billiard.exceptions import WorkerLostError | ||||
| from botocore.client import ClientError | ||||
| from botocore.exceptions import BotoCoreError | ||||
| from celery.exceptions import CeleryError | ||||
| from channels.middleware import BaseMiddleware | ||||
| from channels_redis.core import ChannelFull | ||||
| from django.core.exceptions import SuspiciousOperation, ValidationError | ||||
| from django.core.exceptions import ( | ||||
|     ImproperlyConfigured, | ||||
|     SuspiciousOperation, | ||||
|     ValidationError, | ||||
| ) | ||||
| from django.db import InternalError, OperationalError, ProgrammingError | ||||
| from django.http.response import Http404 | ||||
| from django_redis.exceptions import ConnectionInterrupted | ||||
| @ -50,7 +55,8 @@ def before_send(event: dict, hint: dict) -> Optional[dict]: | ||||
|         ConnectionResetError, | ||||
|         OSError, | ||||
|         PermissionError, | ||||
|         # Django DB Errors | ||||
|         # Django Errors | ||||
|         ImproperlyConfigured, | ||||
|         OperationalError, | ||||
|         InternalError, | ||||
|         ProgrammingError, | ||||
| @ -72,6 +78,7 @@ def before_send(event: dict, hint: dict) -> Optional[dict]: | ||||
|         WorkerLostError, | ||||
|         CeleryError, | ||||
|         # S3 errors | ||||
|         BotoCoreError, | ||||
|         ClientError, | ||||
|         # custom baseclass | ||||
|         SentryIgnoredException, | ||||
| @ -87,6 +94,6 @@ def before_send(event: dict, hint: dict) -> Optional[dict]: | ||||
|         if isinstance(exc_value, ignored_classes): | ||||
|             return None | ||||
|     if "logger" in event: | ||||
|         if event["logger"] in ["dbbackup"]: | ||||
|         if event["logger"] in ["dbbackup", "botocore"]: | ||||
|             return None | ||||
|     return event | ||||
|  | ||||
| @ -17,7 +17,8 @@ def _get_client_ip_from_meta(meta: dict[str, Any]) -> Optional[str]: | ||||
|     ) | ||||
|     for _header in headers: | ||||
|         if _header in meta: | ||||
|             return meta.get(_header).split(", ")[0] | ||||
|             ips: list[str] = meta.get(_header).split(",") | ||||
|             return ips[0].strip() | ||||
|     return None | ||||
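As a rough illustration (the header value is an assumption), splitting on a bare comma and stripping each entry tolerates both "a, b" and "a,b" styles of forwarded-for headers, which the previous split(", ") did not:

# Sketch of the forwarded-header parsing; the header value is assumed.
header = "203.0.113.7,198.51.100.23"
ips = header.split(",")
print(ips[0].strip())  # 203.0.113.7 - works with or without a space after the comma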
|  | ||||
|  | ||||
|  | ||||
| @ -2,28 +2,6 @@ | ||||
| from django.http import HttpRequest | ||||
| from django.template.response import TemplateResponse | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from django.views.generic import CreateView | ||||
| from guardian.shortcuts import assign_perm | ||||
|  | ||||
|  | ||||
| class CreateAssignPermView(CreateView): | ||||
|     """Assign permissions to object after creation""" | ||||
|  | ||||
|     permissions = [ | ||||
|         "%s.view_%s", | ||||
|         "%s.change_%s", | ||||
|         "%s.delete_%s", | ||||
|     ] | ||||
|  | ||||
|     def form_valid(self, form): | ||||
|         response = super().form_valid(form) | ||||
|         for permission in self.permissions: | ||||
|             full_permission = permission % ( | ||||
|                 self.object._meta.app_label, | ||||
|                 self.object._meta.model_name, | ||||
|             ) | ||||
|             assign_perm(full_permission, self.request.user, self.object) | ||||
|         return response | ||||
|  | ||||
|  | ||||
| def bad_request_message( | ||||
|  | ||||
| @ -1,23 +1,33 @@ | ||||
| """Outpost API Views""" | ||||
| from dacite.core import from_dict | ||||
| from dacite.exceptions import DaciteError | ||||
| from drf_yasg.utils import swagger_auto_schema | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.fields import BooleanField, CharField, DateTimeField | ||||
| from rest_framework.request import Request | ||||
| from rest_framework.response import Response | ||||
| from rest_framework.serializers import JSONField, ModelSerializer | ||||
| from rest_framework.serializers import JSONField, ModelSerializer, ValidationError | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.core.api.providers import ProviderSerializer | ||||
| from authentik.core.api.utils import PassiveSerializer, is_dict | ||||
| from authentik.outposts.models import Outpost, default_outpost_config | ||||
| from authentik.outposts.models import Outpost, OutpostConfig, default_outpost_config | ||||
|  | ||||
|  | ||||
| class OutpostSerializer(ModelSerializer): | ||||
|     """Outpost Serializer""" | ||||
|  | ||||
|     _config = JSONField(validators=[is_dict]) | ||||
|     config = JSONField(validators=[is_dict], source="_config") | ||||
|     providers_obj = ProviderSerializer(source="providers", many=True, read_only=True) | ||||
|  | ||||
|     def validate_config(self, config) -> dict: | ||||
|         """Check that the config has all required fields""" | ||||
|         try: | ||||
|             from_dict(OutpostConfig, config) | ||||
|         except DaciteError as exc: | ||||
|             raise ValidationError(f"Failed to validate config: {str(exc)}") from exc | ||||
|         return config | ||||
|  | ||||
|     class Meta: | ||||
|  | ||||
|         model = Outpost | ||||
| @ -29,7 +39,7 @@ class OutpostSerializer(ModelSerializer): | ||||
|             "providers_obj", | ||||
|             "service_connection", | ||||
|             "token_identifier", | ||||
|             "_config", | ||||
|             "config", | ||||
|         ] | ||||
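A minimal sketch of the dacite-based check added in validate_config, assuming OutpostConfig has at least one required field (the API test further down passes config={} and expects a validation error):

# Sketch only: from_dict raises a DaciteError when the dict does not satisfy
# the OutpostConfig dataclass; validate_config turns that into a ValidationError.
from dacite.core import from_dict
from dacite.exceptions import DaciteError

from authentik.outposts.models import OutpostConfig

try:
    from_dict(OutpostConfig, {})
except DaciteError as exc:
    print(f"Failed to validate config: {exc}")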
|  | ||||
|  | ||||
|  | ||||
| @ -40,7 +40,9 @@ class WebsocketMessage: | ||||
| class OutpostConsumer(AuthJsonConsumer): | ||||
|     """Handler for Outposts that connect over websockets for health checks and live updates""" | ||||
|  | ||||
|     outpost: Optional[Outpost] = None | ||||
|     outpost: Outpost | ||||
|  | ||||
|     last_uid: Optional[str] = None | ||||
|  | ||||
|     def connect(self): | ||||
|         super().connect() | ||||
| @ -52,9 +54,7 @@ class OutpostConsumer(AuthJsonConsumer): | ||||
|             raise DenyConnection() | ||||
|         self.accept() | ||||
|         self.outpost = outpost.first() | ||||
|         OutpostState( | ||||
|             uid=self.channel_name, last_seen=datetime.now(), _outpost=self.outpost | ||||
|         ).save(timeout=OUTPOST_HELLO_INTERVAL * 1.5) | ||||
|         self.last_uid = self.channel_name | ||||
|         LOGGER.debug( | ||||
|             "added outpost instace to cache", | ||||
|             outpost=self.outpost, | ||||
| @ -63,27 +63,31 @@ class OutpostConsumer(AuthJsonConsumer): | ||||
|  | ||||
|     # pylint: disable=unused-argument | ||||
|     def disconnect(self, close_code): | ||||
|         if self.outpost: | ||||
|             OutpostState.for_channel(self.outpost, self.channel_name).delete() | ||||
|         if self.outpost and self.last_uid: | ||||
|             state = OutpostState.for_instance_uid(self.outpost, self.last_uid) | ||||
|             if self.channel_name in state.channel_ids: | ||||
|                 state.channel_ids.remove(self.channel_name) | ||||
|                 state.save() | ||||
|         LOGGER.debug( | ||||
|             "removed outpost instance from cache", | ||||
|             outpost=self.outpost, | ||||
|             channel_name=self.channel_name, | ||||
|             instance_uuid=self.last_uid, | ||||
|         ) | ||||
|  | ||||
|     def receive_json(self, content: Data): | ||||
|         msg = from_dict(WebsocketMessage, content) | ||||
|         state = OutpostState( | ||||
|             uid=self.channel_name, | ||||
|             last_seen=datetime.now(), | ||||
|             _outpost=self.outpost, | ||||
|         ) | ||||
|         uid = msg.args.get("uuid", self.channel_name) | ||||
|         self.last_uid = uid | ||||
|         state = OutpostState.for_instance_uid(self.outpost, uid) | ||||
|         if self.channel_name not in state.channel_ids: | ||||
|             state.channel_ids.append(self.channel_name) | ||||
|         state.last_seen = datetime.now() | ||||
|         if msg.instruction == WebsocketMessageInstruction.HELLO: | ||||
|             state.version = msg.args.get("version", None) | ||||
|             state.build_hash = msg.args.get("buildHash", "") | ||||
|         elif msg.instruction == WebsocketMessageInstruction.ACK: | ||||
|             return | ||||
|         if state.version: | ||||
|             state.save(timeout=OUTPOST_HELLO_INTERVAL * 1.5) | ||||
|         state.save(timeout=OUTPOST_HELLO_INTERVAL * 1.5) | ||||
|  | ||||
|         response = WebsocketMessage(instruction=WebsocketMessageInstruction.ACK) | ||||
|         self.send_json(asdict(response)) | ||||
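In plain terms, state is now keyed by the instance uid sent in the HELLO message, and each state keeps a list of channel names. A bare-Python sketch of that bookkeeping, with assumed names:

# Sketch of the per-instance channel list handling (names are assumptions).
channel_ids: list[str] = []
channel_name = "specific.outpost!abc123"

# on receive_json: remember the channel for this instance uid
if channel_name not in channel_ids:
    channel_ids.append(channel_name)

# on disconnect: drop only this channel; other channels of the instance stay
if channel_name in channel_ids:
    channel_ids.remove(channel_name)

print(channel_ids)  # []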
|  | ||||
| @ -1,11 +1,12 @@ | ||||
| """Base Controller""" | ||||
| from dataclasses import dataclass | ||||
| from os import environ | ||||
| from typing import Optional | ||||
|  | ||||
| from structlog.stdlib import get_logger | ||||
| from structlog.testing import capture_logs | ||||
|  | ||||
| from authentik import __version__ | ||||
| from authentik import ENV_GIT_HASH_KEY, __version__ | ||||
| from authentik.lib.config import CONFIG | ||||
| from authentik.lib.sentry import SentryIgnoredException | ||||
| from authentik.outposts.models import Outpost, OutpostServiceConnection | ||||
| @ -56,6 +57,12 @@ class BaseController: | ||||
|         """Handler to delete everything we've created""" | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def down_with_logs(self) -> list[str]: | ||||
|         """Call .down() but capture all log output and return it.""" | ||||
|         with capture_logs() as logs: | ||||
|             self.down() | ||||
|         return [x["event"] for x in logs] | ||||
|  | ||||
|     def get_static_deployment(self) -> str: | ||||
|         """Return a static deployment configuration""" | ||||
|         raise NotImplementedError | ||||
| @ -63,4 +70,8 @@ class BaseController: | ||||
|     def get_container_image(self) -> str: | ||||
|         """Get container image to use for this outpost""" | ||||
|         image_name_template: str = CONFIG.y("outposts.docker_image_base") | ||||
|         return image_name_template % {"type": self.outpost.type, "version": __version__} | ||||
|         return image_name_template % { | ||||
|             "type": self.outpost.type, | ||||
|             "version": __version__, | ||||
|             "build_hash": environ.get(ENV_GIT_HASH_KEY, ""), | ||||
|         } | ||||
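For illustration, this is how the extended template is expected to resolve with the placeholders documented in the default config above; the concrete type and version are assumptions:

# Sketch of the image-name templating with the new build_hash placeholder.
image_name_template = "ghcr.io/goauthentik/%(type)s:%(version)s"
image = image_name_template % {
    "type": "proxy",        # assumed outpost type
    "version": "2021.4.1",  # version as used in the config comment above
    "build_hash": "",       # empty on tagged releases; ignored by this template
}
print(image)  # ghcr.io/goauthentik/proxy:2021.4.1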
|  | ||||
| @ -30,11 +30,6 @@ class NeedsUpdate(ReconcileTrigger): | ||||
|     """Exception to trigger an update to the Kubernetes Object""" | ||||
|  | ||||
|  | ||||
| class Disabled(SentryIgnoredException): | ||||
|     """Exception which can be thrown in a reconciler to signal than an | ||||
|     object should not be created.""" | ||||
|  | ||||
|  | ||||
| class KubernetesObjectReconciler(Generic[T]): | ||||
|     """Base Kubernetes Reconciler, handles the basic logic.""" | ||||
|  | ||||
| @ -45,6 +40,11 @@ class KubernetesObjectReconciler(Generic[T]): | ||||
|         self.namespace = controller.outpost.config.kubernetes_namespace | ||||
|         self.logger = get_logger().bind(type=self.__class__.__name__) | ||||
|  | ||||
|     @property | ||||
|     def noop(self) -> bool: | ||||
|         """Return true if this object should not be created/updated/deleted in this cluster""" | ||||
|         return False | ||||
|  | ||||
|     @property | ||||
|     def name(self) -> str: | ||||
|         """Get the name of the object this reconciler manages""" | ||||
| @ -59,11 +59,10 @@ class KubernetesObjectReconciler(Generic[T]): | ||||
|     def up(self): | ||||
|         """Create object if it doesn't exist, update if needed or recreate if needed.""" | ||||
|         current = None | ||||
|         try: | ||||
|             reference = self.get_reference_object() | ||||
|         except Disabled: | ||||
|             self.logger.debug("Object not required") | ||||
|         if self.noop: | ||||
|             self.logger.debug("Object is noop") | ||||
|             return | ||||
|         reference = self.get_reference_object() | ||||
|         try: | ||||
|             try: | ||||
|                 current = self.retrieve() | ||||
| @ -92,11 +91,8 @@ class KubernetesObjectReconciler(Generic[T]): | ||||
|  | ||||
|     def down(self): | ||||
|         """Delete object if found""" | ||||
|         # Call self.get_reference_object to check if we even need to do anything | ||||
|         try: | ||||
|             self.get_reference_object() | ||||
|         except Disabled: | ||||
|             self.logger.debug("Object not required") | ||||
|         if self.noop: | ||||
|             self.logger.debug("Object is noop") | ||||
|             return | ||||
|         try: | ||||
|             current = self.retrieve() | ||||
|  | ||||
| @ -8,7 +8,7 @@ from structlog.testing import capture_logs | ||||
| from yaml import dump_all | ||||
|  | ||||
| from authentik.outposts.controllers.base import BaseController, ControllerException | ||||
| from authentik.outposts.controllers.k8s.base import Disabled, KubernetesObjectReconciler | ||||
| from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler | ||||
| from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler | ||||
| from authentik.outposts.controllers.k8s.secret import SecretReconciler | ||||
| from authentik.outposts.controllers.k8s.service import ServiceReconciler | ||||
| @ -49,6 +49,9 @@ class KubernetesController(BaseController): | ||||
|         try: | ||||
|             all_logs = [] | ||||
|             for reconcile_key in self.reconcile_order: | ||||
|                 if reconcile_key in self.outpost.config.kubernetes_disabled_components: | ||||
|                     all_logs += [f"{reconcile_key.title()}: Disabled"] | ||||
|                     continue | ||||
|                 with capture_logs() as logs: | ||||
|                     reconciler = self.reconcilers[reconcile_key](self) | ||||
|                     reconciler.up() | ||||
| @ -67,14 +70,28 @@ class KubernetesController(BaseController): | ||||
|         except ApiException as exc: | ||||
|             raise ControllerException(str(exc)) from exc | ||||
|  | ||||
|     def down_with_logs(self) -> list[str]: | ||||
|         try: | ||||
|             all_logs = [] | ||||
|             for reconcile_key in self.reconcile_order: | ||||
|                 if reconcile_key in self.outpost.config.kubernetes_disabled_components: | ||||
|                     all_logs += [f"{reconcile_key.title()}: Disabled"] | ||||
|                     continue | ||||
|                 with capture_logs() as logs: | ||||
|                     reconciler = self.reconcilers[reconcile_key](self) | ||||
|                     reconciler.down() | ||||
|                 all_logs += [f"{reconcile_key.title()}: {x['event']}" for x in logs] | ||||
|             return all_logs | ||||
|         except ApiException as exc: | ||||
|             raise ControllerException(str(exc)) from exc | ||||
|  | ||||
|     def get_static_deployment(self) -> str: | ||||
|         documents = [] | ||||
|         for reconcile_key in self.reconcile_order: | ||||
|             reconciler = self.reconcilers[reconcile_key](self) | ||||
|             try: | ||||
|                 documents.append(reconciler.get_reference_object().to_dict()) | ||||
|             except Disabled: | ||||
|             if reconciler.noop: | ||||
|                 continue | ||||
|             documents.append(reconciler.get_reference_object().to_dict()) | ||||
|  | ||||
|         with StringIO() as _str: | ||||
|             dump_all( | ||||
|  | ||||
| @ -1,6 +1,7 @@ | ||||
| """Outpost models""" | ||||
| from dataclasses import asdict, dataclass, field | ||||
| from datetime import datetime | ||||
| from os import environ | ||||
| from typing import Iterable, Optional, Union | ||||
| from uuid import uuid4 | ||||
|  | ||||
| @ -26,7 +27,7 @@ from packaging.version import LegacyVersion, Version, parse | ||||
| from structlog.stdlib import get_logger | ||||
| from urllib3.exceptions import HTTPError | ||||
|  | ||||
| from authentik import __version__ | ||||
| from authentik import ENV_GIT_HASH_KEY, __version__ | ||||
| from authentik.core.models import USER_ATTRIBUTE_SA, Provider, Token, TokenIntents, User | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.lib.config import CONFIG | ||||
| @ -42,7 +43,7 @@ LOGGER = get_logger() | ||||
|  | ||||
|  | ||||
| class ServiceConnectionInvalid(SentryIgnoredException): | ||||
|     """"Exception raised when a Service Connection has invalid parameters""" | ||||
|     """Exception raised when a Service Connection has invalid parameters""" | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| @ -64,6 +65,7 @@ class OutpostConfig: | ||||
|     kubernetes_ingress_annotations: dict[str, str] = field(default_factory=dict) | ||||
|     kubernetes_ingress_secret_name: str = field(default="authentik-outpost-tls") | ||||
|     kubernetes_service_type: str = field(default="ClusterIP") | ||||
|     kubernetes_disabled_components: list[str] = field(default_factory=list) | ||||
|  | ||||
|  | ||||
| class OutpostModel(Model): | ||||
| @ -407,9 +409,11 @@ class OutpostState: | ||||
|     """Outpost instance state, last_seen and version""" | ||||
|  | ||||
|     uid: str | ||||
|     channel_ids: list[str] = field(default_factory=list) | ||||
|     last_seen: Optional[datetime] = field(default=None) | ||||
|     version: Optional[str] = field(default=None) | ||||
|     version_should: Union[Version, LegacyVersion] = field(default=OUR_VERSION) | ||||
|     build_hash: str = field(default="") | ||||
|  | ||||
|     _outpost: Optional[Outpost] = field(default=None) | ||||
|  | ||||
| @ -418,6 +422,8 @@ class OutpostState: | ||||
|         """Check if outpost version matches our version""" | ||||
|         if not self.version: | ||||
|             return False | ||||
|         if self.build_hash != environ.get(ENV_GIT_HASH_KEY, ""): | ||||
|             return False | ||||
|         return parse(self.version) < OUR_VERSION | ||||
|  | ||||
|     @staticmethod | ||||
| @ -426,21 +432,20 @@ class OutpostState: | ||||
|         keys = cache.keys(f"{outpost.state_cache_prefix}_*") | ||||
|         states = [] | ||||
|         for key in keys: | ||||
|             channel = key.replace(f"{outpost.state_cache_prefix}_", "") | ||||
|             states.append(OutpostState.for_channel(outpost, channel)) | ||||
|             instance_uid = key.replace(f"{outpost.state_cache_prefix}_", "") | ||||
|             states.append(OutpostState.for_instance_uid(outpost, instance_uid)) | ||||
|         return states | ||||
|  | ||||
|     @staticmethod | ||||
|     def for_channel(outpost: Outpost, channel: str) -> "OutpostState": | ||||
|         """Get state for a single channel""" | ||||
|         key = f"{outpost.state_cache_prefix}_{channel}" | ||||
|         default_data = {"uid": channel} | ||||
|     def for_instance_uid(outpost: Outpost, uid: str) -> "OutpostState": | ||||
|         """Get state for a single instance""" | ||||
|         key = f"{outpost.state_cache_prefix}_{uid}" | ||||
|         default_data = {"uid": uid, "channel_ids": []} | ||||
|         data = cache.get(key, default_data) | ||||
|         if isinstance(data, str): | ||||
|             cache.delete(key) | ||||
|             data = default_data | ||||
|         state = from_dict(OutpostState, data) | ||||
|         state.uid = channel | ||||
|         # pylint: disable=protected-access | ||||
|         state._outpost = outpost | ||||
|         return state | ||||
|  | ||||
| @ -1,5 +1,5 @@ | ||||
| """authentik outpost signals""" | ||||
| from django.conf import settings | ||||
| from django.core.cache import cache | ||||
| from django.db.models import Model | ||||
| from django.db.models.signals import post_save, pre_delete, pre_save | ||||
| from django.dispatch import receiver | ||||
| @ -8,9 +8,12 @@ from structlog.stdlib import get_logger | ||||
| from authentik.core.models import Provider | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.lib.utils.reflection import class_to_path | ||||
| from authentik.outposts.controllers.base import ControllerException | ||||
| from authentik.outposts.models import Outpost, OutpostServiceConnection | ||||
| from authentik.outposts.tasks import outpost_controller_down, outpost_post_save | ||||
| from authentik.outposts.tasks import ( | ||||
|     CACHE_KEY_OUTPOST_DOWN, | ||||
|     outpost_controller, | ||||
|     outpost_post_save, | ||||
| ) | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| UPDATE_TRIGGERING_MODELS = ( | ||||
| @ -39,7 +42,8 @@ def pre_save_outpost(sender, instance: Outpost, **_): | ||||
|     ) | ||||
|     if bool(dirty): | ||||
|         LOGGER.info("Outpost needs re-deployment due to changes", instance=instance) | ||||
|         outpost_controller_down_wrapper(old_instance) | ||||
|         cache.set(CACHE_KEY_OUTPOST_DOWN % instance.pk.hex, old_instance) | ||||
|         outpost_controller.delay(instance.pk.hex, action="down", from_cache=True) | ||||
|  | ||||
|  | ||||
| @receiver(post_save) | ||||
| @ -63,23 +67,5 @@ def post_save_update(sender, instance: Model, **_): | ||||
| def pre_delete_cleanup(sender, instance: Outpost, **_): | ||||
|     """Ensure that Outpost's user is deleted (which will delete the token through cascade)""" | ||||
|     instance.user.delete() | ||||
|     outpost_controller_down_wrapper(instance) | ||||
|  | ||||
|  | ||||
| def outpost_controller_down_wrapper(instance: Outpost): | ||||
|     """To ensure that deployment is cleaned up *consistently* we call the controller, and wait | ||||
|     for it to finish. We don't want to call it in this thread, as we don't have the Outpost | ||||
|     Service connection here""" | ||||
|     try: | ||||
|         outpost_controller_down.delay(instance.pk.hex).get() | ||||
|     except RuntimeError:  # pragma: no cover | ||||
|         # In e2e/integration tests, this might run inside a thread/process and | ||||
|         # trigger the celery `Never call result.get() within a task` detection | ||||
|         if settings.TEST: | ||||
|             pass | ||||
|         else: | ||||
|             raise | ||||
|     except ControllerException as exc: | ||||
|         LOGGER.warning( | ||||
|             "failed to cleanup outpost deployment", exc=exc, instance=instance | ||||
|         ) | ||||
|     cache.set(CACHE_KEY_OUTPOST_DOWN % instance.pk.hex, instance) | ||||
|     outpost_controller.delay(instance.pk.hex, action="down", from_cache=True) | ||||
|  | ||||
| @ -36,6 +36,7 @@ from authentik.providers.proxy.controllers.kubernetes import ProxyKubernetesCont | ||||
| from authentik.root.celery import CELERY_APP | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| CACHE_KEY_OUTPOST_DOWN = "outpost_teardown_%s" | ||||
|  | ||||
|  | ||||
| def controller_for_outpost(outpost: Outpost) -> Optional[BaseController]: | ||||
| @ -56,13 +57,6 @@ def controller_for_outpost(outpost: Outpost) -> Optional[BaseController]: | ||||
|     return None | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task() | ||||
| def outpost_controller_all(): | ||||
|     """Launch Controller for all Outposts which support it""" | ||||
|     for outpost in Outpost.objects.exclude(service_connection=None): | ||||
|         outpost_controller.delay(outpost.pk.hex) | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task() | ||||
| def outpost_service_connection_state(connection_pk: Any): | ||||
|     """Update cached state of a service connection""" | ||||
| @ -89,17 +83,31 @@ def outpost_service_connection_monitor(self: MonitoredTask): | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task() | ||||
| def outpost_controller_all(): | ||||
|     """Launch Controller for all Outposts which support it""" | ||||
|     for outpost in Outpost.objects.exclude(service_connection=None): | ||||
|         outpost_controller.delay(outpost.pk.hex, "up", from_cache=False) | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task(bind=True, base=MonitoredTask) | ||||
| def outpost_controller(self: MonitoredTask, outpost_pk: str): | ||||
|     """Create/update/monitor the deployment of an Outpost""" | ||||
| def outpost_controller( | ||||
|     self: MonitoredTask, outpost_pk: str, action: str = "up", from_cache: bool = False | ||||
| ): | ||||
|     """Create/update/monitor/delete the deployment of an Outpost""" | ||||
|     logs = [] | ||||
|     outpost: Outpost = Outpost.objects.get(pk=outpost_pk) | ||||
|     if from_cache: | ||||
|         outpost: Outpost = cache.get(CACHE_KEY_OUTPOST_DOWN % outpost_pk) | ||||
|     else: | ||||
|         outpost: Outpost = Outpost.objects.get(pk=outpost_pk) | ||||
|     if not outpost: | ||||
|         return | ||||
|     self.set_uid(slugify(outpost.name)) | ||||
|     try: | ||||
|         controller = controller_for_outpost(outpost) | ||||
|         if not controller: | ||||
|             return | ||||
|         logs = controller.up_with_logs() | ||||
|         logs = getattr(controller, f"{action}_with_logs")() | ||||
|         LOGGER.debug("---------------Outpost Controller logs starting----------------") | ||||
|         for log in logs: | ||||
|             LOGGER.debug(log) | ||||
| @ -110,16 +118,6 @@ def outpost_controller(self: MonitoredTask, outpost_pk: str): | ||||
|         self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, logs)) | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task() | ||||
| def outpost_controller_down(outpost_pk: str): | ||||
|     """Delete outpost objects before deleting the DB Object""" | ||||
|     outpost = Outpost.objects.get(pk=outpost_pk) | ||||
|     controller = controller_for_outpost(outpost) | ||||
|     if not controller: | ||||
|         return | ||||
|     controller.down() | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task(bind=True, base=MonitoredTask) | ||||
| def outpost_token_ensurer(self: MonitoredTask): | ||||
|     """Periodically ensure that all Outposts have valid Service Accounts | ||||
| @ -204,8 +202,11 @@ def _outpost_single_update(outpost: Outpost, layer=None): | ||||
|     if not layer:  # pragma: no cover | ||||
|         layer = get_channel_layer() | ||||
|     for state in OutpostState.for_outpost(outpost): | ||||
|         LOGGER.debug("sending update", channel=state.uid, outpost=outpost) | ||||
|         async_to_sync(layer.send)(state.uid, {"type": "event.update"}) | ||||
|         for channel in state.channel_ids: | ||||
|             LOGGER.debug( | ||||
|                 "sending update", channel=channel, instance=state.uid, outpost=outpost | ||||
|             ) | ||||
|             async_to_sync(layer.send)(channel, {"type": "event.update"}) | ||||
|  | ||||
|  | ||||
| @CELERY_APP.task() | ||||
|  | ||||
| @ -3,6 +3,10 @@ from django.urls import reverse | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import PropertyMapping, User | ||||
| from authentik.flows.models import Flow | ||||
| from authentik.outposts.api.outposts import OutpostSerializer | ||||
| from authentik.outposts.models import default_outpost_config | ||||
| from authentik.providers.proxy.models import ProxyProvider | ||||
|  | ||||
|  | ||||
| class TestOutpostServiceConnectionsAPI(APITestCase): | ||||
| @ -22,3 +26,22 @@ class TestOutpostServiceConnectionsAPI(APITestCase): | ||||
|             reverse("authentik_api:outpostserviceconnection-types"), | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|     def test_outpost_config(self): | ||||
|         """Test Outpost's config field""" | ||||
|         provider = ProxyProvider.objects.create( | ||||
|             name="test", authorization_flow=Flow.objects.first() | ||||
|         ) | ||||
|         invalid = OutpostSerializer( | ||||
|             data={"name": "foo", "providers": [provider.pk], "config": {}} | ||||
|         ) | ||||
|         self.assertFalse(invalid.is_valid()) | ||||
|         self.assertIn("config", invalid.errors) | ||||
|         valid = OutpostSerializer( | ||||
|             data={ | ||||
|                 "name": "foo", | ||||
|                 "providers": [provider.pk], | ||||
|                 "config": default_outpost_config("foo"), | ||||
|             } | ||||
|         ) | ||||
|         self.assertTrue(valid.is_valid()) | ||||
|  | ||||
| @ -91,7 +91,7 @@ class PolicyViewSet( | ||||
|     } | ||||
|     search_fields = ["name"] | ||||
|  | ||||
|     def get_queryset(self): | ||||
|     def get_queryset(self):  # pragma: no cover | ||||
|         return Policy.objects.select_subclasses().prefetch_related( | ||||
|             "bindings", "promptstage_set" | ||||
|         ) | ||||
|  | ||||
| @ -105,16 +105,21 @@ class PolicyEngine: | ||||
|                 if cached_policy and self.use_cache: | ||||
|                     self.logger.debug( | ||||
|                         "P_ENG: Taking result from cache", | ||||
|                         policy=binding.policy, | ||||
|                         binding=binding, | ||||
|                         cache_key=key, | ||||
|                         request=self.request, | ||||
|                     ) | ||||
|                     self.__cached_policies.append(cached_policy) | ||||
|                     continue | ||||
|                 self.logger.debug("P_ENG: Evaluating policy", policy=binding.policy) | ||||
|                 self.logger.debug( | ||||
|                     "P_ENG: Evaluating policy", binding=binding, request=self.request | ||||
|                 ) | ||||
|                 our_end, task_end = Pipe(False) | ||||
|                 task = PolicyProcess(binding, self.request, task_end) | ||||
|                 task.daemon = False | ||||
|                 self.logger.debug("P_ENG: Starting Process", policy=binding.policy) | ||||
|                 self.logger.debug( | ||||
|                     "P_ENG: Starting Process", binding=binding, request=self.request | ||||
|                 ) | ||||
|                 if not CURRENT_PROCESS._config.get("daemon"): | ||||
|                     task.run() | ||||
|                 else: | ||||
|  | ||||
| @ -51,7 +51,12 @@ class PolicyRequest: | ||||
|             LOGGER.warning("failed to get geoip data", exc=exc) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return f"<PolicyRequest user={self.user}>" | ||||
|         text = f"<PolicyRequest user={self.user}" | ||||
|         if self.obj: | ||||
|             text += f" obj={self.obj}" | ||||
|         if self.http_request: | ||||
|             text += f" http_request={self.http_request}" | ||||
|         return text + ">" | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
|  | ||||
| @ -6,13 +6,11 @@ import time | ||||
| from dataclasses import asdict, dataclass, field | ||||
| from datetime import datetime | ||||
| from hashlib import sha256 | ||||
| from typing import Any, Optional, Type, Union | ||||
| from typing import Any, Optional, Type | ||||
| from urllib.parse import urlparse | ||||
| from uuid import uuid4 | ||||
|  | ||||
| from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey | ||||
| from dacite import from_dict | ||||
| from django.conf import settings | ||||
| from django.db import models | ||||
| from django.http import HttpRequest | ||||
| from django.utils import dateformat, timezone | ||||
| @ -239,7 +237,7 @@ class OAuth2Provider(Provider): | ||||
|         token.access_token = token.create_access_token(user, request) | ||||
|         return token | ||||
|  | ||||
|     def get_jwt_keys(self) -> Union[RSAPrivateKey, str]: | ||||
|     def get_jwt_key(self) -> str: | ||||
|         """ | ||||
|         Takes a provider and returns the set of keys associated with it. | ||||
|         Returns a list of keys. | ||||
| @ -256,7 +254,7 @@ class OAuth2Provider(Provider): | ||||
|                 self.jwt_alg = JWTAlgorithms.HS256 | ||||
|                 self.save() | ||||
|             else: | ||||
|                 return self.rsa_key.private_key | ||||
|                 return self.rsa_key.key_data | ||||
|  | ||||
|         if self.jwt_alg == JWTAlgorithms.HS256: | ||||
|             return self.client_secret | ||||
| @ -300,11 +298,14 @@ class OAuth2Provider(Provider): | ||||
|  | ||||
|     def encode(self, payload: dict[str, Any]) -> str: | ||||
|         """Represent the ID Token as a JSON Web Token (JWT).""" | ||||
|         key = self.get_jwt_keys() | ||||
|         headers = {} | ||||
|         if self.rsa_key: | ||||
|             headers["kid"] = self.rsa_key.kid | ||||
|         key = self.get_jwt_key() | ||||
|         # If the provider does not have an RSA Key assigned, it was switched to Symmetric | ||||
|         self.refresh_from_db() | ||||
|         # pyright: reportGeneralTypeIssues=false | ||||
|         return encode(payload, key, algorithm=self.jwt_alg) | ||||
|         return encode(payload, key, algorithm=self.jwt_alg, headers=headers) | ||||
|  | ||||
|     class Meta: | ||||
|  | ||||
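As a side note, PyJWT carries extra JOSE header fields through the headers argument, which is how the kid above ends up in the token. A self-contained sketch with an assumed symmetric secret and key id:

# Sketch: attaching a "kid" header to a token with PyJWT (values are assumptions).
from jwt import encode, get_unverified_header

token = encode({"sub": "123"}, "secret", algorithm="HS256", headers={"kid": "example-key-id"})
print(get_unverified_header(token)["kid"])  # example-key-id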
| @ -457,7 +458,7 @@ class RefreshToken(ExpiringModel, BaseGrantModel): | ||||
|         See: http://openid.net/specs/openid-connect-core-1_0.html#IDToken""" | ||||
|         sub = "" | ||||
|         if self.provider.sub_mode == SubModes.HASHED_USER_ID: | ||||
|             sub = sha256(f"{user.id}-{settings.SECRET_KEY}".encode("ascii")).hexdigest() | ||||
|             sub = user.uid | ||||
|         elif self.provider.sub_mode == SubModes.USER_EMAIL: | ||||
|             sub = user.email | ||||
|         elif self.provider.sub_mode == SubModes.USER_USERNAME: | ||||
|  | ||||
| @ -4,6 +4,7 @@ from django.urls import reverse | ||||
| from django.utils.encoding import force_str | ||||
|  | ||||
| from authentik.core.models import Application, User | ||||
| from authentik.crypto.models import CertificateKeyPair | ||||
| from authentik.flows.challenge import ChallengeTypes | ||||
| from authentik.flows.models import Flow | ||||
| from authentik.providers.oauth2.errors import ( | ||||
| @ -207,6 +208,7 @@ class TestAuthorize(OAuthTestCase): | ||||
|             client_secret=generate_client_secret(), | ||||
|             authorization_flow=flow, | ||||
|             redirect_uris="http://localhost", | ||||
|             rsa_key=CertificateKeyPair.objects.first(), | ||||
|         ) | ||||
|         Application.objects.create(name="app", slug="app", provider=provider) | ||||
|         state = generate_client_id() | ||||
|  | ||||
| @ -2,7 +2,11 @@ | ||||
| from django.test import TestCase | ||||
| from jwt import decode | ||||
|  | ||||
| from authentik.providers.oauth2.models import OAuth2Provider, RefreshToken | ||||
| from authentik.providers.oauth2.models import ( | ||||
|     JWTAlgorithms, | ||||
|     OAuth2Provider, | ||||
|     RefreshToken, | ||||
| ) | ||||
|  | ||||
|  | ||||
| class OAuthTestCase(TestCase): | ||||
| @ -19,9 +23,12 @@ class OAuthTestCase(TestCase): | ||||
|  | ||||
|     def validate_jwt(self, token: RefreshToken, provider: OAuth2Provider): | ||||
|         """Validate that all required fields are set""" | ||||
|         key = provider.client_secret | ||||
|         if provider.jwt_alg == JWTAlgorithms.RS256: | ||||
|             key = provider.rsa_key.public_key | ||||
|         jwt = decode( | ||||
|             token.access_token, | ||||
|             provider.client_secret, | ||||
|             key, | ||||
|             algorithms=[provider.jwt_alg], | ||||
|             audience=provider.client_id, | ||||
|         ) | ||||
|  | ||||
| @ -54,6 +54,7 @@ from authentik.stages.consent.stage import ( | ||||
|     PLAN_CONTEXT_CONSENT_PERMISSIONS, | ||||
|     ConsentStageView, | ||||
| ) | ||||
| from authentik.stages.user_login.stage import USER_LOGIN_AUTHENTICATED | ||||
|  | ||||
| LOGGER = get_logger() | ||||
|  | ||||
| @ -437,6 +438,10 @@ class AuthorizationFlowInitView(PolicyAccessView): | ||||
|         if ( | ||||
|             PROMPT_LOGIN in self.params.prompt | ||||
|             and SESSION_NEEDS_LOGIN not in self.request.session | ||||
|             # To prevent the user from having to double login when prompt is set to login | ||||
|             # and the user has just signed in. This session variable is set in the UserLoginStage | ||||
|             # and is (quite hackily) removed from the session in the application's API List method | ||||
|             and USER_LOGIN_AUTHENTICATED not in self.request.session | ||||
|         ): | ||||
|             self.request.session[SESSION_NEEDS_LOGIN] = True | ||||
|             return self.handle_no_permission() | ||||
|  | ||||
| @ -33,6 +33,8 @@ class OpenIDConnectConfigurationSerializer(PassiveSerializer): | ||||
| class ProxyProviderSerializer(ProviderSerializer): | ||||
|     """ProxyProvider Serializer""" | ||||
|  | ||||
|     redirect_uris = CharField(read_only=True) | ||||
|  | ||||
|     def validate(self, attrs) -> dict[Any, str]: | ||||
|         """Check that internal_host is set when forward_auth_mode is disabled""" | ||||
|         if ( | ||||
| @ -51,8 +53,10 @@ class ProxyProviderSerializer(ProviderSerializer): | ||||
|         return instance | ||||
|  | ||||
|     def update(self, instance: ProxyProvider, validated_data): | ||||
|         instance = super().update(instance, validated_data) | ||||
|         instance.set_oauth_defaults() | ||||
|         return super().update(instance, validated_data) | ||||
|         instance.save() | ||||
|         return instance | ||||
|  | ||||
|     class Meta: | ||||
|  | ||||
| @ -67,6 +71,7 @@ class ProxyProviderSerializer(ProviderSerializer): | ||||
|             "basic_auth_password_attribute", | ||||
|             "basic_auth_user_attribute", | ||||
|             "forward_auth_mode", | ||||
|             "redirect_uris", | ||||
|         ] | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -17,7 +17,6 @@ from kubernetes.client.models.networking_v1beta1_ingress_rule import ( | ||||
|  | ||||
| from authentik.outposts.controllers.base import FIELD_MANAGER | ||||
| from authentik.outposts.controllers.k8s.base import ( | ||||
|     Disabled, | ||||
|     KubernetesObjectReconciler, | ||||
|     NeedsUpdate, | ||||
| ) | ||||
| @ -137,9 +136,6 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]): | ||||
|                     ), | ||||
|                 ) | ||||
|             rules.append(rule) | ||||
|         if not rules: | ||||
|             self.logger.debug("No providers use proxying, no ingress needed") | ||||
|             raise Disabled() | ||||
|         tls_config = None | ||||
|         if tls_hosts: | ||||
|             tls_config = NetworkingV1beta1IngressTLS( | ||||
|  | ||||
| @ -7,7 +7,6 @@ from kubernetes.client import ApiextensionsV1Api, CustomObjectsApi | ||||
|  | ||||
| from authentik.outposts.controllers.base import FIELD_MANAGER | ||||
| from authentik.outposts.controllers.k8s.base import ( | ||||
|     Disabled, | ||||
|     KubernetesObjectReconciler, | ||||
|     NeedsUpdate, | ||||
| ) | ||||
| @ -70,6 +69,19 @@ class TraefikMiddlewareReconciler(KubernetesObjectReconciler[TraefikMiddleware]) | ||||
|         self.api_ex = ApiextensionsV1Api(controller.client) | ||||
|         self.api = CustomObjectsApi(controller.client) | ||||
|  | ||||
|     @property | ||||
|     def noop(self) -> bool: | ||||
|         if not ProxyProvider.objects.filter( | ||||
|             outpost__in=[self.controller.outpost], | ||||
|             forward_auth_mode=True, | ||||
|         ).exists(): | ||||
|             self.logger.debug("No providers with forward auth enabled.") | ||||
|             return True | ||||
|         if not self._crd_exists(): | ||||
|             self.logger.debug("CRD doesn't exist") | ||||
|             return True | ||||
|         return False | ||||
|  | ||||
|     def _crd_exists(self) -> bool: | ||||
|         """Check if the traefik middleware exists""" | ||||
|         return bool( | ||||
| @ -87,15 +99,6 @@ class TraefikMiddlewareReconciler(KubernetesObjectReconciler[TraefikMiddleware]) | ||||
|  | ||||
|     def get_reference_object(self) -> TraefikMiddleware: | ||||
|         """Get deployment object for outpost""" | ||||
|         if not ProxyProvider.objects.filter( | ||||
|             outpost__in=[self.controller.outpost], | ||||
|             forward_auth_mode=True, | ||||
|         ).exists(): | ||||
|             self.logger.debug("No providers with forward auth enabled.") | ||||
|             raise Disabled() | ||||
|         if not self._crd_exists(): | ||||
|             self.logger.debug("CRD doesn't exist") | ||||
|             raise Disabled() | ||||
|         return TraefikMiddleware( | ||||
|             apiVersion=f"{CRD_GROUP}/{CRD_VERSION}", | ||||
|             kind="Middleware", | ||||
|  | ||||
| @ -127,7 +127,7 @@ class ProxyProvider(OutpostModel, OAuth2Provider): | ||||
|         """Ensure all OAuth2-related settings are correct""" | ||||
|         self.client_type = ClientTypes.CONFIDENTIAL | ||||
|         self.jwt_alg = JWTAlgorithms.RS256 | ||||
|         self.rsa_key = CertificateKeyPair.objects.first() | ||||
|         self.rsa_key = CertificateKeyPair.objects.exclude(key_data__iexact="").first() | ||||
|         scopes = ScopeMapping.objects.filter( | ||||
|             scope_name__in=[ | ||||
|                 SCOPE_OPENID, | ||||
|  | ||||
| @ -5,6 +5,7 @@ from defusedxml.ElementTree import fromstring | ||||
| from django.http.response import HttpResponse | ||||
| from django.shortcuts import get_object_or_404 | ||||
| from django.utils.translation import gettext_lazy as _ | ||||
| from drf_yasg import openapi | ||||
| from drf_yasg.utils import swagger_auto_schema | ||||
| from rest_framework.decorators import action | ||||
| from rest_framework.fields import CharField, FileField, ReadOnlyField | ||||
| @ -83,7 +84,14 @@ class SAMLProviderViewSet(ModelViewSet): | ||||
|         responses={ | ||||
|             200: SAMLMetadataSerializer(many=False), | ||||
|             404: "Provider has no application assigned", | ||||
|         } | ||||
|         }, | ||||
|         manual_parameters=[ | ||||
|             openapi.Parameter( | ||||
|                 name="download", | ||||
|                 in_=openapi.IN_QUERY, | ||||
|                 type=openapi.TYPE_BOOLEAN, | ||||
|             ) | ||||
|         ], | ||||
|     ) | ||||
|     @action(methods=["GET"], detail=True, permission_classes=[AllowAny]) | ||||
|     # pylint: disable=invalid-name, unused-argument | ||||
|  | ||||
| @ -23,7 +23,7 @@ def deflate_and_base64_encode(inflated: str, encoding="utf-8"): | ||||
|  | ||||
|  | ||||
| def nice64(src: str) -> str: | ||||
|     """Returns src base64-encoded and formatted nicely for our XML. """ | ||||
|     """Returns src base64-encoded and formatted nicely for our XML.""" | ||||
|     return base64.b64encode(src.encode()).decode("utf-8").replace("\n", "") | ||||
|  | ||||
|  | ||||
|  | ||||
| @ -20,6 +20,7 @@ from time import time | ||||
| import structlog | ||||
| from celery.schedules import crontab | ||||
| from sentry_sdk import init as sentry_init | ||||
| from sentry_sdk.api import set_tag | ||||
| from sentry_sdk.integrations.celery import CeleryIntegration | ||||
| from sentry_sdk.integrations.django import DjangoIntegration | ||||
| from sentry_sdk.integrations.redis import RedisIntegration | ||||
| @ -52,11 +53,9 @@ STATIC_ROOT = BASE_DIR + "/static" | ||||
| STATICFILES_DIRS = [BASE_DIR + "/web"] | ||||
| MEDIA_ROOT = BASE_DIR + "/media" | ||||
|  | ||||
| SECRET_KEY = CONFIG.y( | ||||
|     "secret_key", "9$@r!d^1^jrn#fk#1#@ks#9&i$^s#1)_13%$rwjrhd=e8jfi_s" | ||||
| )  # noqa Debug | ||||
|  | ||||
| DEBUG = CONFIG.y_bool("debug") | ||||
| SECRET_KEY = CONFIG.y("secret_key") | ||||
|  | ||||
| INTERNAL_IPS = ["127.0.0.1"] | ||||
| ALLOWED_HOSTS = ["*"] | ||||
| SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") | ||||
| @ -162,7 +161,7 @@ REST_FRAMEWORK = { | ||||
|         "rest_framework.permissions.DjangoObjectPermissions", | ||||
|     ), | ||||
|     "DEFAULT_AUTHENTICATION_CLASSES": ( | ||||
|         "authentik.api.auth.AuthentikTokenAuthentication", | ||||
|         "authentik.api.authentication.TokenAuthentication", | ||||
|         "rest_framework.authentication.SessionAuthentication", | ||||
|     ), | ||||
|     "DEFAULT_RENDERER_CLASSES": [ | ||||
| @ -248,6 +247,7 @@ DATABASES = { | ||||
|         "NAME": CONFIG.y("postgresql.name"), | ||||
|         "USER": CONFIG.y("postgresql.user"), | ||||
|         "PASSWORD": CONFIG.y("postgresql.password"), | ||||
|         "PORT": int(CONFIG.y("postgresql.port")), | ||||
|     } | ||||
| } | ||||
|  | ||||
| @ -319,9 +319,7 @@ CELERY_RESULT_BACKEND = ( | ||||
| # Database backup | ||||
| DBBACKUP_STORAGE = "django.core.files.storage.FileSystemStorage" | ||||
| DBBACKUP_STORAGE_OPTIONS = {"location": "./backups" if DEBUG else "/backups"} | ||||
| DBBACKUP_CONNECTOR_MAPPING = { | ||||
|     "django_prometheus.db.backends.postgresql": "dbbackup.db.postgresql.PgDumpConnector" | ||||
| } | ||||
| DBBACKUP_FILENAME_TEMPLATE = "authentik-backup-{datetime}.sql" | ||||
| if CONFIG.y("postgresql.s3_backup"): | ||||
|     DBBACKUP_STORAGE = "storages.backends.s3boto3.S3Boto3Storage" | ||||
|     DBBACKUP_STORAGE_OPTIONS = { | ||||
| @ -331,9 +329,10 @@ if CONFIG.y("postgresql.s3_backup"): | ||||
|         "region_name": CONFIG.y("postgresql.s3_backup.region", "eu-central-1"), | ||||
|         "default_acl": "private", | ||||
|         "endpoint_url": CONFIG.y("postgresql.s3_backup.host"), | ||||
|         "location": CONFIG.y("postgresql.s3_backup.location", ""), | ||||
|     } | ||||
|     j_print( | ||||
|         "Database backup to S3 is configured.", | ||||
|         "Database backup to S3 is configured", | ||||
|         host=CONFIG.y("postgresql.s3_backup.host"), | ||||
|     ) | ||||
|  | ||||
| @ -354,8 +353,13 @@ if _ERROR_REPORTING: | ||||
|         environment=CONFIG.y("error_reporting.environment", "customer"), | ||||
|         send_default_pii=CONFIG.y_bool("error_reporting.send_pii", False), | ||||
|     ) | ||||
|     set_tag("authentik:build_hash", os.environ.get(ENV_GIT_HASH_KEY, "tagged")) | ||||
|     set_tag( | ||||
|         "authentik:env", "kubernetes" if "KUBERNETES_PORT" in os.environ else "compose" | ||||
|     ) | ||||
|     set_tag("authentik:component", "backend") | ||||
|     j_print( | ||||
|         "Error reporting is enabled.", | ||||
|         "Error reporting is enabled", | ||||
|         env=CONFIG.y("error_reporting.environment", "customer"), | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @ -1,9 +1,10 @@ | ||||
| """OAuth Source Serializer""" | ||||
| from django_filters.rest_framework import DjangoFilterBackend | ||||
| from guardian.utils import get_anonymous_user | ||||
| from rest_framework import mixins | ||||
| from rest_framework.filters import OrderingFilter, SearchFilter | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
| from rest_framework.viewsets import GenericViewSet | ||||
|  | ||||
| from authentik.api.authorization import OwnerFilter, OwnerPermissions | ||||
| from authentik.core.api.sources import SourceSerializer | ||||
| from authentik.sources.oauth.models import UserOAuthSourceConnection | ||||
|  | ||||
| @ -21,20 +22,17 @@ class UserOAuthSourceConnectionSerializer(SourceSerializer): | ||||
|         ] | ||||
|  | ||||
|  | ||||
| class UserOAuthSourceConnectionViewSet(ModelViewSet): | ||||
| class UserOAuthSourceConnectionViewSet( | ||||
|     mixins.RetrieveModelMixin, | ||||
|     mixins.UpdateModelMixin, | ||||
|     mixins.DestroyModelMixin, | ||||
|     mixins.ListModelMixin, | ||||
|     GenericViewSet, | ||||
| ): | ||||
|     """Source Viewset""" | ||||
|  | ||||
|     queryset = UserOAuthSourceConnection.objects.all() | ||||
|     serializer_class = UserOAuthSourceConnectionSerializer | ||||
|     filterset_fields = ["source__slug"] | ||||
|     filter_backends = [ | ||||
|         DjangoFilterBackend, | ||||
|         OrderingFilter, | ||||
|         SearchFilter, | ||||
|     ] | ||||
|  | ||||
|     def get_queryset(self): | ||||
|         user = self.request.user if self.request else get_anonymous_user() | ||||
|         if user.is_superuser: | ||||
|             return super().get_queryset() | ||||
|         return super().get_queryset().filter(user=user.pk) | ||||
|     permission_classes = [OwnerPermissions] | ||||
|     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter] | ||||
|  | ||||
| @ -34,7 +34,6 @@ class PlexSourceSerializer(SourceSerializer): | ||||
|             "allow_friends", | ||||
|             "plex_token", | ||||
|         ] | ||||
|         extra_kwargs = {"plex_token": {"write_only": True}} | ||||
|  | ||||
|  | ||||
| class PlexTokenRedeemSerializer(PassiveSerializer): | ||||
|  | ||||
| @ -0,0 +1,18 @@ | ||||
| # Generated by Django 3.2.3 on 2021-05-20 17:04 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("authentik_sources_plex", "0002_auto_20210505_1717"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name="plexsource", | ||||
|             name="plex_token", | ||||
|             field=models.TextField(help_text="Plex token used to check friends"), | ||||
|         ), | ||||
|     ] | ||||
| @ -41,9 +41,7 @@ class PlexSource(Source): | ||||
|         default=True, | ||||
|         help_text=_("Allow friends to authenticate, even if you don't share a server."), | ||||
|     ) | ||||
|     plex_token = models.TextField( | ||||
|         default="", help_text=_("Plex token used to check firends") | ||||
|     ) | ||||
|     plex_token = models.TextField(help_text=_("Plex token used to check friends")) | ||||
|  | ||||
|     @property | ||||
|     def component(self) -> str: | ||||
|  | ||||
| @ -1,13 +1,13 @@ | ||||
| """AuthenticatorStaticStage API Views""" | ||||
| from django_filters import OrderingFilter | ||||
| from django_filters.rest_framework import DjangoFilterBackend | ||||
| from django_otp.plugins.otp_static.models import StaticDevice | ||||
| from guardian.utils import get_anonymous_user | ||||
| from rest_framework.filters import SearchFilter | ||||
| from rest_framework import mixins | ||||
| from rest_framework.filters import OrderingFilter, SearchFilter | ||||
| from rest_framework.permissions import IsAdminUser | ||||
| from rest_framework.serializers import ModelSerializer | ||||
| from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet | ||||
| from rest_framework.viewsets import GenericViewSet, ModelViewSet, ReadOnlyModelViewSet | ||||
|  | ||||
| from authentik.api.authorization import OwnerFilter, OwnerPermissions | ||||
| from authentik.flows.api.stages import StageSerializer | ||||
| from authentik.stages.authenticator_static.models import AuthenticatorStaticStage | ||||
|  | ||||
| @ -38,23 +38,22 @@ class StaticDeviceSerializer(ModelSerializer): | ||||
|         depth = 2 | ||||
|  | ||||
|  | ||||
| class StaticDeviceViewSet(ModelViewSet): | ||||
| class StaticDeviceViewSet( | ||||
|     mixins.RetrieveModelMixin, | ||||
|     mixins.UpdateModelMixin, | ||||
|     mixins.DestroyModelMixin, | ||||
|     mixins.ListModelMixin, | ||||
|     GenericViewSet, | ||||
| ): | ||||
|     """Viewset for static authenticator devices""" | ||||
|  | ||||
|     queryset = StaticDevice.objects.none() | ||||
|     queryset = StaticDevice.objects.all() | ||||
|     serializer_class = StaticDeviceSerializer | ||||
|     permission_classes = [OwnerPermissions] | ||||
|     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter] | ||||
|     search_fields = ["name"] | ||||
|     filterset_fields = ["name"] | ||||
|     ordering = ["name"] | ||||
|     filter_backends = [ | ||||
|         DjangoFilterBackend, | ||||
|         OrderingFilter, | ||||
|         SearchFilter, | ||||
|     ] | ||||
|  | ||||
|     def get_queryset(self): | ||||
|         user = self.request.user if self.request else get_anonymous_user() | ||||
|         return StaticDevice.objects.filter(user=user.pk) | ||||
|  | ||||
|  | ||||
| class StaticAdminDeviceViewSet(ReadOnlyModelViewSet): | ||||
|  | ||||
							
								
								
									
authentik/stages/authenticator_static/tests.py (new file, 20 lines)
							| @ -0,0 +1,20 @@ | ||||
| """Test Static API""" | ||||
| from django.urls import reverse | ||||
| from django_otp.plugins.otp_static.models import StaticDevice | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import User | ||||
|  | ||||
|  | ||||
| class AuthenticatorStaticStage(APITestCase): | ||||
|     """Test Static API""" | ||||
|  | ||||
|     def test_api_delete(self): | ||||
|         """Test api delete""" | ||||
|         user = User.objects.create(username="foo") | ||||
|         self.client.force_login(user) | ||||
|         dev = StaticDevice.objects.create(user=user) | ||||
|         response = self.client.delete( | ||||
|             reverse("authentik_api:staticdevice-detail", kwargs={"pk": dev.pk}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 204) | ||||
| @ -1,12 +1,13 @@ | ||||
| """AuthenticatorTOTPStage API Views""" | ||||
| from django_filters.rest_framework import DjangoFilterBackend | ||||
| from django_filters.rest_framework.backends import DjangoFilterBackend | ||||
| from django_otp.plugins.otp_totp.models import TOTPDevice | ||||
| from guardian.utils import get_anonymous_user | ||||
| from rest_framework import mixins | ||||
| from rest_framework.filters import OrderingFilter, SearchFilter | ||||
| from rest_framework.permissions import IsAdminUser | ||||
| from rest_framework.serializers import ModelSerializer | ||||
| from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet | ||||
| from rest_framework.viewsets import GenericViewSet, ModelViewSet, ReadOnlyModelViewSet | ||||
|  | ||||
| from authentik.api.authorization import OwnerFilter, OwnerPermissions | ||||
| from authentik.flows.api.stages import StageSerializer | ||||
| from authentik.stages.authenticator_totp.models import AuthenticatorTOTPStage | ||||
|  | ||||
| @ -40,23 +41,22 @@ class TOTPDeviceSerializer(ModelSerializer): | ||||
|         depth = 2 | ||||
|  | ||||
|  | ||||
| class TOTPDeviceViewSet(ModelViewSet): | ||||
| class TOTPDeviceViewSet( | ||||
|     mixins.RetrieveModelMixin, | ||||
|     mixins.UpdateModelMixin, | ||||
|     mixins.DestroyModelMixin, | ||||
|     mixins.ListModelMixin, | ||||
|     GenericViewSet, | ||||
| ): | ||||
|     """Viewset for totp authenticator devices""" | ||||
|  | ||||
|     queryset = TOTPDevice.objects.none() | ||||
|     queryset = TOTPDevice.objects.all() | ||||
|     serializer_class = TOTPDeviceSerializer | ||||
|     permission_classes = [OwnerPermissions] | ||||
|     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter] | ||||
|     search_fields = ["name"] | ||||
|     filterset_fields = ["name"] | ||||
|     ordering = ["name"] | ||||
|     filter_backends = [ | ||||
|         DjangoFilterBackend, | ||||
|         OrderingFilter, | ||||
|         SearchFilter, | ||||
|     ] | ||||
|  | ||||
|     def get_queryset(self): | ||||
|         user = self.request.user if self.request else get_anonymous_user() | ||||
|         return TOTPDevice.objects.filter(user=user.pk) | ||||
|  | ||||
|  | ||||
| class TOTPAdminDeviceViewSet(ReadOnlyModelViewSet): | ||||
|  | ||||
							
								
								
									
authentik/stages/authenticator_totp/tests.py (new file, 20 lines)
							| @ -0,0 +1,20 @@ | ||||
| """Test TOTP API""" | ||||
| from django.urls import reverse | ||||
| from django_otp.plugins.otp_totp.models import TOTPDevice | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import User | ||||
|  | ||||
|  | ||||
| class AuthenticatorTOTPStage(APITestCase): | ||||
|     """Test TOTP API""" | ||||
|  | ||||
|     def test_api_delete(self): | ||||
|         """Test api delete""" | ||||
|         user = User.objects.create(username="foo") | ||||
|         self.client.force_login(user) | ||||
|         dev = TOTPDevice.objects.create(user=user) | ||||
|         response = self.client.delete( | ||||
|             reverse("authentik_api:totpdevice-detail", kwargs={"pk": dev.pk}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 204) | ||||
| @ -1,11 +1,12 @@ | ||||
| """AuthenticateWebAuthnStage API Views""" | ||||
| from django_filters.rest_framework import DjangoFilterBackend | ||||
| from guardian.utils import get_anonymous_user | ||||
| from django_filters.rest_framework.backends import DjangoFilterBackend | ||||
| from rest_framework import mixins | ||||
| from rest_framework.filters import OrderingFilter, SearchFilter | ||||
| from rest_framework.permissions import IsAdminUser | ||||
| from rest_framework.serializers import ModelSerializer | ||||
| from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet | ||||
| from rest_framework.viewsets import GenericViewSet, ModelViewSet, ReadOnlyModelViewSet | ||||
|  | ||||
| from authentik.api.authorization import OwnerFilter, OwnerPermissions | ||||
| from authentik.flows.api.stages import StageSerializer | ||||
| from authentik.stages.authenticator_webauthn.models import ( | ||||
|     AuthenticateWebAuthnStage, | ||||
| @ -39,23 +40,22 @@ class WebAuthnDeviceSerializer(ModelSerializer): | ||||
|         depth = 2 | ||||
|  | ||||
|  | ||||
| class WebAuthnDeviceViewSet(ModelViewSet): | ||||
| class WebAuthnDeviceViewSet( | ||||
|     mixins.RetrieveModelMixin, | ||||
|     mixins.UpdateModelMixin, | ||||
|     mixins.DestroyModelMixin, | ||||
|     mixins.ListModelMixin, | ||||
|     GenericViewSet, | ||||
| ): | ||||
|     """Viewset for WebAuthn authenticator devices""" | ||||
|  | ||||
|     queryset = WebAuthnDevice.objects.none() | ||||
|     queryset = WebAuthnDevice.objects.all() | ||||
|     serializer_class = WebAuthnDeviceSerializer | ||||
|     search_fields = ["name"] | ||||
|     filterset_fields = ["name"] | ||||
|     ordering = ["name"] | ||||
|     filter_backends = [ | ||||
|         DjangoFilterBackend, | ||||
|         OrderingFilter, | ||||
|         SearchFilter, | ||||
|     ] | ||||
|  | ||||
|     def get_queryset(self): | ||||
|         user = self.request.user if self.request else get_anonymous_user() | ||||
|         return WebAuthnDevice.objects.filter(user=user.pk) | ||||
|     permission_classes = [OwnerPermissions] | ||||
|     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter] | ||||
|  | ||||
|  | ||||
| class WebAuthnAdminDeviceViewSet(ReadOnlyModelViewSet): | ||||
|  | ||||
							
								
								
									
authentik/stages/authenticator_webauthn/tests.py (new file, 20 lines)
							| @ -0,0 +1,20 @@ | ||||
| """Test WebAuthn API""" | ||||
| from django.urls import reverse | ||||
| from rest_framework.test import APITestCase | ||||
|  | ||||
| from authentik.core.models import User | ||||
| from authentik.stages.authenticator_webauthn.models import WebAuthnDevice | ||||
|  | ||||
|  | ||||
| class AuthenticatorWebAuthnStage(APITestCase): | ||||
|     """Test WebAuthn API""" | ||||
|  | ||||
|     def test_api_delete(self): | ||||
|         """Test api delete""" | ||||
|         user = User.objects.create(username="foo") | ||||
|         self.client.force_login(user) | ||||
|         dev = WebAuthnDevice.objects.create(user=user) | ||||
|         response = self.client.delete( | ||||
|             reverse("authentik_api:webauthndevice-detail", kwargs={"pk": dev.pk}) | ||||
|         ) | ||||
|         self.assertEqual(response.status_code, 204) | ||||
| @ -1,5 +1,5 @@ | ||||
| """dummy tests""" | ||||
| from django.test import Client, TestCase | ||||
| from django.test import TestCase | ||||
| from django.urls import reverse | ||||
| from django.utils.encoding import force_str | ||||
|  | ||||
| @ -14,7 +14,6 @@ class TestDummyStage(TestCase): | ||||
|     def setUp(self): | ||||
|         super().setUp() | ||||
|         self.user = User.objects.create(username="unittest", email="test@beryju.org") | ||||
|         self.client = Client() | ||||
|  | ||||
|         self.flow = Flow.objects.create( | ||||
|             name="test-dummy", | ||||
|  | ||||
| @ -3,6 +3,7 @@ from rest_framework.fields import JSONField | ||||
| from rest_framework.serializers import ModelSerializer | ||||
| from rest_framework.viewsets import ModelViewSet | ||||
|  | ||||
| from authentik.core.api.users import UserSerializer | ||||
| from authentik.core.api.utils import is_dict | ||||
| from authentik.flows.api.stages import StageSerializer | ||||
| from authentik.stages.invitation.models import Invitation, InvitationStage | ||||
| @ -29,6 +30,7 @@ class InvitationStageViewSet(ModelViewSet): | ||||
| class InvitationSerializer(ModelSerializer): | ||||
|     """Invitation Serializer""" | ||||
|  | ||||
|     created_by = UserSerializer(read_only=True) | ||||
|     fixed_data = JSONField(validators=[is_dict], required=False) | ||||
|  | ||||
|     class Meta: | ||||
| @ -41,7 +43,6 @@ class InvitationSerializer(ModelSerializer): | ||||
|             "created_by", | ||||
|             "single_use", | ||||
|         ] | ||||
|         depth = 2 | ||||
|  | ||||
|  | ||||
| class InvitationViewSet(ModelViewSet): | ||||
|  | ||||
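In the invitation API above, the blanket depth = 2 is replaced by an explicitly declared, read-only nested UserSerializer for created_by, so only that one relation is expanded instead of every relation two levels deep. A minimal sketch of the resulting pattern (abridged; the field list here is partly assumed and this is not the full authentik serializer):

from rest_framework.serializers import ModelSerializer

from authentik.core.api.users import UserSerializer
from authentik.stages.invitation.models import Invitation


class InvitationSerializer(ModelSerializer):
    """Invitation Serializer (sketch)"""

    # Explicit, read-only nesting for just this relation, rather than
    # depth = 2 auto-expanding every relation on the model.
    created_by = UserSerializer(read_only=True)

    class Meta:
        model = Invitation
        # Abridged: the diff above shows at least created_by and single_use.
        fields = ["created_by", "single_use"]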
| @ -12,6 +12,7 @@ from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND | ||||
|  | ||||
| LOGGER = get_logger() | ||||
| DEFAULT_BACKEND = "django.contrib.auth.backends.ModelBackend" | ||||
| USER_LOGIN_AUTHENTICATED = "user_login_authenticated" | ||||
|  | ||||
|  | ||||
| class UserLoginStageView(StageView): | ||||
| @ -33,7 +34,7 @@ class UserLoginStageView(StageView): | ||||
|             backend=backend, | ||||
|         ) | ||||
|         delta = timedelta_from_string(self.executor.current_stage.session_duration) | ||||
|         if delta.seconds == 0: | ||||
|         if delta.total_seconds() == 0: | ||||
|             self.request.session.set_expiry(0) | ||||
|         else: | ||||
|             self.request.session.set_expiry(delta) | ||||
| @ -43,5 +44,6 @@ class UserLoginStageView(StageView): | ||||
|             flow_slug=self.executor.flow.slug, | ||||
|             session_duration=self.executor.current_stage.session_duration, | ||||
|         ) | ||||
|         self.request.session[USER_LOGIN_AUTHENTICATED] = True | ||||
|         messages.success(self.request, _("Successfully logged in!")) | ||||
|         return self.executor.stage_ok() | ||||
|  | ||||
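The switch from delta.seconds to delta.total_seconds() above fixes a subtle bug: timedelta.seconds holds only the seconds component of a duration (0-86399), so a session_duration of exactly one day would compare equal to 0 and the session would wrongly be set to expire when the browser closes. A quick standard-library illustration:

from datetime import timedelta

delta = timedelta(days=1)
print(delta.seconds)          # 0 -> the old check would treat this as "expire on browser close"
print(delta.total_seconds())  # 86400.0 -> the new check sees the full duration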
| @ -19,7 +19,7 @@ variables: | ||||
|     branchName: ${{ replace(variables['Build.SourceBranchName'], 'refs/heads/', '') }} | ||||
|  | ||||
| stages: | ||||
|   - stage: Lint | ||||
|   - stage: Lint_and_test | ||||
|     jobs: | ||||
|       - job: pylint | ||||
|         pool: | ||||
| @ -43,7 +43,9 @@ stages: | ||||
|                 pipenv install --dev | ||||
|           - task: CmdLine@2 | ||||
|             inputs: | ||||
|               script: pipenv run pylint authentik tests lifecycle | ||||
|               script: | | ||||
|                 pipenv run python -m scripts.generate_ci_config | ||||
|                 pipenv run pylint authentik tests lifecycle | ||||
|       - job: black | ||||
|         pool: | ||||
|           vmImage: 'ubuntu-latest' | ||||
| @ -118,8 +120,6 @@ stages: | ||||
|           - task: CmdLine@2 | ||||
|             inputs: | ||||
|               script: pipenv run pyright e2e lifecycle | ||||
|   - stage: Test | ||||
|     jobs: | ||||
|       - job: migrations | ||||
|         pool: | ||||
|           vmImage: 'ubuntu-latest' | ||||
| @ -142,7 +142,9 @@ stages: | ||||
|                 pipenv install --dev | ||||
|           - task: CmdLine@2 | ||||
|             inputs: | ||||
|               script: pipenv run ./manage.py migrate | ||||
|               script: | | ||||
|                 pipenv run python -m scripts.generate_ci_config | ||||
|                 pipenv run ./manage.py migrate | ||||
|       - job: migrations_from_previous_release | ||||
|         pool: | ||||
|           vmImage: 'ubuntu-latest' | ||||
| @ -173,8 +175,9 @@ stages: | ||||
|           - task: CmdLine@2 | ||||
|             displayName: Migrate to last tagged release | ||||
|             inputs: | ||||
|               script: | ||||
|                 pipenv run ./manage.py migrate | ||||
|               script: | | ||||
|                 pipenv run python -m scripts.generate_ci_config | ||||
|                 pipenv run python -m lifecycle.migrate | ||||
|           - task: CmdLine@2 | ||||
|             displayName: Install current branch | ||||
|             inputs: | ||||
| @ -186,8 +189,8 @@ stages: | ||||
|             displayName: Migrate to current branch | ||||
|             inputs: | ||||
|               script: | | ||||
|                 pipenv run python -m scripts.generate_ci_config | ||||
|                 pipenv run python -m lifecycle.migrate | ||||
|                 pipenv run ./manage.py migrate | ||||
|       - job: coverage_unittest | ||||
|         pool: | ||||
|           vmImage: 'ubuntu-latest' | ||||
| @ -212,6 +215,7 @@ stages: | ||||
|             displayName: Run full test suite | ||||
|             inputs: | ||||
|               script: | | ||||
|                 pipenv run python -m scripts.generate_ci_config | ||||
|                 pipenv run make test | ||||
|           - task: CmdLine@2 | ||||
|             inputs: | ||||
| @ -255,6 +259,7 @@ stages: | ||||
|             displayName: Run full test suite | ||||
|             inputs: | ||||
|               script: | | ||||
|                 pipenv run python -m scripts.generate_ci_config | ||||
|                 pipenv run make test-integration | ||||
|           - task: CmdLine@2 | ||||
|             inputs: | ||||
| @ -310,6 +315,7 @@ stages: | ||||
|             displayName: Run full test suite | ||||
|             inputs: | ||||
|               script: | | ||||
|                 pipenv run python -m scripts.generate_ci_config | ||||
|                 pipenv run make test-e2e | ||||
|           - task: CmdLine@2 | ||||
|             condition: always() | ||||
|  | ||||
| @ -21,7 +21,7 @@ services: | ||||
|     networks: | ||||
|       - internal | ||||
|   server: | ||||
|     image: ${AUTHENTIK_IMAGE:-beryju/authentik}:${AUTHENTIK_TAG:-2021.5.1-rc6} | ||||
|     image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2021.5.4} | ||||
|     restart: unless-stopped | ||||
|     command: server | ||||
|     environment: | ||||
| @ -52,7 +52,7 @@ services: | ||||
|       - "0.0.0.0:9000:9000" | ||||
|       - "0.0.0.0:9443:9443" | ||||
|   worker: | ||||
|     image: ${AUTHENTIK_IMAGE:-beryju/authentik}:${AUTHENTIK_TAG:-2021.5.1-rc6} | ||||
|     image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2021.5.4} | ||||
|     restart: unless-stopped | ||||
|     command: worker | ||||
|     networks: | ||||
| @ -64,8 +64,13 @@ services: | ||||
|       AUTHENTIK_POSTGRESQL__NAME: ${PG_DB:-authentik} | ||||
|       AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS} | ||||
|       # AUTHENTIK_ERROR_REPORTING__ENABLED: "true" | ||||
|     # This is optional, and can be removed. If you remove this, the following will happen: | ||||
|     # - The permissions for the /backups and /media folders aren't fixed, so make sure they are owned by 1000:1000 | ||||
|     # - The docker socket can't be accessed anymore | ||||
|     user: root | ||||
|     volumes: | ||||
|       - ./backups:/backups | ||||
|       - ./media:/media | ||||
|       - /var/run/docker.sock:/var/run/docker.sock | ||||
|       - ./custom-templates:/templates | ||||
|       - geoip:/geoip | ||||
|  | ||||
| @ -1,3 +1,3 @@ | ||||
| package constants | ||||
|  | ||||
| const VERSION = "2021.5.1-rc6" | ||||
| const VERSION = "2021.5.4" | ||||
|  | ||||
| @ -9,7 +9,18 @@ import ( | ||||
| func (ws *WebServer) configureProxy() { | ||||
| 	// Reverse proxy to the application server | ||||
| 	u, _ := url.Parse("http://localhost:8000") | ||||
| 	rp := httputil.NewSingleHostReverseProxy(u) | ||||
| 	director := func(req *http.Request) { | ||||
| 		req.URL.Scheme = u.Scheme | ||||
| 		req.URL.Host = u.Host | ||||
| 		if _, ok := req.Header["User-Agent"]; !ok { | ||||
| 			// explicitly disable User-Agent so it's not set to default value | ||||
| 			req.Header.Set("User-Agent", "") | ||||
| 		} | ||||
| 		if req.TLS != nil { | ||||
| 			req.Header.Set("X-Forwarded-Proto", "https") | ||||
| 		} | ||||
| 	} | ||||
| 	rp := &httputil.ReverseProxy{Director: director} | ||||
| 	rp.ErrorHandler = ws.proxyErrorHandler | ||||
| 	rp.ModifyResponse = ws.proxyModifyResponse | ||||
| 	ws.m.PathPrefix("/").Handler(rp) | ||||
|  | ||||
| @ -4,16 +4,19 @@ import ( | ||||
| 	"net/http" | ||||
|  | ||||
| 	"goauthentik.io/internal/config" | ||||
| 	"goauthentik.io/internal/constants" | ||||
| 	staticWeb "goauthentik.io/web" | ||||
| ) | ||||
|  | ||||
| func (ws *WebServer) configureStatic() { | ||||
| 	statRouter := ws.lh.NewRoute().Subrouter() | ||||
| 	if config.G.Debug { | ||||
| 		ws.log.Debug("Using local static files") | ||||
| 		ws.lh.PathPrefix("/static/dist").Handler(http.StripPrefix("/static/dist", http.FileServer(http.Dir("./web/dist")))) | ||||
| 		ws.lh.PathPrefix("/static/authentik").Handler(http.StripPrefix("/static/authentik", http.FileServer(http.Dir("./web/authentik")))) | ||||
| 	} else { | ||||
| 		ws.log.Debug("Using packaged static files") | ||||
| 		statRouter.Use(ws.staticHeaderMiddleware) | ||||
| 		ws.log.Debug("Using packaged static files with aggressive caching") | ||||
| 		ws.lh.PathPrefix("/static/dist").Handler(http.StripPrefix("/static", http.FileServer(http.FS(staticWeb.StaticDist)))) | ||||
| 		ws.lh.PathPrefix("/static/authentik").Handler(http.StripPrefix("/static", http.FileServer(http.FS(staticWeb.StaticAuthentik)))) | ||||
| 	} | ||||
| @ -41,3 +44,12 @@ func (ws *WebServer) configureStatic() { | ||||
| 	// Media files, always local | ||||
| 	ws.lh.PathPrefix("/media").Handler(http.StripPrefix("/media", http.FileServer(http.Dir(config.G.Paths.Media)))) | ||||
| } | ||||
|  | ||||
| func (ws *WebServer) staticHeaderMiddleware(h http.Handler) http.Handler { | ||||
| 	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { | ||||
| 		w.Header().Set("Cache-Control", "\"public, no-transform\"") | ||||
| 		w.Header().Set("X-authentik-version", constants.VERSION) | ||||
| 		w.Header().Set("Vary", "X-authentik-version") | ||||
| 		h.ServeHTTP(w, r) | ||||
| 	}) | ||||
| } | ||||
|  | ||||
| @ -1,14 +1,31 @@ | ||||
| #!/bin/bash -e | ||||
| python -m lifecycle.wait_for_db | ||||
| printf '{"event": "Bootstrap completed", "level": "info", "logger": "bootstrap", "command": "%s"}\n' "$@" > /dev/stderr | ||||
|  | ||||
| function check_if_root { | ||||
|     if [[ $EUID -ne 0 ]]; then | ||||
|         printf '{"event": "Not running as root, disabling permission fixes", "level": "info", "logger": "bootstrap", "command": "%s"}\n' "$@" > /dev/stderr | ||||
|         $1 | ||||
|         return | ||||
|     fi | ||||
|     SOCKET="/var/run/docker.sock" | ||||
|     if [[ -e "$SOCKET" ]]; then | ||||
|         # Get group ID of the docker socket, so we can create a matching group and | ||||
|         # add ourselves to it | ||||
|         DOCKER_GID=$(stat -c '%g' $SOCKET) | ||||
|         getent group $DOCKER_GID || groupadd -f -g $DOCKER_GID docker | ||||
|         usermod -a -G $DOCKER_GID authentik | ||||
|     fi | ||||
|     # Fix permissions of backups and media | ||||
|     chown -R authentik:authentik /media /backups | ||||
|     chpst -u authentik env HOME=/authentik $1 | ||||
| } | ||||
|  | ||||
| if [[ "$1" == "server" ]]; then | ||||
|     python -m lifecycle.migrate | ||||
|     /authentik-proxy | ||||
| elif [[ "$1" == "worker" ]]; then | ||||
|     celery -A authentik.root.celery worker --autoscale 3,1 -E -B -s /tmp/celerybeat-schedule -Q authentik,authentik_scheduled,authentik_events | ||||
| elif [[ "$1" == "migrate" ]]; then | ||||
|     printf "DEPRECATED: database migrations are now executed automatically on startup." | ||||
|     python -m lifecycle.migrate | ||||
|     check_if_root "celery -A authentik.root.celery worker --autoscale 3,1 -E -B -s /tmp/celerybeat-schedule -Q authentik,authentik_scheduled,authentik_events" | ||||
| elif [[ "$1" == "backup" ]]; then | ||||
|     python -m manage dbbackup --clean | ||||
| elif [[ "$1" == "restore" ]]; then | ||||
|  | ||||
| @ -113,10 +113,21 @@ stages: | ||||
|             inputs: | ||||
|               containerRegistry: 'beryjuorg-harbor' | ||||
|               repository: 'authentik/outpost-proxy' | ||||
|               command: 'buildAndPush' | ||||
|               command: 'build' | ||||
|               Dockerfile: 'outpost/proxy.Dockerfile' | ||||
|               buildContext: 'outpost/' | ||||
|               tags: "gh-$(branchName)" | ||||
|               tags: | | ||||
|                 gh-$(branchName) | ||||
|                 gh-$(Build.SourceVersion) | ||||
|               arguments: '--build-arg GIT_BUILD_HASH=$(Build.SourceVersion)' | ||||
|           - task: Docker@2 | ||||
|             inputs: | ||||
|               containerRegistry: 'beryjuorg-harbor' | ||||
|               repository: 'authentik/outpost-proxy' | ||||
|               command: 'push' | ||||
|               tags: | | ||||
|                 gh-$(branchName) | ||||
|                 gh-$(Build.SourceVersion) | ||||
|       - job: ldap_build_docker | ||||
|         pool: | ||||
|           vmImage: 'ubuntu-latest' | ||||
| @ -138,7 +149,18 @@ stages: | ||||
|             inputs: | ||||
|               containerRegistry: 'beryjuorg-harbor' | ||||
|               repository: 'authentik/outpost-ldap' | ||||
|               command: 'buildAndPush' | ||||
|               command: 'build' | ||||
|               Dockerfile: 'outpost/ldap.Dockerfile' | ||||
|               buildContext: 'outpost/' | ||||
|               tags: "gh-$(branchName)" | ||||
|               tags: | | ||||
|                 gh-$(branchName) | ||||
|                 gh-$(Build.SourceVersion) | ||||
|               arguments: '--build-arg GIT_BUILD_HASH=$(Build.SourceVersion)' | ||||
|           - task: Docker@2 | ||||
|             inputs: | ||||
|               containerRegistry: 'beryjuorg-harbor' | ||||
|               repository: 'authentik/outpost-ldap' | ||||
|               command: 'push' | ||||
|               tags: | | ||||
|                 gh-$(branchName) | ||||
|                 gh-$(Build.SourceVersion) | ||||
|  | ||||
| @ -17,6 +17,7 @@ require ( | ||||
| 	github.com/go-redis/redis/v7 v7.4.0 // indirect | ||||
| 	github.com/go-swagger/go-swagger v0.27.0 // indirect | ||||
| 	github.com/golang/protobuf v1.5.2 // indirect | ||||
| 	github.com/google/uuid v1.2.0 // indirect | ||||
| 	github.com/gorilla/websocket v1.4.2 | ||||
| 	github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a | ||||
| 	github.com/justinas/alice v1.2.0 | ||||
|  | ||||
| @ -352,6 +352,8 @@ github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm4 | ||||
| github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= | ||||
| github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY= | ||||
| github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= | ||||
| github.com/google/uuid v1.2.0 h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs= | ||||
| github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= | ||||
| github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= | ||||
| github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= | ||||
| github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= | ||||
|  | ||||
| @ -1,4 +1,6 @@ | ||||
| FROM golang:1.16.4 AS builder | ||||
| ARG GIT_BUILD_HASH | ||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||
|  | ||||
| WORKDIR /work | ||||
|  | ||||
|  | ||||
| @ -1,13 +1,13 @@ | ||||
| package ak | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"math/rand" | ||||
| 	"net/url" | ||||
| 	"os" | ||||
| 	"time" | ||||
|  | ||||
| 	"github.com/go-openapi/runtime" | ||||
| 	"github.com/google/uuid" | ||||
| 	"github.com/pkg/errors" | ||||
| 	"github.com/recws-org/recws" | ||||
| 	"goauthentik.io/outpost/pkg" | ||||
| @ -35,13 +35,14 @@ type APIController struct { | ||||
|  | ||||
| 	reloadOffset time.Duration | ||||
|  | ||||
| 	wsConn *recws.RecConn | ||||
| 	wsConn       *recws.RecConn | ||||
| 	instanceUUID uuid.UUID | ||||
| } | ||||
|  | ||||
| // NewAPIController initialise new API Controller instance from URL and API token | ||||
| func NewAPIController(akURL url.URL, token string) *APIController { | ||||
| 	transport := httptransport.New(akURL.Host, client.DefaultBasePath, []string{akURL.Scheme}) | ||||
| 	transport.Transport = SetUserAgent(getTLSTransport(), fmt.Sprintf("authentik-proxy@%s", pkg.VERSION)) | ||||
| 	transport.Transport = SetUserAgent(GetTLSTransport(), pkg.UserAgent()) | ||||
|  | ||||
| 	// create the transport | ||||
| 	auth := httptransport.BearerToken(token) | ||||
| @ -70,6 +71,7 @@ func NewAPIController(akURL url.URL, token string) *APIController { | ||||
| 		logger: log, | ||||
|  | ||||
| 		reloadOffset: time.Duration(rand.Intn(10)) * time.Second, | ||||
| 		instanceUUID: uuid.New(), | ||||
| 	} | ||||
| 	ac.logger.Debugf("HA Reload offset: %s", ac.reloadOffset) | ||||
| 	ac.initWS(akURL, outpost.Pk) | ||||
| @ -90,6 +92,10 @@ func (a *APIController) Start() error { | ||||
| 		a.logger.Debug("Starting WS Health notifier...") | ||||
| 		a.startWSHealth() | ||||
| 	}() | ||||
| 	go func() { | ||||
| 		a.logger.Debug("Starting Interval updater...") | ||||
| 		a.startIntervalUpdater() | ||||
| 	}() | ||||
| 	go func() { | ||||
| 		err := a.Server.Start() | ||||
| 		if err != nil { | ||||
|  | ||||
| @ -23,7 +23,7 @@ func (ac *APIController) initWS(akURL url.URL, outpostUUID strfmt.UUID) { | ||||
|  | ||||
| 	header := http.Header{ | ||||
| 		"Authorization": []string{authHeader}, | ||||
| 		"User-Agent":    []string{fmt.Sprintf("authentik-proxy@%s", pkg.VERSION)}, | ||||
| 		"User-Agent":    []string{pkg.UserAgent()}, | ||||
| 	} | ||||
|  | ||||
| 	value, set := os.LookupEnv("AUTHENTIK_INSECURE") | ||||
| @ -46,7 +46,9 @@ func (ac *APIController) initWS(akURL url.URL, outpostUUID strfmt.UUID) { | ||||
| 	msg := websocketMessage{ | ||||
| 		Instruction: WebsocketInstructionHello, | ||||
| 		Args: map[string]interface{}{ | ||||
| 			"version": pkg.VERSION, | ||||
| 			"version":   pkg.VERSION, | ||||
| 			"buildHash": pkg.BUILD(), | ||||
| 			"uuid":      ac.instanceUUID.String(), | ||||
| 		}, | ||||
| 	} | ||||
| 	err := ws.WriteJSON(msg) | ||||
| @ -75,7 +77,7 @@ func (ac *APIController) startWSHandler() { | ||||
| 		var wsMsg websocketMessage | ||||
| 		err := ac.wsConn.ReadJSON(&wsMsg) | ||||
| 		if err != nil { | ||||
| 			logger.Println("read:", err) | ||||
| 			logger.WithError(err).Warning("ws read error, reconnecting") | ||||
| 			ac.wsConn.CloseAndReconnect() | ||||
| 			continue | ||||
| 		} | ||||
| @ -99,15 +101,28 @@ func (ac *APIController) startWSHealth() { | ||||
| 		aliveMsg := websocketMessage{ | ||||
| 			Instruction: WebsocketInstructionHello, | ||||
| 			Args: map[string]interface{}{ | ||||
| 				"version": pkg.VERSION, | ||||
| 				"version":   pkg.VERSION, | ||||
| 				"buildHash": pkg.BUILD(), | ||||
| 				"uuid":      ac.instanceUUID.String(), | ||||
| 			}, | ||||
| 		} | ||||
| 		err := ac.wsConn.WriteJSON(aliveMsg) | ||||
| 		ac.logger.WithField("loop", "ws-health").Trace("hello'd") | ||||
| 		if err != nil { | ||||
| 			ac.logger.WithField("loop", "ws-health").Println("write:", err) | ||||
| 			ac.logger.WithField("loop", "ws-health").WithError(err).Warning("ws write error, reconnecting") | ||||
| 			ac.wsConn.CloseAndReconnect() | ||||
| 			continue | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (ac *APIController) startIntervalUpdater() { | ||||
| 	logger := ac.logger.WithField("loop", "interval-updater") | ||||
| 	ticker := time.NewTicker(time.Second * 150) | ||||
| 	for ; true; <-ticker.C { | ||||
| 		err := ac.Server.Refresh() | ||||
| 		if err != nil { | ||||
| 			logger.WithError(err).Debug("Failed to update") | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| @ -20,6 +20,8 @@ func doGlobalSetup(config map[string]interface{}) { | ||||
| 		}, | ||||
| 	}) | ||||
| 	switch config[ConfigLogLevel].(string) { | ||||
| 	case "trace": | ||||
| 		log.SetLevel(log.TraceLevel) | ||||
| 	case "debug": | ||||
| 		log.SetLevel(log.DebugLevel) | ||||
| 	case "info": | ||||
| @ -31,7 +33,7 @@ func doGlobalSetup(config map[string]interface{}) { | ||||
| 	default: | ||||
| 		log.SetLevel(log.DebugLevel) | ||||
| 	} | ||||
| 	log.WithField("version", pkg.VERSION).Info("Starting authentik outpost") | ||||
| 	log.WithField("buildHash", pkg.BUILD()).WithField("version", pkg.VERSION).Info("Starting authentik outpost") | ||||
|  | ||||
| 	var dsn string | ||||
| 	if config[ConfigErrorReportingEnabled].(bool) { | ||||
| @ -50,7 +52,8 @@ func doGlobalSetup(config map[string]interface{}) { | ||||
| 	defer sentry.Flush(2 * time.Second) | ||||
| } | ||||
|  | ||||
| func getTLSTransport() http.RoundTripper { | ||||
| // GetTLSTransport Get a TLS transport instance that skips verification if configured via environment variables. | ||||
| func GetTLSTransport() http.RoundTripper { | ||||
| 	value, set := os.LookupEnv("AUTHENTIK_INSECURE") | ||||
| 	if !set { | ||||
| 		value = "false" | ||||
|  | ||||
| @ -55,14 +55,18 @@ func (ls *LDAPServer) Start() error { | ||||
|  | ||||
| type transport struct { | ||||
| 	headers map[string]string | ||||
| 	inner   http.RoundTripper | ||||
| } | ||||
|  | ||||
| func (t *transport) RoundTrip(req *http.Request) (*http.Response, error) { | ||||
| 	for key, value := range t.headers { | ||||
| 		req.Header.Add(key, value) | ||||
| 	} | ||||
| 	return http.DefaultTransport.RoundTrip(req) | ||||
| 	return t.inner.RoundTrip(req) | ||||
| } | ||||
| func newTransport(headers map[string]string) *transport { | ||||
| 	return &transport{headers} | ||||
| func newTransport(inner http.RoundTripper, headers map[string]string) *transport { | ||||
| 	return &transport{ | ||||
| 		inner:   inner, | ||||
| 		headers: headers, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| @ -2,20 +2,22 @@ package ldap | ||||
|  | ||||
| import ( | ||||
| 	"net" | ||||
| 	"strings" | ||||
|  | ||||
| 	"github.com/nmcclain/ldap" | ||||
| ) | ||||
|  | ||||
| func (ls *LDAPServer) Bind(bindDN string, bindPW string, conn net.Conn) (ldap.LDAPResultCode, error) { | ||||
| 	ls.log.WithField("boundDN", bindDN).Info("bind") | ||||
| 	ls.log.WithField("bindDN", bindDN).Info("bind") | ||||
| 	bindDN = strings.ToLower(bindDN) | ||||
| 	for _, instance := range ls.providers { | ||||
| 		username, err := instance.getUsername(bindDN) | ||||
| 		if err == nil { | ||||
| 			return instance.Bind(username, bindPW, conn) | ||||
| 			return instance.Bind(username, bindDN, bindPW, conn) | ||||
| 		} else { | ||||
| 			ls.log.WithError(err).Debug("Username not for instance") | ||||
| 		} | ||||
| 	} | ||||
| 	ls.log.WithField("boundDN", bindDN).WithField("request", "bind").Warning("No provider found for request") | ||||
| 	ls.log.WithField("bindDN", bindDN).WithField("request", "bind").Warning("No provider found for request") | ||||
| 	return ldap.LDAPResultOperationsError, nil | ||||
| } | ||||
|  | ||||
| @ -14,6 +14,8 @@ import ( | ||||
| 	goldap "github.com/go-ldap/ldap/v3" | ||||
| 	httptransport "github.com/go-openapi/runtime/client" | ||||
| 	"github.com/nmcclain/ldap" | ||||
| 	"goauthentik.io/outpost/pkg" | ||||
| 	"goauthentik.io/outpost/pkg/ak" | ||||
| 	"goauthentik.io/outpost/pkg/client/core" | ||||
| 	"goauthentik.io/outpost/pkg/client/flows" | ||||
| 	"goauthentik.io/outpost/pkg/models" | ||||
| @ -47,7 +49,7 @@ func (pi *ProviderInstance) getUsername(dn string) (string, error) { | ||||
| 	return "", errors.New("failed to find cn") | ||||
| } | ||||
|  | ||||
| func (pi *ProviderInstance) Bind(username string, bindPW string, conn net.Conn) (ldap.LDAPResultCode, error) { | ||||
| func (pi *ProviderInstance) Bind(username string, bindDN, bindPW string, conn net.Conn) (ldap.LDAPResultCode, error) { | ||||
| 	jar, err := cookiejar.New(nil) | ||||
| 	if err != nil { | ||||
| 		pi.log.WithError(err).Warning("Failed to create cookiejar") | ||||
| @ -61,15 +63,15 @@ func (pi *ProviderInstance) Bind(username string, bindPW string, conn net.Conn) | ||||
| 	// Create new http client that also sets the correct ip | ||||
| 	client := &http.Client{ | ||||
| 		Jar: jar, | ||||
| 		Transport: newTransport(map[string]string{ | ||||
| 		Transport: newTransport(ak.SetUserAgent(ak.GetTLSTransport(), pkg.UserAgent()), map[string]string{ | ||||
| 			"X-authentik-remote-ip": host, | ||||
| 		}), | ||||
| 	} | ||||
| 	params := url.Values{} | ||||
| 	params.Add("goauthentik.io/outpost/ldap", "true") | ||||
| 	passed, err := pi.solveFlowChallenge(username, bindPW, client, params.Encode()) | ||||
| 	passed, err := pi.solveFlowChallenge(username, bindPW, client, params.Encode(), 1) | ||||
| 	if err != nil { | ||||
| 		pi.log.WithField("boundDN", username).WithError(err).Warning("failed to solve challenge") | ||||
| 		pi.log.WithField("bindDN", bindDN).WithError(err).Warning("failed to solve challenge") | ||||
| 		return ldap.LDAPResultOperationsError, nil | ||||
| 	} | ||||
| 	if !passed { | ||||
| @ -82,25 +84,25 @@ func (pi *ProviderInstance) Bind(username string, bindPW string, conn net.Conn) | ||||
| 	}, httptransport.PassThroughAuth) | ||||
| 	if err != nil { | ||||
| 		if _, denied := err.(*core.CoreApplicationsCheckAccessForbidden); denied { | ||||
| 			pi.log.WithField("boundDN", username).Info("Access denied for user") | ||||
| 			pi.log.WithField("bindDN", bindDN).Info("Access denied for user") | ||||
| 			return ldap.LDAPResultInsufficientAccessRights, nil | ||||
| 		} | ||||
| 		pi.log.WithField("boundDN", username).WithError(err).Warning("failed to check access") | ||||
| 		pi.log.WithField("bindDN", bindDN).WithError(err).Warning("failed to check access") | ||||
| 		return ldap.LDAPResultOperationsError, nil | ||||
| 	} | ||||
| 	pi.log.WithField("boundDN", username).Info("User has access") | ||||
| 	pi.log.WithField("bindDN", bindDN).Info("User has access") | ||||
| 	// Get user info to store in context | ||||
| 	userInfo, err := pi.s.ac.Client.Core.CoreUsersMe(&core.CoreUsersMeParams{ | ||||
| 		Context:    context.Background(), | ||||
| 		HTTPClient: client, | ||||
| 	}, httptransport.PassThroughAuth) | ||||
| 	if err != nil { | ||||
| 		pi.log.WithField("boundDN", username).WithError(err).Warning("failed to get user info") | ||||
| 		pi.log.WithField("bindDN", bindDN).WithError(err).Warning("failed to get user info") | ||||
| 		return ldap.LDAPResultOperationsError, nil | ||||
| 	} | ||||
| 	pi.boundUsersMutex.Lock() | ||||
| 	pi.boundUsers[username] = UserFlags{ | ||||
| 		UserInfo:  userInfo.Payload.User, | ||||
| 	pi.boundUsers[bindDN] = UserFlags{ | ||||
| 		UserInfo:  *userInfo.Payload.User, | ||||
| 		CanSearch: pi.SearchAccessCheck(userInfo.Payload.User), | ||||
| 	} | ||||
| 	defer pi.boundUsersMutex.Unlock() | ||||
| @ -112,7 +114,8 @@ func (pi *ProviderInstance) Bind(username string, bindPW string, conn net.Conn) | ||||
| func (pi *ProviderInstance) SearchAccessCheck(user *models.User) bool { | ||||
| 	for _, group := range user.Groups { | ||||
| 		for _, allowedGroup := range pi.searchAllowedGroups { | ||||
| 			if &group.Pk == allowedGroup { | ||||
| 			pi.log.WithField("userGroup", group.Pk).WithField("allowedGroup", allowedGroup).Trace("Checking search access") | ||||
| 			if group.Pk.String() == allowedGroup.String() { | ||||
| 				pi.log.WithField("group", group.Name).Info("Allowed access to search") | ||||
| 				return true | ||||
| 			} | ||||
| @ -139,7 +142,7 @@ func (pi *ProviderInstance) delayDeleteUserInfo(dn string) { | ||||
| 	}() | ||||
| } | ||||
|  | ||||
| func (pi *ProviderInstance) solveFlowChallenge(bindDN string, password string, client *http.Client, urlParams string) (bool, error) { | ||||
| func (pi *ProviderInstance) solveFlowChallenge(bindDN string, password string, client *http.Client, urlParams string, depth int) (bool, error) { | ||||
| 	challenge, err := pi.s.ac.Client.Flows.FlowsExecutorGet(&flows.FlowsExecutorGetParams{ | ||||
| 		FlowSlug:   pi.flowSlug, | ||||
| 		Query:      urlParams, | ||||
| @ -169,6 +172,10 @@ func (pi *ProviderInstance) solveFlowChallenge(bindDN string, password string, c | ||||
| 	} | ||||
| 	response, err := pi.s.ac.Client.Flows.FlowsExecutorSolve(responseParams, pi.s.ac.Auth) | ||||
| 	pi.log.WithField("component", response.Payload.Component).WithField("type", *response.Payload.Type).Debug("Got response") | ||||
| 	switch response.Payload.Component { | ||||
| 	case "ak-stage-access-denied": | ||||
| 		return false, errors.New("got ak-stage-access-denied") | ||||
| 	} | ||||
| 	if *response.Payload.Type == "redirect" { | ||||
| 		return true, nil | ||||
| 	} | ||||
| @ -184,5 +191,8 @@ func (pi *ProviderInstance) solveFlowChallenge(bindDN string, password string, c | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 	return pi.solveFlowChallenge(bindDN, password, client, urlParams) | ||||
| 	if depth >= 10 { | ||||
| 		return false, errors.New("exceeded stage recursion depth") | ||||
| 	} | ||||
| 	return pi.solveFlowChallenge(bindDN, password, client, urlParams, depth+1) | ||||
| } | ||||
|  | ||||
| @ -29,10 +29,13 @@ func (pi *ProviderInstance) Search(bindDN string, searchReq ldap.SearchRequest, | ||||
| 	pi.boundUsersMutex.RLock() | ||||
| 	defer pi.boundUsersMutex.RUnlock() | ||||
| 	flags, ok := pi.boundUsers[bindDN] | ||||
| 	pi.log.WithField("bindDN", bindDN).WithField("ok", ok).Debugf("%+v\n", flags) | ||||
| 	if !ok { | ||||
| 		pi.log.Debug("User info not cached") | ||||
| 		return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, errors.New("access denied") | ||||
| 	} | ||||
| 	if !flags.CanSearch { | ||||
| 		pi.log.Debug("User can't search") | ||||
| 		return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, errors.New("access denied") | ||||
| 	} | ||||
|  | ||||
| @ -114,7 +117,7 @@ func (pi *ProviderInstance) Search(bindDN string, searchReq ldap.SearchRequest, | ||||
|  | ||||
| 			attrs = append(attrs, AKAttrsToLDAP(u.Attributes)...) | ||||
|  | ||||
| 			dn := fmt.Sprintf("cn=%s,%s", *u.Name, pi.UserDN) | ||||
| 			dn := fmt.Sprintf("cn=%s,%s", *u.Username, pi.UserDN) | ||||
| 			entries = append(entries, &ldap.Entry{DN: dn, Attributes: attrs}) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| @ -31,7 +31,7 @@ type ProviderInstance struct { | ||||
| } | ||||
|  | ||||
| type UserFlags struct { | ||||
| 	UserInfo  *models.User | ||||
| 	UserInfo  models.User | ||||
| 	CanSearch bool | ||||
| } | ||||
|  | ||||
|  | ||||
| @ -8,8 +8,8 @@ import ( | ||||
| 	"github.com/nmcclain/ldap" | ||||
| ) | ||||
|  | ||||
| func (ls *LDAPServer) Search(boundDN string, searchReq ldap.SearchRequest, conn net.Conn) (ldap.ServerSearchResult, error) { | ||||
| 	ls.log.WithField("boundDN", boundDN).WithField("baseDN", searchReq.BaseDN).Info("search") | ||||
| func (ls *LDAPServer) Search(bindDN string, searchReq ldap.SearchRequest, conn net.Conn) (ldap.ServerSearchResult, error) { | ||||
| 	ls.log.WithField("bindDN", bindDN).WithField("baseDN", searchReq.BaseDN).Info("search") | ||||
| 	if searchReq.BaseDN == "" { | ||||
| 		return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultSuccess}, nil | ||||
| 	} | ||||
| @ -21,7 +21,7 @@ func (ls *LDAPServer) Search(boundDN string, searchReq ldap.SearchRequest, conn | ||||
| 	for _, provider := range ls.providers { | ||||
| 		providerBase, _ := goldap.ParseDN(provider.BaseDN) | ||||
| 		if providerBase.AncestorOf(bd) { | ||||
| 			return provider.Search(boundDN, searchReq, conn) | ||||
| 			return provider.Search(bindDN, searchReq, conn) | ||||
| 		} | ||||
| 	} | ||||
| 	return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultOperationsError}, errors.New("no provider could handle request") | ||||
|  | ||||
| @ -80,19 +80,19 @@ func (pb *providerBundle) prepareOpts(provider *models.ProxyOutpostConfig) *opti | ||||
| 				ID:                    "default", | ||||
| 				URI:                   provider.InternalHost, | ||||
| 				Path:                  "/", | ||||
| 				InsecureSkipTLSVerify: provider.InternalHostSslValidation, | ||||
| 				InsecureSkipTLSVerify: !provider.InternalHostSslValidation, | ||||
| 			}, | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if provider.Certificate != nil { | ||||
| 		pb.log.WithField("provider", provider.ClientID).Debug("Enabling TLS") | ||||
| 		pb.log.WithField("provider", provider.Name).Debug("Enabling TLS") | ||||
| 		cert, err := pb.s.ak.Client.Crypto.CryptoCertificatekeypairsViewCertificate(&crypto.CryptoCertificatekeypairsViewCertificateParams{ | ||||
| 			Context: context.Background(), | ||||
| 			KpUUID:  *provider.Certificate, | ||||
| 		}, pb.s.ak.Auth) | ||||
| 		if err != nil { | ||||
| 			pb.log.WithField("provider", provider.ClientID).WithError(err).Warning("Failed to fetch certificate") | ||||
| 			pb.log.WithField("provider", provider.Name).WithError(err).Warning("Failed to fetch certificate") | ||||
| 			return providerOpts | ||||
| 		} | ||||
| 		key, err := pb.s.ak.Client.Crypto.CryptoCertificatekeypairsViewPrivateKey(&crypto.CryptoCertificatekeypairsViewPrivateKeyParams{ | ||||
| @ -100,17 +100,17 @@ func (pb *providerBundle) prepareOpts(provider *models.ProxyOutpostConfig) *opti | ||||
| 			KpUUID:  *provider.Certificate, | ||||
| 		}, pb.s.ak.Auth) | ||||
| 		if err != nil { | ||||
| 			pb.log.WithField("provider", provider.ClientID).WithError(err).Warning("Failed to fetch private key") | ||||
| 			pb.log.WithField("provider", provider.Name).WithError(err).Warning("Failed to fetch private key") | ||||
| 			return providerOpts | ||||
| 		} | ||||
|  | ||||
| 		x509cert, err := tls.X509KeyPair([]byte(cert.Payload.Data), []byte(key.Payload.Data)) | ||||
| 		if err != nil { | ||||
| 			pb.log.WithField("provider", provider.ClientID).WithError(err).Warning("Failed to parse certificate") | ||||
| 			pb.log.WithField("provider", provider.Name).WithError(err).Warning("Failed to parse certificate") | ||||
| 			return providerOpts | ||||
| 		} | ||||
| 		pb.cert = &x509cert | ||||
| 		pb.log.WithField("provider", provider.ClientID).Debug("Loaded certificates") | ||||
| 		pb.log.WithField("provider", provider.Name).Debug("Loaded certificates") | ||||
| 	} | ||||
| 	return providerOpts | ||||
| } | ||||
|  | ||||
| @ -161,7 +161,7 @@ func (p *OAuthProxy) OAuthStart(rw http.ResponseWriter, req *http.Request) { | ||||
| 		p.ErrorPage(rw, http.StatusInternalServerError, "Internal Server Error", err.Error()) | ||||
| 		return | ||||
| 	} | ||||
| 	redirectURI := p.GetRedirectURI(getHost(req)) | ||||
| 	redirectURI := p.GetRedirectURI(req.Host) | ||||
| 	http.Redirect(rw, req, p.provider.GetLoginURL(redirectURI, fmt.Sprintf("%v:%v", nonce, redirect)), http.StatusFound) | ||||
| } | ||||
|  | ||||
| @ -184,7 +184,7 @@ func (p *OAuthProxy) OAuthCallback(rw http.ResponseWriter, req *http.Request) { | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	session, err := p.redeemCode(req.Context(), getHost(req), req.Form.Get("code")) | ||||
| 	session, err := p.redeemCode(req.Context(), req.Host, req.Form.Get("code")) | ||||
| 	if err != nil { | ||||
| 		p.logger.Errorf("Error redeeming code during OAuth2 callback: %v", err) | ||||
| 		p.ErrorPage(rw, http.StatusInternalServerError, "Internal Server Error", "Internal Error") | ||||
| @ -207,7 +207,7 @@ func (p *OAuthProxy) OAuthCallback(rw http.ResponseWriter, req *http.Request) { | ||||
| 	} | ||||
| 	p.ClearCSRFCookie(rw, req) | ||||
| 	if c.Value != nonce { | ||||
| 		p.logger.WithField("user", session.Email).WithField("status", "AuthFailure").Info("Invalid authentication via OAuth2: CSRF token mismatch, potential attack") | ||||
| 		p.logger.WithField("is", c.Value).WithField("should", nonce).WithField("user", session.Email).WithField("status", "AuthFailure").Info("Invalid authentication via OAuth2: CSRF token mismatch, potential attack") | ||||
| 		p.ErrorPage(rw, http.StatusForbidden, "Permission Denied", "CSRF Failed") | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| @ -57,7 +57,7 @@ func (s *Server) handler(w http.ResponseWriter, r *http.Request) { | ||||
| 		for k := range s.Handlers { | ||||
| 			hostKeys = append(hostKeys, k) | ||||
| 		} | ||||
| 		s.logger.WithField("host", host).WithField("known-hosts", strings.Join(hostKeys, ", ")).Debug("Host header does not match any we know of") | ||||
| 		s.logger.WithField("host", host).WithField("known-hosts", strings.Join(hostKeys, ",")).Debug("Host header does not match any we know of") | ||||
| 		w.WriteHeader(404) | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| @ -13,12 +13,14 @@ func getTemplates() *template.Template { | ||||
| <head> | ||||
| 	<title>{{.Title}}</title> | ||||
| 	<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no"> | ||||
| 	<style>* { font-family: sans-serif; }</style> | ||||
| </head> | ||||
| <body> | ||||
| 	<h2>{{.Title}}</h2> | ||||
| 	<p>{{.Message}}</p> | ||||
| 	<hr> | ||||
| 	<p><a href="{{.ProxyPrefix}}/sign_in">Sign In</a></p> | ||||
| 	<p>Powered by <a href="https://goauthentik.io">authentik</a></p> | ||||
| </body> | ||||
| </html>{{end}}`) | ||||
| 	if err != nil { | ||||
|  | ||||
| @ -1,12 +1,20 @@ | ||||
| package proxy | ||||
|  | ||||
| import "net/http" | ||||
| import ( | ||||
| 	"net" | ||||
| 	"net/http" | ||||
| ) | ||||
|  | ||||
| var xForwardedHost = http.CanonicalHeaderKey("X-Forwarded-Host") | ||||
|  | ||||
| func getHost(req *http.Request) string { | ||||
| 	host := req.Host | ||||
| 	if req.Header.Get(xForwardedHost) != "" { | ||||
| 		return req.Header.Get(xForwardedHost) | ||||
| 		host = req.Header.Get(xForwardedHost) | ||||
| 	} | ||||
| 	return req.Host | ||||
| 	hostOnly, _, err := net.SplitHostPort(host) | ||||
| 	if err != nil { | ||||
| 		return host | ||||
| 	} | ||||
| 	return hostOnly | ||||
| } | ||||
|  | ||||
| @ -1,3 +1,16 @@ | ||||
| package pkg | ||||
|  | ||||
| const VERSION = "2021.5.1-rc6" | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"os" | ||||
| ) | ||||
|  | ||||
| const VERSION = "2021.5.4" | ||||
|  | ||||
| func BUILD() string { | ||||
| 	return os.Getenv("GIT_BUILD_HASH") | ||||
| } | ||||
|  | ||||
| func UserAgent() string { | ||||
| 	return fmt.Sprintf("authentik-outpost@%s (%s)", VERSION, BUILD()) | ||||
| } | ||||
|  | ||||
| @ -1,4 +1,6 @@ | ||||
| FROM golang:1.16.4 AS builder | ||||
| ARG GIT_BUILD_HASH | ||||
| ENV GIT_BUILD_HASH=$GIT_BUILD_HASH | ||||
|  | ||||
| WORKDIR /work | ||||
|  | ||||
|  | ||||
Some files were not shown because too many files have changed in this diff