Compare commits
205 Commits
version/20...core/soft-
| SHA1 | Author | Date | |
|---|---|---|---|
| a5379c35aa | |||
| e4c11a5284 | |||
| a4853a1e09 | |||
| b65b72d910 | |||
| cd7be6a1a4 | |||
| e5cb8ef541 | |||
| 919b56c466 | |||
| db7bc8b7ad | |||
| 5768cb5858 | |||
| 5b77bc33c7 | |||
| 93650e925a | |||
| 83823068fe | |||
| d922f41438 | |||
| ef3d545d7a | |||
| e9efbb2161 | |||
| a3634ab21d | |||
| f28209548b | |||
| 9589e04072 | |||
| 6490703ba3 | |||
| ca2fce05f5 | |||
| 9dc813d9ab | |||
| 833c66a9dd | |||
| 5d54f696d4 | |||
| 0746652995 | |||
| a1a55c644a | |||
| fce57d258e | |||
| 17e30b7adc | |||
| c5b3f8a578 | |||
| 3a3619fa18 | |||
| 21b933efff | |||
| 97fc2cba69 | |||
| 7ef627d476 | |||
| d16c603499 | |||
| 99a69bb52f | |||
| ac8192d660 | |||
| cdf3449230 | |||
| ef2a40ed7d | |||
| 09cacbd76b | |||
| cb33f0d1e2 | |||
| 90af4b29a6 | |||
| 6b9158591e | |||
| 9c15cda191 | |||
| 046b8d5cbf | |||
| 8b74b83983 | |||
| 8de038b387 | |||
| 2edc651582 | |||
| 85594a119c | |||
| 1a97ccea03 | |||
| 99ad492951 | |||
| ff4ec6f9b4 | |||
| 0c49de67b8 | |||
| 0d73528ec7 | |||
| 80ca4e5722 | |||
| 1ebe200a46 | |||
| 5683c81f27 | |||
| f0477309d5 | |||
| 04d613d213 | |||
| b5928c2f7f | |||
| c8e7247d2c | |||
| ac6266a23a | |||
| 88213f67ee | |||
| f8fd17f77e | |||
| 7f127ee515 | |||
| ed214b4ac8 | |||
| aeb1b450eb | |||
| 18b4b2d7b2 | |||
| a140bad8fb | |||
| bb1b8ab7bb | |||
| 6802614fbf | |||
| 619113e810 | |||
| a8697bf1ad | |||
| f52dec4b7e | |||
| 6560bf18a4 | |||
| 315cd40e6a | |||
| a7a62b5005 | |||
| 37e3998211 | |||
| 31be26ebbd | |||
| 42b1cb06fb | |||
| 066ec35adf | |||
| 87a808a747 | |||
| d8b1cd757e | |||
| b1b9c8e0e5 | |||
| a0a617055b | |||
| 9ec6f548a6 | |||
| 46980db582 | |||
| d8fd1ddec6 | |||
| 74d29e2374 | |||
| 801a28ef65 | |||
| 3fff090612 | |||
| b071d55b4d | |||
| 244cbc5b6d | |||
| 74da359dd5 | |||
| 56b73e3bd5 | |||
| 59e3c85568 | |||
| 746c933e63 | |||
| f165bbca5d | |||
| f335b08ec2 | |||
| 6e831a4253 | |||
| 6c1687c569 | |||
| 09c64e2354 | |||
| 0a312821ee | |||
| 06d1062423 | |||
| dcfa3dc88a | |||
| c45bb8e985 | |||
| 3e4fea875a | |||
| c7670d271a | |||
| 570f3a4d42 | |||
| 3c54e94c6e | |||
| 26daaeb57d | |||
| a60442fc2c | |||
| 8790f7059a | |||
| 49cf10e9bd | |||
| 13da6f5151 | |||
| a1e0564f8f | |||
| 55f3664063 | |||
| baabd8614f | |||
| 79df24f4eb | |||
| f1afc4d263 | |||
| 643a256f01 | |||
| b7f92ef0ea | |||
| e33ca93f05 | |||
| 79af8b8638 | |||
| d2b8bd3635 | |||
| 02e01559f4 | |||
| b0c39e4843 | |||
| 039570a140 | |||
| fdc7dedc58 | |||
| 098fcdeaf2 | |||
| 3cf9278bea | |||
| 13ccb352d7 | |||
| c5b099856d | |||
| 6d912be7f6 | |||
| 0c54d266d3 | |||
| c4784cf383 | |||
| 44ccbe2fdf | |||
| d2615f0d6a | |||
| 5ab3cf4952 | |||
| 1926a472cd | |||
| d220ca6bab | |||
| 759ea731bf | |||
| e01fd5eb1a | |||
| e716e24ec6 | |||
| e9c84b8bfb | |||
| 130adf9d26 | |||
| 6aab505cd7 | |||
| a9c597bc08 | |||
| 853239dff9 | |||
| 8f8c3e4944 | |||
| dde9960b9c | |||
| b1e48a6c1a | |||
| b704e9031e | |||
| 15ef5dc792 | |||
| 6c4a1850b0 | |||
| 183d036f3c | |||
| b324dc0ce2 | |||
| 6ad7be65ec | |||
| 8bf335a2a5 | |||
| 45709770f4 | |||
| 6158dd80ca | |||
| 468d26c587 | |||
| c39a97ca58 | |||
| 8f0810ebb3 | |||
| 98e0f12d17 | |||
| 8d37e83df7 | |||
| a306bb8384 | |||
| c80116475b | |||
| 2997382df2 | |||
| 65e48907d3 | |||
| 1c4848ed8f | |||
| 64f7fa62dd | |||
| 16abaa8016 | |||
| 4cc4a3e4b8 | |||
| 8abe1f61ea | |||
| 6712095d7e | |||
| 5ab308bfd7 | |||
| 8b93fbcc69 | |||
| f641670139 | |||
| 80af26ef50 | |||
| 64ce170882 | |||
| b6171aa1a4 | |||
| 087582abbd | |||
| 6b6d88b81b | |||
| 55e5d36df5 | |||
| fc43e841c9 | |||
| 895ed6fbdc | |||
| f3965261c5 | |||
| 34ee6dc2b7 | |||
| 55fe4b0bc0 | |||
| 8d745609f9 | |||
| 55edb10da0 | |||
| 66e4b3af36 | |||
| d44fc7790e | |||
| 291972628a | |||
| 019221c433 | |||
| b99fa9f8f8 | |||
| 5bde2772c3 | |||
| 10884a7770 | |||
| e858d09d28 | |||
| 856717395e | |||
| b7793200de | |||
| bcc0323523 | |||
| 643c1f5bbf | |||
| 1fca246839 | |||
| b73e68a94c | |||
| f9d3c4c9a7 |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2024.4.4
+current_version = 2024.4.2
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
@@ -54,9 +54,9 @@ image_main_tag = image_tags[0]
 image_tags_rendered = ",".join(image_tags)

 with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
-    print("shouldBuild=%s" % should_build, file=_output)
-    print("sha=%s" % sha, file=_output)
-    print("version=%s" % version, file=_output)
-    print("prerelease=%s" % prerelease, file=_output)
-    print("imageTags=%s" % image_tags_rendered, file=_output)
-    print("imageMainTag=%s" % image_main_tag, file=_output)
+    print(f"shouldBuild={should_build}", file=_output)
+    print(f"sha={sha}", file=_output)
+    print(f"version={version}", file=_output)
+    print(f"prerelease={prerelease}", file=_output)
+    print(f"imageTags={image_tags_rendered}", file=_output)
+    print(f"imageMainTag={image_main_tag}", file=_output)
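Both sides of the hunk above publish step outputs the same way: the script appends key=value lines to the file named by the GITHUB_OUTPUT environment variable, which later workflow steps read as outputs; only the string-formatting style changes. A minimal standalone sketch of that mechanism (the keys and values here are placeholders, not taken from the diff):

```python
import os

# Placeholder outputs; in the real workflow these come from the build logic above.
outputs = {"shouldBuild": "true", "version": "2024.4.2"}

with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
    for key, value in outputs.items():
        # Each "key=value" line becomes a step output readable as steps.<id>.outputs.<key>
        print(f"{key}={value}", file=_output)
```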
.github/workflows/ci-main.yml (2 changes, vendored)
@@ -130,7 +130,7 @@ jobs:
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: Create k8s Kind Cluster
-        uses: helm/kind-action@v1.9.0
+        uses: helm/kind-action@v1.10.0
       - name: run integration
         run: |
           poetry run coverage run manage.py test tests/integration
.github/workflows/ci-outpost.yml (2 changes, vendored)
@@ -29,7 +29,7 @@ jobs:
       - name: Generate API
         run: make gen-client-go
       - name: golangci-lint
-        uses: golangci/golangci-lint-action@v4
+        uses: golangci/golangci-lint-action@v6
         with:
           version: v1.54.2
           args: --timeout 5000s --verbose
.github/workflows/release-publish.yml (4 changes, vendored)
@@ -155,8 +155,8 @@ jobs:
       - uses: actions/checkout@v4
       - name: Run test suite in final docker images
         run: |
-          echo "PG_PASS=$(openssl rand -base64 32)" >> .env
-          echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
+          echo "PG_PASS=$(openssl rand 32 | base64)" >> .env
+          echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64)" >> .env
           docker compose pull -q
           docker compose up --no-start
           docker compose start postgresql redis
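The two release workflows and the Makefile below make the same substitution: instead of asking openssl to emit base64 directly (`openssl rand -base64 32`), the branch pipes 32 raw random bytes through `base64`. Purely as an illustration (not part of the diff), the equivalent of either command in Python is:

```python
import base64
import secrets

# 32 cryptographically secure random bytes, base64-encoded, suitable for
# PG_PASS / AUTHENTIK_SECRET_KEY style secrets.
print(base64.b64encode(secrets.token_bytes(32)).decode())
```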
.github/workflows/release-tag.yml (4 changes, vendored)
@@ -14,8 +14,8 @@ jobs:
       - uses: actions/checkout@v4
       - name: Pre-release test
        run: |
-          echo "PG_PASS=$(openssl rand -base64 32)" >> .env
-          echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
+          echo "PG_PASS=$(openssl rand 32 | base64)" >> .env
+          echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64)" >> .env
           docker buildx install
           mkdir -p ./gen-ts-api
           docker build -t testing:latest .
.vscode/settings.json (15 changes, vendored)
@@ -4,20 +4,21 @@
         "asgi",
         "authentik",
         "authn",
+        "entra",
         "goauthentik",
         "jwks",
+        "kubernetes",
         "oidc",
         "openid",
+        "passwordless",
         "plex",
         "saml",
-        "totp",
-        "webauthn",
-        "traefik",
-        "passwordless",
-        "kubernetes",
-        "sso",
-        "slo",
         "scim",
+        "slo",
+        "sso",
+        "totp",
+        "traefik",
+        "webauthn",
     ],
     "todo-tree.tree.showCountsInTree": true,
     "todo-tree.tree.showBadges": true,
@@ -1,7 +1,7 @@
 # syntax=docker/dockerfile:1

 # Stage 1: Build website
-FROM --platform=${BUILDPLATFORM} docker.io/node:21 as website-builder
+FROM --platform=${BUILDPLATFORM} docker.io/node:22 as website-builder

 ENV NODE_ENV=production

@@ -20,7 +20,7 @@ COPY ./SECURITY.md /work/
 RUN npm run build-bundled

 # Stage 2: Build webui
-FROM --platform=${BUILDPLATFORM} docker.io/node:21 as web-builder
+FROM --platform=${BUILDPLATFORM} docker.io/node:22 as web-builder

 ENV NODE_ENV=production

@@ -38,7 +38,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
 RUN npm run build

 # Stage 3: Build go proxy
-FROM --platform=${BUILDPLATFORM} docker.io/golang:1.22.2-bookworm AS go-builder
+FROM --platform=${BUILDPLATFORM} docker.io/golang:1.22.3-bookworm AS go-builder

 ARG TARGETOS
 ARG TARGETARCH
Makefile (5 changes)
@@ -19,6 +19,7 @@ pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
 CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
 	-I .github/codespell-words.txt \
 	-S 'web/src/locales/**' \
+	-S 'website/developer-docs/api/reference/**' \
 	authentik \
 	internal \
 	cmd \
@@ -46,8 +47,8 @@ test-go:
 	go test -timeout 0 -v -race -cover ./...

 test-docker: ## Run all tests in a docker-compose
-	echo "PG_PASS=$(openssl rand -base64 32)" >> .env
-	echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
+	echo "PG_PASS=$(shell openssl rand 32 | base64)" >> .env
+	echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64)" >> .env
 	docker compose pull -q
 	docker compose up --no-start
 	docker compose start postgresql redis
@@ -2,7 +2,7 @@

 from os import environ

-__version__ = "2024.4.4"
+__version__ = "2024.4.2"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

@@ -4,7 +4,6 @@ from collections.abc import Iterable
 from uuid import UUID

 from django.apps import apps
-from django.contrib.auth import get_user_model
 from django.db.models import Model, Q, QuerySet
 from django.utils.timezone import now
 from django.utils.translation import gettext as _
@@ -47,8 +46,6 @@ class Exporter:
     def get_model_instances(self, model: type[Model]) -> QuerySet:
         """Return a queryset for `model`. Can be used to filter some
         objects on some models"""
-        if model == get_user_model():
-            return model.objects.exclude_anonymous()
         return model.objects.all()

     def _pre_export(self, blueprint: Blueprint):
@@ -39,6 +39,14 @@ from authentik.core.models import (
 )
 from authentik.enterprise.license import LicenseKey
 from authentik.enterprise.models import LicenseUsage
+from authentik.enterprise.providers.google_workspace.models import (
+    GoogleWorkspaceProviderGroup,
+    GoogleWorkspaceProviderUser,
+)
+from authentik.enterprise.providers.microsoft_entra.models import (
+    MicrosoftEntraProviderGroup,
+    MicrosoftEntraProviderUser,
+)
 from authentik.enterprise.providers.rac.models import ConnectionToken
 from authentik.events.logs import LogEvent, capture_logs
 from authentik.events.models import SystemTask
@@ -86,6 +94,7 @@ def excluded_models() -> list[type[Model]]:
         # Classes that have other dependencies
         AuthenticatedSession,
         # Classes which are only internally managed
+        # FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin
         FlowToken,
         LicenseUsage,
         SCIMGroup,
@@ -100,6 +109,10 @@ def excluded_models() -> list[type[Model]]:
         WebAuthnDeviceType,
         SCIMSourceUser,
         SCIMSourceGroup,
+        GoogleWorkspaceProviderUser,
+        GoogleWorkspaceProviderGroup,
+        MicrosoftEntraProviderUser,
+        MicrosoftEntraProviderGroup,
     )

@@ -17,6 +17,7 @@ from rest_framework.fields import CharField, IntegerField, SerializerMethodField
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
+from rest_framework.validators import UniqueValidator
 from rest_framework.viewsets import ModelViewSet

 from authentik.core.api.used_by import UsedByMixin
@@ -100,7 +101,10 @@ class GroupSerializer(ModelSerializer):
         extra_kwargs = {
             "users": {
                 "default": list,
-            }
+            },
+            # TODO: This field isn't unique on the database which is hard to backport
+            # hence we just validate the uniqueness here
+            "name": {"validators": [UniqueValidator(Group.objects.all())]},
         }

@@ -63,8 +63,12 @@ class ProviderFilter(FilterSet):
     """Filter for providers"""

     application__isnull = BooleanFilter(method="filter_application__isnull")
-    backchannel_only = BooleanFilter(
-        method="filter_backchannel_only",
+    backchannel = BooleanFilter(
+        method="filter_backchannel",
+        label=_(
+            "When not set all providers are returned. When set to true, only backchannel "
+            "providers are returned. When set to false, backchannel providers are excluded"
+        ),
     )

     def filter_application__isnull(self, queryset: QuerySet, name, value):
@@ -75,8 +79,9 @@ class ProviderFilter(FilterSet):
             | Q(application__isnull=value)
         )

-    def filter_backchannel_only(self, queryset: QuerySet, name, value):
-        """Only return backchannel providers"""
+    def filter_backchannel(self, queryset: QuerySet, name, value):
+        """By default all providers are returned. When set to true, only backchannel providers are
+        returned. When set to false, backchannel providers are excluded"""
         return queryset.filter(is_backchannel=value)

@@ -45,13 +45,6 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):
         if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
             self.fields["key"] = CharField(required=False)

-    def validate_user(self, user: User):
-        """Ensure user of token cannot be changed"""
-        if self.instance and self.instance.user_id:
-            if user.pk != self.instance.user_id:
-                raise ValidationError("User cannot be changed")
-        return user
-
     def validate(self, attrs: dict[Any, str]) -> dict[Any, str]:
         """Ensure only API or App password tokens are created."""
         request: Request = self.context.get("request")
@@ -14,7 +14,6 @@ from rest_framework.request import Request
 from rest_framework.response import Response

 from authentik.core.api.utils import PassiveSerializer
-from authentik.rbac.filters import ObjectFilter


 class DeleteAction(Enum):
@@ -54,7 +53,7 @@ class UsedByMixin:
     @extend_schema(
         responses={200: UsedBySerializer(many=True)},
     )
-    @action(detail=True, pagination_class=None, filter_backends=[ObjectFilter])
+    @action(detail=True, pagination_class=None, filter_backends=[])
     def used_by(self, request: Request, *args, **kwargs) -> Response:
         """Get a list of all objects that use this object"""
         model: Model = self.get_object()
@@ -408,7 +408,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
     filterset_class = UsersFilter

     def get_queryset(self):
-        base_qs = User.objects.all().exclude_anonymous()
+        base_qs = User.objects.all()
         if self.serializer_class(context={"request": self.request})._should_include_groups:
             base_qs = base_qs.prefetch_related("ak_groups")
         return base_qs
@@ -1,7 +0,0 @@
-"""authentik core exceptions"""
-
-from authentik.lib.sentry import SentryIgnoredException
-
-
-class PropertyMappingExpressionException(SentryIgnoredException):
-    """Error when a PropertyMapping Exception expression could not be parsed or evaluated."""
@@ -6,6 +6,7 @@ from django.db.models import Model
 from django.http import HttpRequest
 from prometheus_client import Histogram

+from authentik.core.expression.exceptions import SkipObjectException
 from authentik.core.models import User
 from authentik.events.models import Event, EventAction
 from authentik.lib.expression.evaluator import BaseEvaluator
@@ -47,6 +48,7 @@ class PropertyMappingEvaluator(BaseEvaluator):
         self._context["request"] = req
         req.context.update(**kwargs)
         self._context.update(**kwargs)
+        self._globals["SkipObject"] = SkipObjectException
         self.dry_run = dry_run

     def handle_error(self, exc: Exception, expression_source: str):
authentik/core/expression/exceptions.py (new file, 13 lines)
@@ -0,0 +1,13 @@
+"""authentik core exceptions"""
+
+from authentik.lib.sentry import SentryIgnoredException
+
+
+class PropertyMappingExpressionException(SentryIgnoredException):
+    """Error when a PropertyMapping Exception expression could not be parsed or evaluated."""
+
+
+class SkipObjectException(PropertyMappingExpressionException):
+    """Exception which can be raised in a property mapping to skip syncing an object.
+    Only applies to Property mappings which sync objects, and not on mappings which transitively
+    apply to a single user"""
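The evaluator hunk above registers this new exception in the expression globals under the name SkipObject. As an illustration only (this expression is not part of the diff), a group property mapping used by an outgoing sync could raise it like this; the group variable is the one the Google Workspace client passes into mapping.evaluate further down:

```python
# Hypothetical property-mapping expression, not taken from the repository.
# Raising SkipObject tells the sync client to skip this object entirely.
if group.name.startswith("internal-"):
    raise SkipObject("internal groups are not synced")
return {"description": group.attributes.get("description", "")}
```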
@@ -10,7 +10,7 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor
 from django.db.models import Count

 import authentik.core.models
-import authentik.lib.models
+import authentik.lib.validators


 def migrate_sessions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
@@ -160,7 +160,7 @@ class Migration(migrations.Migration):
             field=models.TextField(
                 blank=True,
                 default="",
-                validators=[authentik.lib.models.DomainlessFormattedURLValidator()],
+                validators=[authentik.lib.validators.DomainlessFormattedURLValidator()],
             ),
         ),
         migrations.RunPython(
@@ -7,9 +7,10 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor


 def backport_is_backchannel(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
-    from authentik.core.models import BackchannelProvider
+    from authentik.providers.ldap.models import LDAPProvider
+    from authentik.providers.scim.models import SCIMProvider

-    for model in BackchannelProvider.__subclasses__():
+    for model in [LDAPProvider, SCIMProvider]:
         try:
             for obj in model.objects.only("is_backchannel"):
                 obj.is_backchannel = True
authentik/core/migrations/0036_user_group_soft_delete.py (new file, 23 lines)
@@ -0,0 +1,23 @@
+# Generated by Django 5.0.4 on 2024-04-23 16:59
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_core", "0035_alter_group_options_and_more"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="group",
+            name="deleted_at",
+            field=models.DateTimeField(blank=True, null=True),
+        ),
+        migrations.AddField(
+            model_name="user",
+            name="deleted_at",
+            field=models.DateTimeField(blank=True, null=True),
+        ),
+    ]
@@ -22,16 +22,18 @@ from rest_framework.serializers import Serializer
 from structlog.stdlib import get_logger

 from authentik.blueprints.models import ManagedModel
-from authentik.core.exceptions import PropertyMappingExpressionException
+from authentik.core.expression.exceptions import PropertyMappingExpressionException
 from authentik.core.types import UILoginButton, UserSettingSerializer
 from authentik.lib.avatars import get_avatar
 from authentik.lib.generators import generate_id
 from authentik.lib.models import (
     CreatedUpdatedModel,
-    DomainlessFormattedURLValidator,
     SerializerModel,
+    SoftDeleteModel,
+    SoftDeleteQuerySet,
 )
 from authentik.lib.utils.time import timedelta_from_string
+from authentik.lib.validators import DomainlessFormattedURLValidator
 from authentik.policies.models import PolicyBindingModel
 from authentik.tenants.models import DEFAULT_TOKEN_DURATION, DEFAULT_TOKEN_LENGTH
 from authentik.tenants.utils import get_current_tenant, get_unique_identifier
@@ -96,7 +98,7 @@ class UserTypes(models.TextChoices):
     INTERNAL_SERVICE_ACCOUNT = "internal_service_account"


-class Group(SerializerModel):
+class Group(SoftDeleteModel, SerializerModel):
     """Group model which supports a basic hierarchy and has attributes"""

     group_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
@@ -186,31 +188,21 @@
     ]


-class UserQuerySet(models.QuerySet):
-    """User queryset"""
-
-    def exclude_anonymous(self):
-        """Exclude anonymous user"""
-        return self.exclude(**{User.USERNAME_FIELD: settings.ANONYMOUS_USER_NAME})
-
-
 class UserManager(DjangoUserManager):
     """User manager that doesn't assign is_superuser and is_staff"""

     def get_queryset(self):
         """Create special user queryset"""
-        return UserQuerySet(self.model, using=self._db)
+        return SoftDeleteQuerySet(self.model, using=self._db).exclude(
+            **{User.USERNAME_FIELD: settings.ANONYMOUS_USER_NAME}
+        )

     def create_user(self, username, email=None, password=None, **extra_fields):
         """User manager that doesn't assign is_superuser and is_staff"""
         return self._create_user(username, email, password, **extra_fields)

-    def exclude_anonymous(self) -> QuerySet:
-        """Exclude anonymous user"""
-        return self.get_queryset().exclude_anonymous()
-

-class User(SerializerModel, GuardianUserMixin, AbstractUser):
+class User(SoftDeleteModel, SerializerModel, GuardianUserMixin, AbstractUser):
     """authentik User model, based on django's contrib auth user model."""

     uuid = models.UUIDField(default=uuid4, editable=False, unique=True)
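SoftDeleteModel and SoftDeleteQuerySet are imported from authentik.lib.models, which is not included in this compare view, so the following is only a sketch of what they could look like, inferred from the deleted_at columns added in migration 0036 and the manager change above. Note also that because UserManager.get_queryset now excludes the anonymous user by default, the call sites in this diff that drop .exclude_anonymous() (the blueprint exporter, UserViewSet, LicenseKey.base_user_qs) keep their previous behaviour.

```python
# Sketch under the assumptions stated above; the real implementation lives in
# authentik/lib/models.py and may differ.
from django.db import models
from django.utils.timezone import now


class SoftDeleteQuerySet(models.QuerySet):
    def delete(self):
        # Instead of removing rows, stamp them as deleted.
        return self.update(deleted_at=now())


class SoftDeleteModel(models.Model):
    """Mixin that replaces hard deletes with a deleted_at timestamp."""

    deleted_at = models.DateTimeField(blank=True, null=True)

    class Meta:
        abstract = True

    def delete(self, *args, **kwargs):
        self.deleted_at = now()
        self.save(update_fields=["deleted_at"])
```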
@@ -3,7 +3,7 @@
 from django.test import RequestFactory, TestCase
 from guardian.shortcuts import get_anonymous_user

-from authentik.core.exceptions import PropertyMappingExpressionException
+from authentik.core.expression.exceptions import PropertyMappingExpressionException
 from authentik.core.models import PropertyMapping
 from authentik.core.tests.utils import create_test_admin_user
 from authentik.events.models import Event, EventAction
@@ -66,14 +66,11 @@ class TestPropertyMappings(TestCase):
             expression="return request.http_request.path",
         )
         http_request = self.factory.get("/")
-        tmpl = (
-            """
-        res = ak_call_policy('%s')
+        tmpl = f"""
+        res = ak_call_policy('{expr.name}')
         result = [request.http_request.path, res.raw_result]
         return result
         """
-            % expr.name
-        )
         evaluator = PropertyMapping(expression=tmpl, name=generate_id())
         res = evaluator.evaluate(self.user, http_request)
         self.assertEqual(res, ["/", "/"])
@@ -13,8 +13,9 @@ from authentik.core.models import (
     USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME,
     Token,
     TokenIntents,
+    User,
 )
-from authentik.core.tests.utils import create_test_admin_user, create_test_user
+from authentik.core.tests.utils import create_test_admin_user
 from authentik.lib.generators import generate_id


@@ -23,7 +24,7 @@ class TestTokenAPI(APITestCase):

     def setUp(self) -> None:
         super().setUp()
-        self.user = create_test_user()
+        self.user = User.objects.create(username="testuser")
         self.admin = create_test_admin_user()
         self.client.force_login(self.user)

@@ -153,24 +154,6 @@ class TestTokenAPI(APITestCase):
         self.assertEqual(token.expiring, True)
         self.assertNotEqual(token.expires.timestamp(), expires.timestamp())

-    def test_token_change_user(self):
-        """Test creating a token and then changing the user"""
-        ident = generate_id()
-        response = self.client.post(reverse("authentik_api:token-list"), {"identifier": ident})
-        self.assertEqual(response.status_code, 201)
-        token = Token.objects.get(identifier=ident)
-        self.assertEqual(token.user, self.user)
-        self.assertEqual(token.intent, TokenIntents.INTENT_API)
-        self.assertEqual(token.expiring, True)
-        self.assertTrue(self.user.has_perm("authentik_core.view_token_key", token))
-        response = self.client.put(
-            reverse("authentik_api:token-detail", kwargs={"identifier": ident}),
-            data={"identifier": "user_token_poc_v3", "intent": "api", "user": self.admin.pk},
-        )
-        self.assertEqual(response.status_code, 400)
-        token.refresh_from_db()
-        self.assertEqual(token.user, self.user)
-
     def test_list(self):
         """Test Token List (Test normal authentication)"""
         Token.objects.all().delete()
@@ -36,7 +36,6 @@ from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg
 from authentik.crypto.models import CertificateKeyPair
 from authentik.events.models import Event, EventAction
 from authentik.rbac.decorators import permission_required
-from authentik.rbac.filters import ObjectFilter

 LOGGER = get_logger()

@@ -267,7 +266,7 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
         ],
         responses={200: CertificateDataSerializer(many=False)},
     )
-    @action(detail=True, pagination_class=None, filter_backends=[ObjectFilter])
+    @action(detail=True, pagination_class=None, filter_backends=[])
     def view_certificate(self, request: Request, pk: str) -> Response:
         """Return certificate-key pairs certificate and log access"""
         certificate: CertificateKeyPair = self.get_object()
@@ -297,7 +296,7 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
         ],
         responses={200: CertificateDataSerializer(many=False)},
     )
-    @action(detail=True, pagination_class=None, filter_backends=[ObjectFilter])
+    @action(detail=True, pagination_class=None, filter_backends=[])
     def view_private_key(self, request: Request, pk: str) -> Response:
         """Return certificate-key pairs private key and log access"""
         certificate: CertificateKeyPair = self.get_object()
@@ -214,46 +214,6 @@ class TestCrypto(APITestCase):
         self.assertEqual(200, response.status_code)
         self.assertIn("Content-Disposition", response)

-    def test_certificate_download_denied(self):
-        """Test certificate export (download)"""
-        self.client.logout()
-        keypair = create_test_cert()
-        response = self.client.get(
-            reverse(
-                "authentik_api:certificatekeypair-view-certificate",
-                kwargs={"pk": keypair.pk},
-            )
-        )
-        self.assertEqual(403, response.status_code)
-        response = self.client.get(
-            reverse(
-                "authentik_api:certificatekeypair-view-certificate",
-                kwargs={"pk": keypair.pk},
-            ),
-            data={"download": True},
-        )
-        self.assertEqual(403, response.status_code)
-
-    def test_private_key_download_denied(self):
-        """Test private_key export (download)"""
-        self.client.logout()
-        keypair = create_test_cert()
-        response = self.client.get(
-            reverse(
-                "authentik_api:certificatekeypair-view-private-key",
-                kwargs={"pk": keypair.pk},
-            )
-        )
-        self.assertEqual(403, response.status_code)
-        response = self.client.get(
-            reverse(
-                "authentik_api:certificatekeypair-view-private-key",
-                kwargs={"pk": keypair.pk},
-            ),
-            data={"download": True},
-        )
-        self.assertEqual(403, response.status_code)
-
     def test_used_by(self):
         """Test used_by endpoint"""
         self.client.force_login(create_test_admin_user())
@@ -286,26 +246,6 @@
             ],
         )

-    def test_used_by_denied(self):
-        """Test used_by endpoint"""
-        self.client.logout()
-        keypair = create_test_cert()
-        OAuth2Provider.objects.create(
-            name=generate_id(),
-            client_id="test",
-            client_secret=generate_key(),
-            authorization_flow=create_test_flow(),
-            redirect_uris="http://localhost",
-            signing_key=keypair,
-        )
-        response = self.client.get(
-            reverse(
-                "authentik_api:certificatekeypair-used-by",
-                kwargs={"pk": keypair.pk},
-            )
-        )
-        self.assertEqual(403, response.status_code)
-
     def test_discovery(self):
         """Test certificate discovery"""
         name = generate_id()
@@ -132,7 +132,7 @@ class LicenseKey:
     @staticmethod
     def base_user_qs() -> QuerySet:
         """Base query set for all users"""
-        return User.objects.all().exclude_anonymous().exclude(is_active=False)
+        return User.objects.all().exclude(is_active=False)

     @staticmethod
     def get_default_user_count():
@@ -0,0 +1,33 @@
+"""GoogleWorkspaceProviderGroup API Views"""
+
+from rest_framework.viewsets import ModelViewSet
+
+from authentik.core.api.sources import SourceSerializer
+from authentik.core.api.used_by import UsedByMixin
+from authentik.core.api.users import UserGroupSerializer
+from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProviderGroup
+
+
+class GoogleWorkspaceProviderGroupSerializer(SourceSerializer):
+    """GoogleWorkspaceProviderGroup Serializer"""
+
+    group_obj = UserGroupSerializer(source="group", read_only=True)
+
+    class Meta:
+
+        model = GoogleWorkspaceProviderGroup
+        fields = [
+            "id",
+            "group",
+            "group_obj",
+        ]
+
+
+class GoogleWorkspaceProviderGroupViewSet(UsedByMixin, ModelViewSet):
+    """GoogleWorkspaceProviderGroup Viewset"""
+
+    queryset = GoogleWorkspaceProviderGroup.objects.all().select_related("group")
+    serializer_class = GoogleWorkspaceProviderGroupSerializer
+    filterset_fields = ["provider__id", "group__name", "group__group_uuid"]
+    search_fields = ["provider__name", "group__name"]
+    ordering = ["group__name"]
@@ -0,0 +1,39 @@
+"""google Property mappings API Views"""
+
+from django_filters.filters import AllValuesMultipleFilter
+from django_filters.filterset import FilterSet
+from drf_spectacular.types import OpenApiTypes
+from drf_spectacular.utils import extend_schema_field
+from rest_framework.viewsets import ModelViewSet
+
+from authentik.core.api.propertymappings import PropertyMappingSerializer
+from authentik.core.api.used_by import UsedByMixin
+from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProviderMapping
+
+
+class GoogleWorkspaceProviderMappingSerializer(PropertyMappingSerializer):
+    """GoogleWorkspaceProviderMapping Serializer"""
+
+    class Meta:
+        model = GoogleWorkspaceProviderMapping
+        fields = PropertyMappingSerializer.Meta.fields
+
+
+class GoogleWorkspaceProviderMappingFilter(FilterSet):
+    """Filter for GoogleWorkspaceProviderMapping"""
+
+    managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))
+
+    class Meta:
+        model = GoogleWorkspaceProviderMapping
+        fields = "__all__"
+
+
+class GoogleWorkspaceProviderMappingViewSet(UsedByMixin, ModelViewSet):
+    """GoogleWorkspaceProviderMapping Viewset"""
+
+    queryset = GoogleWorkspaceProviderMapping.objects.all()
+    serializer_class = GoogleWorkspaceProviderMappingSerializer
+    filterset_class = GoogleWorkspaceProviderMappingFilter
+    search_fields = ["name"]
+    ordering = ["name"]
@@ -0,0 +1,54 @@
+"""Google Provider API Views"""
+
+from rest_framework.viewsets import ModelViewSet
+
+from authentik.core.api.providers import ProviderSerializer
+from authentik.core.api.used_by import UsedByMixin
+from authentik.enterprise.api import EnterpriseRequiredMixin
+from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
+from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync
+from authentik.lib.sync.outgoing.api import OutgoingSyncProviderStatusMixin
+
+
+class GoogleWorkspaceProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
+    """GoogleWorkspaceProvider Serializer"""
+
+    class Meta:
+        model = GoogleWorkspaceProvider
+        fields = [
+            "pk",
+            "name",
+            "property_mappings",
+            "property_mappings_group",
+            "component",
+            "assigned_backchannel_application_slug",
+            "assigned_backchannel_application_name",
+            "verbose_name",
+            "verbose_name_plural",
+            "meta_model_name",
+            "delegated_subject",
+            "credentials",
+            "scopes",
+            "exclude_users_service_account",
+            "filter_group",
+            "user_delete_action",
+            "group_delete_action",
+            "default_group_email_domain",
+        ]
+        extra_kwargs = {}
+
+
+class GoogleWorkspaceProviderViewSet(OutgoingSyncProviderStatusMixin, UsedByMixin, ModelViewSet):
+    """GoogleWorkspaceProvider Viewset"""
+
+    queryset = GoogleWorkspaceProvider.objects.all()
+    serializer_class = GoogleWorkspaceProviderSerializer
+    filterset_fields = [
+        "name",
+        "exclude_users_service_account",
+        "delegated_subject",
+        "filter_group",
+    ]
+    search_fields = ["name"]
+    ordering = ["name"]
+    sync_single_task = google_workspace_sync
authentik/enterprise/providers/google_workspace/api/users.py (new file, 33 lines)
@@ -0,0 +1,33 @@
+"""GoogleWorkspaceProviderUser API Views"""
+
+from rest_framework.viewsets import ModelViewSet
+
+from authentik.core.api.groups import GroupMemberSerializer
+from authentik.core.api.sources import SourceSerializer
+from authentik.core.api.used_by import UsedByMixin
+from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProviderUser
+
+
+class GoogleWorkspaceProviderUserSerializer(SourceSerializer):
+    """GoogleWorkspaceProviderUser Serializer"""
+
+    user_obj = GroupMemberSerializer(source="user", read_only=True)
+
+    class Meta:
+
+        model = GoogleWorkspaceProviderUser
+        fields = [
+            "id",
+            "user",
+            "user_obj",
+        ]
+
+
+class GoogleWorkspaceProviderUserViewSet(UsedByMixin, ModelViewSet):
+    """GoogleWorkspaceProviderUser Viewset"""
+
+    queryset = GoogleWorkspaceProviderUser.objects.all().select_related("user")
+    serializer_class = GoogleWorkspaceProviderUserSerializer
+    filterset_fields = ["provider__id", "user__username", "user__id"]
+    search_fields = ["provider__name", "user__username"]
+    ordering = ["user__username"]
authentik/enterprise/providers/google_workspace/apps.py (new file, 9 lines)
@@ -0,0 +1,9 @@
+from authentik.enterprise.apps import EnterpriseConfig
+
+
+class AuthentikEnterpriseProviderGoogleConfig(EnterpriseConfig):
+
+    name = "authentik.enterprise.providers.google_workspace"
+    label = "authentik_providers_google_workspace"
+    verbose_name = "authentik Enterprise.Providers.Google Workspace"
+    default = True
@@ -0,0 +1,74 @@
+from django.db.models import Model
+from django.http import HttpResponseBadRequest, HttpResponseNotFound
+from google.auth.exceptions import GoogleAuthError, TransportError
+from googleapiclient.discovery import build
+from googleapiclient.errors import Error, HttpError
+from googleapiclient.http import HttpRequest
+from httplib2 import HttpLib2Error, HttpLib2ErrorWithResponse
+
+from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
+from authentik.lib.sync.outgoing import HTTP_CONFLICT
+from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
+from authentik.lib.sync.outgoing.exceptions import (
+    BadRequestSyncException,
+    NotFoundSyncException,
+    ObjectExistsSyncException,
+    StopSync,
+    TransientSyncException,
+)
+
+
+class GoogleWorkspaceSyncClient[TModel: Model, TConnection: Model, TSchema: dict](
+    BaseOutgoingSyncClient[TModel, TConnection, TSchema, GoogleWorkspaceProvider]
+):
+    """Base client for syncing to google workspace"""
+
+    domains: list
+
+    def __init__(self, provider: GoogleWorkspaceProvider) -> None:
+        super().__init__(provider)
+        self.directory_service = build(
+            "admin",
+            "directory_v1",
+            cache_discovery=False,
+            **provider.google_credentials(),
+        )
+        self.__prefetch_domains()
+
+    def __prefetch_domains(self):
+        self.domains = []
+        domains = self._request(self.directory_service.domains().list(customer="my_customer"))
+        for domain in domains.get("domains", []):
+            domain_name = domain.get("domainName")
+            self.domains.append(domain_name)
+
+    def _request(self, request: HttpRequest):
+        try:
+            response = request.execute()
+        except GoogleAuthError as exc:
+            if isinstance(exc, TransportError):
+                raise TransientSyncException(f"Failed to send request: {str(exc)}") from exc
+            raise StopSync(exc) from exc
+        except HttpLib2Error as exc:
+            if isinstance(exc, HttpLib2ErrorWithResponse):
+                self._response_handle_status_code(request.body, exc.response.status, exc)
+            raise TransientSyncException(f"Failed to send request: {str(exc)}") from exc
+        except HttpError as exc:
+            self._response_handle_status_code(request.body, exc.status_code, exc)
+            raise TransientSyncException(f"Failed to send request: {str(exc)}") from exc
+        except Error as exc:
+            raise TransientSyncException(f"Failed to send request: {str(exc)}") from exc
+        return response
+
+    def _response_handle_status_code(self, request: dict, status_code: int, root_exc: Exception):
+        if status_code == HttpResponseNotFound.status_code:
+            raise NotFoundSyncException("Object not found") from root_exc
+        if status_code == HTTP_CONFLICT:
+            raise ObjectExistsSyncException("Object exists") from root_exc
+        if status_code == HttpResponseBadRequest.status_code:
+            raise BadRequestSyncException("Bad request", request) from root_exc
+
+    def check_email_valid(self, *emails: str):
+        for email in emails:
+            if not any(email.endswith(f"@{domain_name}") for domain_name in self.domains):
+                raise BadRequestSyncException(f"Invalid email domain: {email}")
@@ -0,0 +1,233 @@
+from deepmerge import always_merger
+from django.db import transaction
+from django.utils.text import slugify
+
+from authentik.core.expression.exceptions import (
+    PropertyMappingExpressionException,
+    SkipObjectException,
+)
+from authentik.core.models import Group
+from authentik.enterprise.providers.google_workspace.clients.base import GoogleWorkspaceSyncClient
+from authentik.enterprise.providers.google_workspace.models import (
+    GoogleWorkspaceProviderGroup,
+    GoogleWorkspaceProviderMapping,
+    GoogleWorkspaceProviderUser,
+)
+from authentik.events.models import Event, EventAction
+from authentik.lib.sync.outgoing.base import Direction
+from authentik.lib.sync.outgoing.exceptions import (
+    NotFoundSyncException,
+    ObjectExistsSyncException,
+    StopSync,
+    TransientSyncException,
+)
+from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
+from authentik.lib.utils.errors import exception_to_string
+
+
+class GoogleWorkspaceGroupClient(
+    GoogleWorkspaceSyncClient[Group, GoogleWorkspaceProviderGroup, dict]
+):
+    """Google client for groups"""
+
+    connection_type = GoogleWorkspaceProviderGroup
+    connection_type_query = "group"
+    can_discover = True
+
+    def to_schema(self, obj: Group, creating: bool) -> dict:
+        """Convert authentik group"""
+        raw_google_group = {
+            "email": f"{slugify(obj.name)}@{self.provider.default_group_email_domain}"
+        }
+        for mapping in (
+            self.provider.property_mappings_group.all().order_by("name").select_subclasses()
+        ):
+            if not isinstance(mapping, GoogleWorkspaceProviderMapping):
+                continue
+            try:
+                value = mapping.evaluate(
+                    user=None,
+                    request=None,
+                    group=obj,
+                    provider=self.provider,
+                    creating=creating,
+                )
+                if value is None:
+                    continue
+                always_merger.merge(raw_google_group, value)
+            except SkipObjectException as exc:
+                raise exc from exc
+            except (PropertyMappingExpressionException, ValueError) as exc:
+                # Value error can be raised when assigning invalid data to an attribute
+                Event.new(
+                    EventAction.CONFIGURATION_ERROR,
+                    message=f"Failed to evaluate property-mapping {exception_to_string(exc)}",
+                    mapping=mapping,
+                ).save()
+                raise StopSync(exc, obj, mapping) from exc
+        if not raw_google_group:
+            raise StopSync(ValueError("No group mappings configured"), obj)
+
+        return raw_google_group
+
+    def delete(self, obj: Group):
+        """Delete group"""
+        google_group = GoogleWorkspaceProviderGroup.objects.filter(
+            provider=self.provider, group=obj
+        ).first()
+        if not google_group:
+            self.logger.debug("Group does not exist in Google, skipping")
+            return None
+        with transaction.atomic():
+            if self.provider.group_delete_action == OutgoingSyncDeleteAction.DELETE:
+                self._request(
+                    self.directory_service.groups().delete(groupKey=google_group.google_id)
+                )
+            google_group.delete()
+
+    def create(self, group: Group):
+        """Create group from scratch and create a connection object"""
+        google_group = self.to_schema(group, True)
+        self.check_email_valid(google_group["email"])
+        with transaction.atomic():
+            try:
+                response = self._request(self.directory_service.groups().insert(body=google_group))
+            except ObjectExistsSyncException:
+                # group already exists in google workspace, so we can connect them manually
+                # for groups we need to fetch the group from google as we connect on
+                # ID and not group email
+                group_data = self._request(
+                    self.directory_service.groups().get(groupKey=google_group["email"])
+                )
+                return GoogleWorkspaceProviderGroup.objects.create(
+                    provider=self.provider, group=group, google_id=group_data["id"]
+                )
+            else:
+                return GoogleWorkspaceProviderGroup.objects.create(
+                    provider=self.provider, group=group, google_id=response["id"]
+                )
+
+    def update(self, group: Group, connection: GoogleWorkspaceProviderGroup):
+        """Update existing group"""
+        google_group = self.to_schema(group, False)
+        self.check_email_valid(google_group["email"])
+        try:
+            return self._request(
+                self.directory_service.groups().update(
+                    groupKey=connection.google_id,
+                    body=google_group,
+                )
+            )
+        except NotFoundSyncException:
+            # Resource missing is handled by self.write, which will re-create the group
+            raise
+
+    def write(self, obj: Group):
+        google_group, created = super().write(obj)
+        self.create_sync_members(obj, google_group)
+        return google_group, created
+
+    def create_sync_members(self, obj: Group, google_group: GoogleWorkspaceProviderGroup):
+        """Sync all members after a group was created"""
+        users = list(obj.users.order_by("id").values_list("id", flat=True))
+        connections = GoogleWorkspaceProviderUser.objects.filter(
+            provider=self.provider, user__pk__in=users
+        ).values_list("google_id", flat=True)
+        self._patch(google_group.google_id, Direction.add, connections)
+
+    def update_group(self, group: Group, action: Direction, users_set: set[int]):
+        """Update a groups members"""
+        if action == Direction.add:
+            return self._patch_add_users(group, users_set)
+        if action == Direction.remove:
+            return self._patch_remove_users(group, users_set)
+
+    def _patch(self, google_group_id: str, direction: Direction, members: list[str]):
+        for user in members:
+            try:
+                if direction == Direction.add:
+                    self._request(
+                        self.directory_service.members().insert(
+                            groupKey=google_group_id, body={"email": user}
+                        )
+                    )
+                if direction == Direction.remove:
+                    self._request(
+                        self.directory_service.members().delete(
+                            groupKey=google_group_id, memberKey=user
+                        )
+                    )
+            except ObjectExistsSyncException:
+                pass
+            except TransientSyncException:
+                raise
+
+    def _patch_add_users(self, group: Group, users_set: set[int]):
+        """Add users in users_set to group"""
+        if len(users_set) < 1:
+            return
+        google_group = GoogleWorkspaceProviderGroup.objects.filter(
+            provider=self.provider, group=group
+        ).first()
+        if not google_group:
+            self.logger.warning(
+                "could not sync group membership, group does not exist", group=group
+            )
+            return
+        user_ids = list(
+            GoogleWorkspaceProviderUser.objects.filter(
+                user__pk__in=users_set, provider=self.provider
+            ).values_list("google_id", flat=True)
+        )
+        if len(user_ids) < 1:
+            return
+        self._patch(google_group.google_id, Direction.add, user_ids)
+
+    def _patch_remove_users(self, group: Group, users_set: set[int]):
+        """Remove users in users_set from group"""
+        if len(users_set) < 1:
+            return
+        google_group = GoogleWorkspaceProviderGroup.objects.filter(
+            provider=self.provider, group=group
+        ).first()
+        if not google_group:
+            self.logger.warning(
+                "could not sync group membership, group does not exist", group=group
+            )
+            return
+        user_ids = list(
+            GoogleWorkspaceProviderUser.objects.filter(
+                user__pk__in=users_set, provider=self.provider
+            ).values_list("google_id", flat=True)
+        )
+        if len(user_ids) < 1:
+            return
+        self._patch(google_group.google_id, Direction.remove, user_ids)
+
+    def discover(self):
+        """Iterate through all groups and connect them with authentik groups if possible"""
+        request = self.directory_service.groups().list(
+            customer="my_customer", maxResults=500, orderBy="email"
+        )
+        while request:
+            response = request.execute()
+            for group in response.get("groups", []):
+                self._discover_single_group(group)
+            request = self.directory_service.groups().list_next(
+                previous_request=request, previous_response=response
+            )
+
+    def _discover_single_group(self, group: dict):
+        """handle discovery of a single group"""
+        google_name = group["name"]
|
google_id = group["id"]
|
||||||
|
matching_authentik_group = (
|
||||||
|
self.provider.get_object_qs(Group).filter(name=google_name).first()
|
||||||
|
)
|
||||||
|
if not matching_authentik_group:
|
||||||
|
return
|
||||||
|
GoogleWorkspaceProviderGroup.objects.get_or_create(
|
||||||
|
provider=self.provider,
|
||||||
|
group=matching_authentik_group,
|
||||||
|
google_id=google_id,
|
||||||
|
)
|
||||||
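Note for reviewers: the membership helpers above are normally driven by the m2m signal tasks registered later in this diff (signals.py/tasks.py), which presumably end up calling update_group. A minimal sketch of exercising the client directly, assuming an existing GoogleWorkspaceProvider instance `provider` and authentik Group `group`; the Direction import path is an assumption, not confirmed by this diff:

    # Illustrative sketch only, not part of the change set.
    from authentik.lib.sync.outgoing.base import Direction  # import path assumed

    client = provider.client_for_model(Group)           # returns GoogleWorkspaceGroupClient
    client.write(group)                                  # create/update the group, then sync all members
    client.update_group(group, Direction.add, {42})      # push a single added member (user pk 42)
    client.update_group(group, Direction.remove, {42})   # remove that member again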
@@ -0,0 +1,41 @@
from json import dumps

from httplib2 import Response


class MockHTTP:

    _recorded_requests = []
    _responses = {}

    def __init__(
        self,
        raise_on_unrecorded=True,
    ) -> None:
        self._recorded_requests = []
        self._responses = {}
        self.raise_on_unrecorded = raise_on_unrecorded

    def add_response(self, uri: str, body: str | dict = "", meta: dict | None = None, method="GET"):
        if isinstance(body, dict):
            body = dumps(body)
        self._responses[(uri, method.upper())] = (body, meta or {"status": "200"})

    def requests(self):
        return self._recorded_requests

    def request(
        self,
        uri,
        method="GET",
        body=None,
        headers=None,
        redirections=1,
        connection_type=None,
    ):
        key = (uri, method.upper())
        self._recorded_requests.append((uri, method, body, headers))
        if key not in self._responses and self.raise_on_unrecorded:
            raise AssertionError(key)
        body, meta = self._responses[key]
        return Response(meta), body.encode("utf-8")
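MockHTTP mimics the httplib2 request interface that googleapiclient drives, so the tests in this diff register canned responses per URL and method and then patch google_credentials to hand the mock to the API client. A condensed usage sketch, mirroring the test code further down (api_key stands in for any generated key):

    # Condensed from the tests below; not additional production code.
    http = MockHTTP()
    http.add_response(
        f"https://admin.googleapis.com/admin/directory/v1/users?key={api_key}&alt=json",
        method="POST",
        body={"primaryEmail": "jane@goauthentik.io"},
    )
    with patch(
        "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
        MagicMock(return_value={"developerKey": api_key, "http": http}),
    ):
        ...  # trigger a sync; every outgoing request is recorded in http.requests()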
authentik/enterprise/providers/google_workspace/clients/users.py (new file, 141 lines)
@@ -0,0 +1,141 @@
from deepmerge import always_merger
from django.db import transaction

from authentik.core.expression.exceptions import (
    PropertyMappingExpressionException,
    SkipObjectException,
)
from authentik.core.models import User
from authentik.enterprise.providers.google_workspace.clients.base import GoogleWorkspaceSyncClient
from authentik.enterprise.providers.google_workspace.models import (
    GoogleWorkspaceProviderMapping,
    GoogleWorkspaceProviderUser,
)
from authentik.events.models import Event, EventAction
from authentik.lib.sync.outgoing.exceptions import (
    ObjectExistsSyncException,
    StopSync,
    TransientSyncException,
)
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
from authentik.lib.utils.errors import exception_to_string
from authentik.policies.utils import delete_none_values


class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceProviderUser, dict]):
    """Sync authentik users into Google Workspace"""

    connection_type = GoogleWorkspaceProviderUser
    connection_type_query = "user"
    can_discover = True

    def to_schema(self, obj: User, creating: bool) -> dict:
        """Convert authentik user"""
        raw_google_user = {}
        for mapping in self.provider.property_mappings.all().order_by("name").select_subclasses():
            if not isinstance(mapping, GoogleWorkspaceProviderMapping):
                continue
            try:
                value = mapping.evaluate(
                    user=obj,
                    request=None,
                    provider=self.provider,
                    creating=creating,
                )
                if value is None:
                    continue
                always_merger.merge(raw_google_user, value)
            except SkipObjectException as exc:
                raise exc from exc
            except (PropertyMappingExpressionException, ValueError) as exc:
                # A ValueError can be raised when assigning invalid data to an attribute
                Event.new(
                    EventAction.CONFIGURATION_ERROR,
                    message=f"Failed to evaluate property-mapping {exception_to_string(exc)}",
                    mapping=mapping,
                ).save()
                raise StopSync(exc, obj, mapping) from exc
        if not raw_google_user:
            raise StopSync(ValueError("No user mappings configured"), obj)
        if "primaryEmail" not in raw_google_user:
            raw_google_user["primaryEmail"] = str(obj.email)
        return delete_none_values(raw_google_user)

    def delete(self, obj: User):
        """Delete user"""
        google_user = GoogleWorkspaceProviderUser.objects.filter(
            provider=self.provider, user=obj
        ).first()
        if not google_user:
            self.logger.debug("User does not exist in Google, skipping")
            return None
        with transaction.atomic():
            response = None
            if self.provider.user_delete_action == OutgoingSyncDeleteAction.DELETE:
                response = self._request(
                    self.directory_service.users().delete(userKey=google_user.google_id)
                )
            elif self.provider.user_delete_action == OutgoingSyncDeleteAction.SUSPEND:
                response = self._request(
                    self.directory_service.users().update(
                        userKey=google_user.google_id, body={"suspended": True}
                    )
                )
            google_user.delete()
            return response

    def create(self, user: User):
        """Create user from scratch and create a connection object"""
        google_user = self.to_schema(user, True)
        self.check_email_valid(
            google_user["primaryEmail"], *[x["address"] for x in google_user.get("emails", [])]
        )
        with transaction.atomic():
            try:
                response = self._request(self.directory_service.users().insert(body=google_user))
            except ObjectExistsSyncException:
                # User already exists in Google Workspace, so we can connect them manually
                return GoogleWorkspaceProviderUser.objects.create(
                    provider=self.provider, user=user, google_id=user.email
                )
            except TransientSyncException as exc:
                raise exc
            else:
                return GoogleWorkspaceProviderUser.objects.create(
                    provider=self.provider, user=user, google_id=response["primaryEmail"]
                )

    def update(self, user: User, connection: GoogleWorkspaceProviderUser):
        """Update existing user"""
        google_user = self.to_schema(user, False)
        self.check_email_valid(
            google_user["primaryEmail"], *[x["address"] for x in google_user.get("emails", [])]
        )
        self._request(
            self.directory_service.users().update(userKey=connection.google_id, body=google_user)
        )

    def discover(self):
        """Iterate through all users and connect them with authentik users if possible"""
        request = self.directory_service.users().list(
            customer="my_customer", maxResults=500, orderBy="email"
        )
        while request:
            response = request.execute()
            for user in response.get("users", []):
                self._discover_single_user(user)
            request = self.directory_service.users().list_next(
                previous_request=request, previous_response=response
            )

    def _discover_single_user(self, user: dict):
        """Handle discovery of a single user"""
        email = user["primaryEmail"]
        matching_authentik_user = self.provider.get_object_qs(User).filter(email=email).first()
        if not matching_authentik_user:
            return
        GoogleWorkspaceProviderUser.objects.get_or_create(
            provider=self.provider,
            user=matching_authentik_user,
            google_id=email,
        )
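The user body sent to Google is simply whatever dicts the provider's property mappings return, merged together; only primaryEmail is defaulted by to_schema. A hypothetical mapping expression (purely illustrative, not the blueprint that ships with this change) could therefore return keys from the Google Admin SDK user schema, using the user variable passed into evaluate above:

    # Hypothetical GoogleWorkspaceProviderMapping expression, illustrative only.
    # The returned keys follow the Google Admin SDK user schema, not this diff.
    return {
        "primaryEmail": f"{user.username}@example.com",
        "name": {
            "givenName": user.name.split(" ")[0],
            "familyName": user.name.split(" ")[-1],
        },
    }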
@@ -0,0 +1,167 @@
# Generated by Django 5.0.4 on 2024-05-07 16:03

import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("authentik_core", "0035_alter_group_options_and_more"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="GoogleWorkspaceProviderMapping",
            fields=[
                (
                    "propertymapping_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.propertymapping",
                    ),
                ),
            ],
            options={
                "verbose_name": "Google Workspace Provider Mapping",
                "verbose_name_plural": "Google Workspace Provider Mappings",
            },
            bases=("authentik_core.propertymapping",),
        ),
        migrations.CreateModel(
            name="GoogleWorkspaceProvider",
            fields=[
                (
                    "provider_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.provider",
                    ),
                ),
                ("delegated_subject", models.EmailField(max_length=254)),
                ("credentials", models.JSONField()),
                (
                    "scopes",
                    models.TextField(
                        default="https://www.googleapis.com/auth/admin.directory.user,https://www.googleapis.com/auth/admin.directory.group,https://www.googleapis.com/auth/admin.directory.group.member,https://www.googleapis.com/auth/admin.directory.domain.readonly"
                    ),
                ),
                ("default_group_email_domain", models.TextField()),
                ("exclude_users_service_account", models.BooleanField(default=False)),
                (
                    "user_delete_action",
                    models.TextField(
                        choices=[
                            ("do_nothing", "Do Nothing"),
                            ("delete", "Delete"),
                            ("suspend", "Suspend"),
                        ],
                        default="delete",
                    ),
                ),
                (
                    "group_delete_action",
                    models.TextField(
                        choices=[
                            ("do_nothing", "Do Nothing"),
                            ("delete", "Delete"),
                            ("suspend", "Suspend"),
                        ],
                        default="delete",
                    ),
                ),
                (
                    "filter_group",
                    models.ForeignKey(
                        default=None,
                        null=True,
                        on_delete=django.db.models.deletion.SET_DEFAULT,
                        to="authentik_core.group",
                    ),
                ),
                (
                    "property_mappings_group",
                    models.ManyToManyField(
                        blank=True,
                        default=None,
                        help_text="Property mappings used for group creation/updating.",
                        to="authentik_core.propertymapping",
                    ),
                ),
            ],
            options={
                "verbose_name": "Google Workspace Provider",
                "verbose_name_plural": "Google Workspace Providers",
            },
            bases=("authentik_core.provider", models.Model),
        ),
        migrations.CreateModel(
            name="GoogleWorkspaceProviderGroup",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
                    ),
                ),
                ("google_id", models.TextField()),
                (
                    "group",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="authentik_core.group"
                    ),
                ),
                (
                    "provider",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_providers_google_workspace.googleworkspaceprovider",
                    ),
                ),
            ],
            options={
                "unique_together": {("google_id", "group", "provider")},
            },
        ),
        migrations.CreateModel(
            name="GoogleWorkspaceProviderUser",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
                    ),
                ),
                ("google_id", models.TextField()),
                (
                    "provider",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_providers_google_workspace.googleworkspaceprovider",
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
            options={
                "unique_together": {("google_id", "user", "provider")},
            },
        ),
    ]
@@ -0,0 +1,179 @@
# Generated by Django 5.0.6 on 2024-05-09 12:57

import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    replaces = [
        ("authentik_providers_google_workspace", "0001_initial"),
        (
            "authentik_providers_google_workspace",
            "0002_alter_googleworkspaceprovidergroup_options_and_more",
        ),
    ]

    initial = True

    dependencies = [
        ("authentik_core", "0035_alter_group_options_and_more"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="GoogleWorkspaceProviderMapping",
            fields=[
                (
                    "propertymapping_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.propertymapping",
                    ),
                ),
            ],
            options={
                "verbose_name": "Google Workspace Provider Mapping",
                "verbose_name_plural": "Google Workspace Provider Mappings",
            },
            bases=("authentik_core.propertymapping",),
        ),
        migrations.CreateModel(
            name="GoogleWorkspaceProvider",
            fields=[
                (
                    "provider_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.provider",
                    ),
                ),
                ("delegated_subject", models.EmailField(max_length=254)),
                ("credentials", models.JSONField()),
                (
                    "scopes",
                    models.TextField(
                        default="https://www.googleapis.com/auth/admin.directory.user,https://www.googleapis.com/auth/admin.directory.group,https://www.googleapis.com/auth/admin.directory.group.member,https://www.googleapis.com/auth/admin.directory.domain.readonly"
                    ),
                ),
                ("default_group_email_domain", models.TextField()),
                ("exclude_users_service_account", models.BooleanField(default=False)),
                (
                    "user_delete_action",
                    models.TextField(
                        choices=[
                            ("do_nothing", "Do Nothing"),
                            ("delete", "Delete"),
                            ("suspend", "Suspend"),
                        ],
                        default="delete",
                    ),
                ),
                (
                    "group_delete_action",
                    models.TextField(
                        choices=[
                            ("do_nothing", "Do Nothing"),
                            ("delete", "Delete"),
                            ("suspend", "Suspend"),
                        ],
                        default="delete",
                    ),
                ),
                (
                    "filter_group",
                    models.ForeignKey(
                        default=None,
                        null=True,
                        on_delete=django.db.models.deletion.SET_DEFAULT,
                        to="authentik_core.group",
                    ),
                ),
                (
                    "property_mappings_group",
                    models.ManyToManyField(
                        blank=True,
                        default=None,
                        help_text="Property mappings used for group creation/updating.",
                        to="authentik_core.propertymapping",
                    ),
                ),
            ],
            options={
                "verbose_name": "Google Workspace Provider",
                "verbose_name_plural": "Google Workspace Providers",
            },
            bases=("authentik_core.provider", models.Model),
        ),
        migrations.CreateModel(
            name="GoogleWorkspaceProviderGroup",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
                    ),
                ),
                ("google_id", models.TextField()),
                (
                    "group",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="authentik_core.group"
                    ),
                ),
                (
                    "provider",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_providers_google_workspace.googleworkspaceprovider",
                    ),
                ),
            ],
            options={
                "unique_together": {("google_id", "group", "provider")},
                "verbose_name": "Google Workspace Provider Group",
                "verbose_name_plural": "Google Workspace Provider Groups",
            },
        ),
        migrations.CreateModel(
            name="GoogleWorkspaceProviderUser",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
                    ),
                ),
                ("google_id", models.TextField()),
                (
                    "provider",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_providers_google_workspace.googleworkspaceprovider",
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
            options={
                "unique_together": {("google_id", "user", "provider")},
                "verbose_name": "Google Workspace Provider User",
                "verbose_name_plural": "Google Workspace Provider Users",
            },
        ),
    ]
@@ -0,0 +1,27 @@
# Generated by Django 5.0.6 on 2024-05-08 14:35

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_providers_google_workspace", "0001_initial"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="googleworkspaceprovidergroup",
            options={
                "verbose_name": "Google Workspace Provider Group",
                "verbose_name_plural": "Google Workspace Provider Groups",
            },
        ),
        migrations.AlterModelOptions(
            name="googleworkspaceprovideruser",
            options={
                "verbose_name": "Google Workspace Provider User",
                "verbose_name_plural": "Google Workspace Provider Users",
            },
        ),
    ]
authentik/enterprise/providers/google_workspace/models.py (new file, 191 lines)
@@ -0,0 +1,191 @@
"""Google Workspace sync provider"""

from typing import Any, Self
from uuid import uuid4

from django.db import models
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from google.oauth2.service_account import Credentials
from rest_framework.serializers import Serializer

from authentik.core.models import (
    BackchannelProvider,
    Group,
    PropertyMapping,
    User,
    UserTypes,
)
from authentik.lib.models import SerializerModel
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction, OutgoingSyncProvider


def default_scopes() -> list[str]:
    return [
        "https://www.googleapis.com/auth/admin.directory.user",
        "https://www.googleapis.com/auth/admin.directory.group",
        "https://www.googleapis.com/auth/admin.directory.group.member",
        "https://www.googleapis.com/auth/admin.directory.domain.readonly",
    ]


class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
    """Sync users from authentik into Google Workspace."""

    delegated_subject = models.EmailField()
    credentials = models.JSONField()
    scopes = models.TextField(default=",".join(default_scopes()))

    default_group_email_domain = models.TextField()
    exclude_users_service_account = models.BooleanField(default=False)
    user_delete_action = models.TextField(
        choices=OutgoingSyncDeleteAction.choices, default=OutgoingSyncDeleteAction.DELETE
    )
    group_delete_action = models.TextField(
        choices=OutgoingSyncDeleteAction.choices, default=OutgoingSyncDeleteAction.DELETE
    )

    filter_group = models.ForeignKey(
        "authentik_core.group", on_delete=models.SET_DEFAULT, default=None, null=True
    )

    property_mappings_group = models.ManyToManyField(
        PropertyMapping,
        default=None,
        blank=True,
        help_text=_("Property mappings used for group creation/updating."),
    )

    def client_for_model(
        self, model: type[User | Group]
    ) -> BaseOutgoingSyncClient[User | Group, Any, Any, Self]:
        if issubclass(model, User):
            from authentik.enterprise.providers.google_workspace.clients.users import (
                GoogleWorkspaceUserClient,
            )

            return GoogleWorkspaceUserClient(self)
        if issubclass(model, Group):
            from authentik.enterprise.providers.google_workspace.clients.groups import (
                GoogleWorkspaceGroupClient,
            )

            return GoogleWorkspaceGroupClient(self)
        raise ValueError(f"Invalid model {model}")

    def get_object_qs(self, type: type[User | Group]) -> QuerySet[User | Group]:
        if type == User:
            # Get queryset of all users with consistent ordering
            # according to the provider's settings
            base = User.objects.all().exclude_anonymous()
            if self.exclude_users_service_account:
                base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
                    type=UserTypes.INTERNAL_SERVICE_ACCOUNT
                )
            if self.filter_group:
                base = base.filter(ak_groups__in=[self.filter_group])
            return base.order_by("pk")
        if type == Group:
            # Get queryset of all groups with consistent ordering
            return Group.objects.all().order_by("pk")
        raise ValueError(f"Invalid type {type}")

    def google_credentials(self):
        return {
            "credentials": Credentials.from_service_account_info(
                self.credentials, scopes=self.scopes.split(",")
            ).with_subject(self.delegated_subject),
        }

    @property
    def component(self) -> str:
        return "ak-provider-google-workspace-form"

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.enterprise.providers.google_workspace.api.providers import (
            GoogleWorkspaceProviderSerializer,
        )

        return GoogleWorkspaceProviderSerializer

    def __str__(self):
        return f"Google Workspace Provider {self.name}"

    class Meta:
        verbose_name = _("Google Workspace Provider")
        verbose_name_plural = _("Google Workspace Providers")


class GoogleWorkspaceProviderMapping(PropertyMapping):
    """Map authentik data to outgoing Google requests"""

    @property
    def component(self) -> str:
        return "ak-property-mapping-google-workspace-form"

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.enterprise.providers.google_workspace.api.property_mappings import (
            GoogleWorkspaceProviderMappingSerializer,
        )

        return GoogleWorkspaceProviderMappingSerializer

    def __str__(self):
        return f"Google Workspace Provider Mapping {self.name}"

    class Meta:
        verbose_name = _("Google Workspace Provider Mapping")
        verbose_name_plural = _("Google Workspace Provider Mappings")


class GoogleWorkspaceProviderUser(SerializerModel):
    """Mapping of a user and provider to a Google user ID"""

    id = models.UUIDField(primary_key=True, editable=False, default=uuid4)
    google_id = models.TextField()
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    provider = models.ForeignKey(GoogleWorkspaceProvider, on_delete=models.CASCADE)

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.enterprise.providers.google_workspace.api.users import (
            GoogleWorkspaceProviderUserSerializer,
        )

        return GoogleWorkspaceProviderUserSerializer

    class Meta:
        verbose_name = _("Google Workspace Provider User")
        verbose_name_plural = _("Google Workspace Provider Users")
        unique_together = (("google_id", "user", "provider"),)

    def __str__(self) -> str:
        return f"Google Workspace Provider User {self.user_id} to {self.provider_id}"


class GoogleWorkspaceProviderGroup(SerializerModel):
    """Mapping of a group and provider to a Google group ID"""

    id = models.UUIDField(primary_key=True, editable=False, default=uuid4)
    google_id = models.TextField()
    group = models.ForeignKey(Group, on_delete=models.CASCADE)
    provider = models.ForeignKey(GoogleWorkspaceProvider, on_delete=models.CASCADE)

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.enterprise.providers.google_workspace.api.groups import (
            GoogleWorkspaceProviderGroupSerializer,
        )

        return GoogleWorkspaceProviderGroupSerializer

    class Meta:
        verbose_name = _("Google Workspace Provider Group")
        verbose_name_plural = _("Google Workspace Provider Groups")
        unique_together = (("google_id", "group", "provider"),)

    def __str__(self) -> str:
        return f"Google Workspace Provider Group {self.group_id} to {self.provider_id}"
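google_credentials() only builds the delegated service-account credentials; the directory service itself is constructed in the sync client base class, which is not part of this diff. A rough sketch of that wiring, assuming the standard googleapiclient discovery API (not confirmed by this change set):

    # Sketch only: the actual construction lives in GoogleWorkspaceSyncClient, not shown here.
    from googleapiclient.discovery import build

    directory_service = build(
        "admin",
        "directory_v1",
        cache_discovery=False,
        # {"credentials": ...} in production, {"developerKey": ..., "http": MockHTTP()} in the tests
        **provider.google_credentials(),
    )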
authentik/enterprise/providers/google_workspace/settings.py (new file, 13 lines)
@@ -0,0 +1,13 @@
"""Google Workspace provider task settings"""

from celery.schedules import crontab

from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
    "providers_google_workspace_sync": {
        "task": "authentik.enterprise.providers.google_workspace.tasks.google_workspace_sync_all",
        "schedule": crontab(minute=fqdn_rand("google_workspace_sync_all"), hour="*/4"),
        "options": {"queue": "authentik_scheduled"},
    },
}
authentik/enterprise/providers/google_workspace/signals.py (new file, 16 lines)
@@ -0,0 +1,16 @@
"""Google provider signals"""

from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
from authentik.enterprise.providers.google_workspace.tasks import (
    google_workspace_sync,
    google_workspace_sync_direct,
    google_workspace_sync_m2m,
)
from authentik.lib.sync.outgoing.signals import register_signals

register_signals(
    GoogleWorkspaceProvider,
    task_sync_single=google_workspace_sync,
    task_sync_direct=google_workspace_sync_direct,
    task_sync_m2m=google_workspace_sync_m2m,
)
authentik/enterprise/providers/google_workspace/tasks.py (new file, 37 lines)
@@ -0,0 +1,37 @@
"""Google Provider tasks"""

from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
from authentik.events.system_tasks import SystemTask
from authentik.lib.sync.outgoing.exceptions import TransientSyncException
from authentik.lib.sync.outgoing.tasks import SyncTasks
from authentik.root.celery import CELERY_APP

sync_tasks = SyncTasks(GoogleWorkspaceProvider)


@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def google_workspace_sync_objects(*args, **kwargs):
    return sync_tasks.sync_objects(*args, **kwargs)


@CELERY_APP.task(
    base=SystemTask, bind=True, autoretry_for=(TransientSyncException,), retry_backoff=True
)
def google_workspace_sync(self, provider_pk: int, *args, **kwargs):
    """Run full sync for Google Workspace provider"""
    return sync_tasks.sync_single(self, provider_pk, google_workspace_sync_objects)


@CELERY_APP.task()
def google_workspace_sync_all():
    return sync_tasks.sync_all(google_workspace_sync)


@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def google_workspace_sync_direct(*args, **kwargs):
    return sync_tasks.sync_signal_direct(*args, **kwargs)


@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def google_workspace_sync_m2m(*args, **kwargs):
    return sync_tasks.sync_signal_m2m(*args, **kwargs)
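The beat schedule in settings.py queues google_workspace_sync_all every four hours; a single provider can also be synced on demand, which is exactly what the tests in this diff do:

    # Queue a full sync for one provider and block on the result (as used in the tests below).
    google_workspace_sync.delay(provider.pk).get()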
authentik/enterprise/providers/google_workspace/tests/fixtures/domains_list_v1.json (new vendored file, 14 lines)
@@ -0,0 +1,14 @@
{
    "kind": "admin#directory#domains",
    "etag": "\"a1kA7zE2sFLsHiFwgXN9G3effoc9grR2OwUu8_95xD4/uvC5HsKHylhnUtnRV6ZxINODtV0\"",
    "domains": [
        {
            "kind": "admin#directory#domain",
            "etag": "\"a1kA7zE2sFLsHiFwgXN9G3effoc9grR2OwUu8_95xD4/V4koSPWBFIWuIpAmUamO96QhTLo\"",
            "domainName": "goauthentik.io",
            "isPrimary": true,
            "verified": true,
            "creationTime": "1543048869840"
        }
    ]
}
@@ -0,0 +1,313 @@
"""Google Workspace Group tests"""

from unittest.mock import MagicMock, patch

from django.test import TestCase

from authentik.blueprints.tests import apply_blueprint
from authentik.core.models import Application, Group, User
from authentik.core.tests.utils import create_test_user
from authentik.enterprise.providers.google_workspace.clients.test_http import MockHTTP
from authentik.enterprise.providers.google_workspace.models import (
    GoogleWorkspaceProvider,
    GoogleWorkspaceProviderGroup,
    GoogleWorkspaceProviderMapping,
)
from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync
from authentik.events.models import Event, EventAction
from authentik.lib.generators import generate_id
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
from authentik.lib.tests.utils import load_fixture
from authentik.tenants.models import Tenant

domains_list_v1_mock = load_fixture("fixtures/domains_list_v1.json")


class GoogleWorkspaceGroupTests(TestCase):
    """Google Workspace Group tests"""

    @apply_blueprint("system/providers-google-workspace.yaml")
    def setUp(self) -> None:
        # Delete all users and groups, as the mocked HTTP responses only return one ID,
        # which will cause errors with multiple groups
        Tenant.objects.update(avatars="none")
        User.objects.all().exclude_anonymous().delete()
        Group.objects.all().delete()
        self.provider: GoogleWorkspaceProvider = GoogleWorkspaceProvider.objects.create(
            name=generate_id(),
            credentials={},
            delegated_subject="",
            exclude_users_service_account=True,
            default_group_email_domain="goauthentik.io",
        )
        self.app: Application = Application.objects.create(
            name=generate_id(),
            slug=generate_id(),
        )
        self.app.backchannel_providers.add(self.provider)
        self.provider.property_mappings.add(
            GoogleWorkspaceProviderMapping.objects.get(
                managed="goauthentik.io/providers/google_workspace/user"
            )
        )
        self.provider.property_mappings_group.add(
            GoogleWorkspaceProviderMapping.objects.get(
                managed="goauthentik.io/providers/google_workspace/group"
            )
        )
        self.api_key = generate_id()

    def test_group_create(self):
        """Test group creation"""
        uid = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
            domains_list_v1_mock,
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/groups?key={self.api_key}&alt=json",
            method="POST",
            body={"id": generate_id()},
        )
        with patch(
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            group = Group.objects.create(name=uid)
            google_group = GoogleWorkspaceProviderGroup.objects.filter(
                provider=self.provider, group=group
            ).first()
            self.assertIsNotNone(google_group)
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            self.assertEqual(len(http.requests()), 2)

    def test_group_create_update(self):
        """Test group updating"""
        uid = generate_id()
        ext_id = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
            domains_list_v1_mock,
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/groups?key={self.api_key}&alt=json",
            method="POST",
            body={"id": ext_id},
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/groups/{ext_id}?key={self.api_key}&alt=json",
            method="PUT",
            body={"id": ext_id},
        )
        with patch(
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            group = Group.objects.create(name=uid)
            google_group = GoogleWorkspaceProviderGroup.objects.filter(
                provider=self.provider, group=group
            ).first()
            self.assertIsNotNone(google_group)

            group.name = "new name"
            group.save()
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            self.assertEqual(len(http.requests()), 4)

    def test_group_create_delete(self):
        """Test group deletion"""
        uid = generate_id()
        ext_id = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
            domains_list_v1_mock,
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/groups?key={self.api_key}&alt=json",
            method="POST",
            body={"id": ext_id},
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/groups/{ext_id}?key={self.api_key}",
            method="DELETE",
        )
        with patch(
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            group = Group.objects.create(name=uid)
            google_group = GoogleWorkspaceProviderGroup.objects.filter(
                provider=self.provider, group=group
            ).first()
            self.assertIsNotNone(google_group)

            group.delete()
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            self.assertEqual(len(http.requests()), 4)

    def test_group_create_member_add(self):
        """Test group creation with member add"""
        uid = generate_id()
        ext_id = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
            domains_list_v1_mock,
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/groups?key={self.api_key}&alt=json",
            method="POST",
            body={"id": ext_id},
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/users?key={self.api_key}&alt=json",
            method="POST",
            body={"primaryEmail": f"{uid}@goauthentik.io"},
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/users/{uid}%40goauthentik.io?key={self.api_key}&alt=json",
            method="PUT",
            body={"primaryEmail": f"{uid}@goauthentik.io"},
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/groups/{ext_id}/members?key={self.api_key}&alt=json",
            method="POST",
        )
        with patch(
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            user = create_test_user(uid)
            group = Group.objects.create(name=uid)
            group.users.add(user)
            google_group = GoogleWorkspaceProviderGroup.objects.filter(
                provider=self.provider, group=group
            ).first()
            self.assertIsNotNone(google_group)
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            self.assertEqual(len(http.requests()), 8)

    def test_group_create_member_remove(self):
        """Test group creation with member remove"""
        uid = generate_id()
        ext_id = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
            domains_list_v1_mock,
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/groups?key={self.api_key}&alt=json",
            method="POST",
            body={"id": ext_id},
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/users?key={self.api_key}&alt=json",
            method="POST",
            body={"primaryEmail": f"{uid}@goauthentik.io"},
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/users/{uid}%40goauthentik.io?key={self.api_key}&alt=json",
            method="PUT",
            body={"primaryEmail": f"{uid}@goauthentik.io"},
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/groups/{ext_id}/members/{uid}%40goauthentik.io?key={self.api_key}",
            method="DELETE",
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/groups/{ext_id}/members?key={self.api_key}&alt=json",
            method="POST",
        )
        with patch(
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            user = create_test_user(uid)
            group = Group.objects.create(name=uid)
            group.users.add(user)
            google_group = GoogleWorkspaceProviderGroup.objects.filter(
                provider=self.provider, group=group
            ).first()
            self.assertIsNotNone(google_group)
            group.users.remove(user)

            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            self.assertEqual(len(http.requests()), 10)

    def test_group_create_delete_do_nothing(self):
        """Test group deletion (delete action = do nothing)"""
        self.provider.group_delete_action = OutgoingSyncDeleteAction.DO_NOTHING
        self.provider.save()
        uid = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
            domains_list_v1_mock,
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/groups?key={self.api_key}&alt=json",
            method="POST",
            body={"id": uid},
        )
        with patch(
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            group = Group.objects.create(name=uid)
            google_group = GoogleWorkspaceProviderGroup.objects.filter(
                provider=self.provider, group=group
            ).first()
            self.assertIsNotNone(google_group)

            group.delete()
            self.assertEqual(len(http.requests()), 3)
            self.assertFalse(
                GoogleWorkspaceProviderGroup.objects.filter(
                    provider=self.provider, group__name=uid
                ).exists()
            )

    def test_sync_task(self):
        """Test sync task (group discovery)"""
        uid = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
            domains_list_v1_mock,
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/users?customer=my_customer&maxResults=500&orderBy=email&key={self.api_key}&alt=json",
            method="GET",
            body={"users": []},
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/groups?customer=my_customer&maxResults=500&orderBy=email&key={self.api_key}&alt=json",
            method="GET",
            body={"groups": [{"id": uid, "name": uid}]},
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/groups/{uid}?key={self.api_key}&alt=json",
            method="PUT",
            body={"id": uid},
        )
        self.app.backchannel_providers.remove(self.provider)
        different_group = Group.objects.create(
            name=uid,
        )
        self.app.backchannel_providers.add(self.provider)
        with patch(
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            google_workspace_sync.delay(self.provider.pk).get()
            self.assertTrue(
                GoogleWorkspaceProviderGroup.objects.filter(
                    group=different_group, provider=self.provider
                ).exists()
            )
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            self.assertEqual(len(http.requests()), 5)
@@ -0,0 +1,287 @@
"""Google Workspace User tests"""

from json import loads
from unittest.mock import MagicMock, patch

from django.test import TestCase

from authentik.blueprints.tests import apply_blueprint
from authentik.core.models import Application, Group, User
from authentik.enterprise.providers.google_workspace.clients.test_http import MockHTTP
from authentik.enterprise.providers.google_workspace.models import (
    GoogleWorkspaceProvider,
    GoogleWorkspaceProviderMapping,
    GoogleWorkspaceProviderUser,
)
from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync
from authentik.events.models import Event, EventAction
from authentik.lib.generators import generate_id
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
from authentik.lib.tests.utils import load_fixture
from authentik.tenants.models import Tenant

domains_list_v1_mock = load_fixture("fixtures/domains_list_v1.json")


class GoogleWorkspaceUserTests(TestCase):
    """Google Workspace User tests"""

    @apply_blueprint("system/providers-google-workspace.yaml")
    def setUp(self) -> None:
        # Delete all users and groups, as the mocked HTTP responses only return one ID,
        # which will cause errors with multiple users
        Tenant.objects.update(avatars="none")
        User.objects.all().exclude_anonymous().delete()
        Group.objects.all().delete()
        self.provider: GoogleWorkspaceProvider = GoogleWorkspaceProvider.objects.create(
            name=generate_id(),
            credentials={},
            delegated_subject="",
            exclude_users_service_account=True,
            default_group_email_domain="goauthentik.io",
        )
        self.app: Application = Application.objects.create(
            name=generate_id(),
            slug=generate_id(),
        )
        self.app.backchannel_providers.add(self.provider)
        self.provider.property_mappings.add(
            GoogleWorkspaceProviderMapping.objects.get(
                managed="goauthentik.io/providers/google_workspace/user"
            )
        )
        self.provider.property_mappings_group.add(
            GoogleWorkspaceProviderMapping.objects.get(
                managed="goauthentik.io/providers/google_workspace/group"
            )
        )
        self.api_key = generate_id()

    def test_user_create(self):
        """Test user creation"""
        uid = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
            domains_list_v1_mock,
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/users?key={self.api_key}&alt=json",
            method="POST",
            body={"primaryEmail": f"{uid}@goauthentik.io"},
        )
        with patch(
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            user = User.objects.create(
                username=uid,
                name=f"{uid} {uid}",
                email=f"{uid}@goauthentik.io",
            )
            google_user = GoogleWorkspaceProviderUser.objects.filter(
                provider=self.provider, user=user
            ).first()
            self.assertIsNotNone(google_user)
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            self.assertEqual(len(http.requests()), 2)

    def test_user_create_update(self):
        """Test user updating"""
        uid = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
            domains_list_v1_mock,
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/users?key={self.api_key}&alt=json",
            method="POST",
            body={"primaryEmail": f"{uid}@goauthentik.io"},
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/users/{uid}%40goauthentik.io?key={self.api_key}&alt=json",
            method="PUT",
            body={"primaryEmail": f"{uid}@goauthentik.io"},
        )
        with patch(
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            user = User.objects.create(
                username=uid,
                name=f"{uid} {uid}",
                email=f"{uid}@goauthentik.io",
            )
            google_user = GoogleWorkspaceProviderUser.objects.filter(
                provider=self.provider, user=user
            ).first()
            self.assertIsNotNone(google_user)

            user.name = "new name"
            user.save()
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            self.assertEqual(len(http.requests()), 4)

    def test_user_create_delete(self):
        """Test user deletion"""
        uid = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
            domains_list_v1_mock,
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/users?key={self.api_key}&alt=json",
            method="POST",
            body={"primaryEmail": f"{uid}@goauthentik.io"},
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/users/{uid}%40goauthentik.io?key={self.api_key}",
            method="DELETE",
        )
        with patch(
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            user = User.objects.create(
                username=uid,
                name=f"{uid} {uid}",
                email=f"{uid}@goauthentik.io",
            )
            google_user = GoogleWorkspaceProviderUser.objects.filter(
                provider=self.provider, user=user
            ).first()
            self.assertIsNotNone(google_user)

            user.delete()
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            self.assertEqual(len(http.requests()), 4)

    def test_user_create_delete_suspend(self):
        """Test user deletion (delete action = Suspend)"""
        self.provider.user_delete_action = OutgoingSyncDeleteAction.SUSPEND
        self.provider.save()
        uid = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
            domains_list_v1_mock,
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/users?key={self.api_key}&alt=json",
            method="POST",
            body={"primaryEmail": f"{uid}@goauthentik.io"},
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/users/{uid}%40goauthentik.io?key={self.api_key}&alt=json",
            method="PUT",
            body={"primaryEmail": f"{uid}@goauthentik.io"},
        )
        with patch(
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            user = User.objects.create(
                username=uid,
                name=f"{uid} {uid}",
                email=f"{uid}@goauthentik.io",
            )
            google_user = GoogleWorkspaceProviderUser.objects.filter(
                provider=self.provider, user=user
            ).first()
            self.assertIsNotNone(google_user)

            user.delete()
            self.assertEqual(len(http.requests()), 4)
            _, _, body, _ = http.requests()[3]
            self.assertEqual(
                loads(body),
                {
                    "suspended": True,
                },
            )
            self.assertFalse(
                GoogleWorkspaceProviderUser.objects.filter(
                    provider=self.provider, user__username=uid
                ).exists()
            )

    def test_user_create_delete_do_nothing(self):
        """Test user deletion (delete action = do nothing)"""
        self.provider.user_delete_action = OutgoingSyncDeleteAction.DO_NOTHING
        self.provider.save()
        uid = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
            domains_list_v1_mock,
        )
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/users?key={self.api_key}&alt=json",
            method="POST",
            body={"primaryEmail": f"{uid}@goauthentik.io"},
        )
        with patch(
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            user = User.objects.create(
|
||||||
|
username=uid,
|
||||||
|
name=f"{uid} {uid}",
|
||||||
|
email=f"{uid}@goauthentik.io",
|
||||||
|
)
|
||||||
|
google_user = GoogleWorkspaceProviderUser.objects.filter(
|
||||||
|
provider=self.provider, user=user
|
||||||
|
).first()
|
||||||
|
self.assertIsNotNone(google_user)
|
||||||
|
|
||||||
|
user.delete()
|
||||||
|
self.assertEqual(len(http.requests()), 3)
|
||||||
|
self.assertFalse(
|
||||||
|
GoogleWorkspaceProviderUser.objects.filter(
|
||||||
|
provider=self.provider, user__username=uid
|
||||||
|
).exists()
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_sync_task(self):
|
||||||
|
"""Test user discovery"""
|
||||||
|
uid = generate_id()
|
||||||
|
http = MockHTTP()
|
||||||
|
http.add_response(
|
||||||
|
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
|
||||||
|
domains_list_v1_mock,
|
||||||
|
)
|
||||||
|
http.add_response(
|
||||||
|
f"https://admin.googleapis.com/admin/directory/v1/users?customer=my_customer&maxResults=500&orderBy=email&key={self.api_key}&alt=json",
|
||||||
|
method="GET",
|
||||||
|
body={"users": [{"primaryEmail": f"{uid}@goauthentik.io"}]},
|
||||||
|
)
|
||||||
|
http.add_response(
|
||||||
|
f"https://admin.googleapis.com/admin/directory/v1/groups?customer=my_customer&maxResults=500&orderBy=email&key={self.api_key}&alt=json",
|
||||||
|
method="GET",
|
||||||
|
body={"groups": []},
|
||||||
|
)
|
||||||
|
http.add_response(
|
||||||
|
f"https://admin.googleapis.com/admin/directory/v1/users/{uid}%40goauthentik.io?key={self.api_key}&alt=json",
|
||||||
|
method="PUT",
|
||||||
|
body={"primaryEmail": f"{uid}@goauthentik.io"},
|
||||||
|
)
|
||||||
|
self.app.backchannel_providers.remove(self.provider)
|
||||||
|
different_user = User.objects.create(
|
||||||
|
username=uid,
|
||||||
|
email=f"{uid}@goauthentik.io",
|
||||||
|
)
|
||||||
|
self.app.backchannel_providers.add(self.provider)
|
||||||
|
with patch(
|
||||||
|
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
|
||||||
|
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
|
||||||
|
):
|
||||||
|
google_workspace_sync.delay(self.provider.pk).get()
|
||||||
|
self.assertTrue(
|
||||||
|
GoogleWorkspaceProviderUser.objects.filter(
|
||||||
|
user=different_user, provider=self.provider
|
||||||
|
).exists()
|
||||||
|
)
|
||||||
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
|
self.assertEqual(len(http.requests()), 5)
|
||||||
authentik/enterprise/providers/google_workspace/urls.py (new file, 21 lines)
@@ -0,0 +1,21 @@
"""google provider urls"""

from authentik.enterprise.providers.google_workspace.api.groups import (
    GoogleWorkspaceProviderGroupViewSet,
)
from authentik.enterprise.providers.google_workspace.api.property_mappings import (
    GoogleWorkspaceProviderMappingViewSet,
)
from authentik.enterprise.providers.google_workspace.api.providers import (
    GoogleWorkspaceProviderViewSet,
)
from authentik.enterprise.providers.google_workspace.api.users import (
    GoogleWorkspaceProviderUserViewSet,
)

api_urlpatterns = [
    ("providers/google_workspace", GoogleWorkspaceProviderViewSet),
    ("providers/google_workspace_users", GoogleWorkspaceProviderUserViewSet),
    ("providers/google_workspace_groups", GoogleWorkspaceProviderGroupViewSet),
    ("propertymappings/provider/google_workspace", GoogleWorkspaceProviderMappingViewSet),
]

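Note: `api_urlpatterns` above pairs a URL prefix with a DRF viewset; authentik's API router is expected to consume these entries itself. As a minimal sketch only, assuming a plain Django REST Framework `DefaultRouter` (the router variable and manual wiring here are illustrative, not part of this diff), the equivalent registration would look like:

    # Illustrative only: how (prefix, viewset) pairs like the ones above could be
    # registered manually with a standard DRF router.
    from rest_framework.routers import DefaultRouter

    router = DefaultRouter()
    for prefix, viewset in api_urlpatterns:
        router.register(prefix, viewset)

    urlpatterns = router.urls
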
authentik/enterprise/providers/microsoft_entra/api/groups.py (new file, 33 lines)
@@ -0,0 +1,33 @@
"""MicrosoftEntraProviderGroup API Views"""

from rest_framework.viewsets import ModelViewSet

from authentik.core.api.sources import SourceSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.users import UserGroupSerializer
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProviderGroup


class MicrosoftEntraProviderGroupSerializer(SourceSerializer):
    """MicrosoftEntraProviderGroup Serializer"""

    group_obj = UserGroupSerializer(source="group", read_only=True)

    class Meta:

        model = MicrosoftEntraProviderGroup
        fields = [
            "id",
            "group",
            "group_obj",
        ]


class MicrosoftEntraProviderGroupViewSet(UsedByMixin, ModelViewSet):
    """MicrosoftEntraProviderGroup Viewset"""

    queryset = MicrosoftEntraProviderGroup.objects.all().select_related("group")
    serializer_class = MicrosoftEntraProviderGroupSerializer
    filterset_fields = ["provider__id", "group__name", "group__group_uuid"]
    search_fields = ["provider__name", "group__name"]
    ordering = ["group__name"]

@@ -0,0 +1,39 @@
"""microsoft Property mappings API Views"""

from django_filters.filters import AllValuesMultipleFilter
from django_filters.filterset import FilterSet
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema_field
from rest_framework.viewsets import ModelViewSet

from authentik.core.api.propertymappings import PropertyMappingSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProviderMapping


class MicrosoftEntraProviderMappingSerializer(PropertyMappingSerializer):
    """MicrosoftEntraProviderMapping Serializer"""

    class Meta:
        model = MicrosoftEntraProviderMapping
        fields = PropertyMappingSerializer.Meta.fields


class MicrosoftEntraProviderMappingFilter(FilterSet):
    """Filter for MicrosoftEntraProviderMapping"""

    managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))

    class Meta:
        model = MicrosoftEntraProviderMapping
        fields = "__all__"


class MicrosoftEntraProviderMappingViewSet(UsedByMixin, ModelViewSet):
    """MicrosoftEntraProviderMapping Viewset"""

    queryset = MicrosoftEntraProviderMapping.objects.all()
    serializer_class = MicrosoftEntraProviderMappingSerializer
    filterset_class = MicrosoftEntraProviderMappingFilter
    search_fields = ["name"]
    ordering = ["name"]

@@ -0,0 +1,52 @@
"""Microsoft Provider API Views"""

from rest_framework.viewsets import ModelViewSet

from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
from authentik.enterprise.providers.microsoft_entra.tasks import microsoft_entra_sync
from authentik.lib.sync.outgoing.api import OutgoingSyncProviderStatusMixin


class MicrosoftEntraProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
    """MicrosoftEntraProvider Serializer"""

    class Meta:
        model = MicrosoftEntraProvider
        fields = [
            "pk",
            "name",
            "property_mappings",
            "property_mappings_group",
            "component",
            "assigned_backchannel_application_slug",
            "assigned_backchannel_application_name",
            "verbose_name",
            "verbose_name_plural",
            "meta_model_name",
            "client_id",
            "client_secret",
            "tenant_id",
            "exclude_users_service_account",
            "filter_group",
            "user_delete_action",
            "group_delete_action",
        ]
        extra_kwargs = {}


class MicrosoftEntraProviderViewSet(OutgoingSyncProviderStatusMixin, UsedByMixin, ModelViewSet):
    """MicrosoftEntraProvider Viewset"""

    queryset = MicrosoftEntraProvider.objects.all()
    serializer_class = MicrosoftEntraProviderSerializer
    filterset_fields = [
        "name",
        "exclude_users_service_account",
        "filter_group",
    ]
    search_fields = ["name"]
    ordering = ["name"]
    sync_single_task = microsoft_entra_sync

authentik/enterprise/providers/microsoft_entra/api/users.py (new file, 33 lines)
@@ -0,0 +1,33 @@
"""MicrosoftEntraProviderUser API Views"""

from rest_framework.viewsets import ModelViewSet

from authentik.core.api.groups import GroupMemberSerializer
from authentik.core.api.sources import SourceSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProviderUser


class MicrosoftEntraProviderUserSerializer(SourceSerializer):
    """MicrosoftEntraProviderUser Serializer"""

    user_obj = GroupMemberSerializer(source="user", read_only=True)

    class Meta:

        model = MicrosoftEntraProviderUser
        fields = [
            "id",
            "user",
            "user_obj",
        ]


class MicrosoftEntraProviderUserViewSet(UsedByMixin, ModelViewSet):
    """MicrosoftEntraProviderUser Viewset"""

    queryset = MicrosoftEntraProviderUser.objects.all().select_related("user")
    serializer_class = MicrosoftEntraProviderUserSerializer
    filterset_fields = ["provider__id", "user__username", "user__id"]
    search_fields = ["provider__name", "user__username"]
    ordering = ["user__username"]

authentik/enterprise/providers/microsoft_entra/apps.py (new file, 9 lines)
@@ -0,0 +1,9 @@
from authentik.enterprise.apps import EnterpriseConfig


class AuthentikEnterpriseProviderMicrosoftEntraConfig(EnterpriseConfig):

    name = "authentik.enterprise.providers.microsoft_entra"
    label = "authentik_providers_microsoft_entra"
    verbose_name = "authentik Enterprise.Providers.Microsoft Entra"
    default = True

authentik/enterprise/providers/microsoft_entra/clients/base.py (new file, 100 lines)
@@ -0,0 +1,100 @@
from asyncio import run
from collections.abc import Coroutine
from typing import Any

from azure.core.exceptions import (
    ClientAuthenticationError,
    ServiceRequestError,
    ServiceResponseError,
)
from azure.identity.aio import ClientSecretCredential
from django.db.models import Model
from django.http import HttpResponseBadRequest, HttpResponseNotFound
from kiota_abstractions.api_error import APIError
from kiota_authentication_azure.azure_identity_authentication_provider import (
    AzureIdentityAuthenticationProvider,
)
from kiota_http.kiota_client_factory import KiotaClientFactory
from msgraph.generated.models.o_data_errors.o_data_error import ODataError
from msgraph.graph_request_adapter import GraphRequestAdapter, options
from msgraph.graph_service_client import GraphServiceClient
from msgraph_core import GraphClientFactory

from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
from authentik.lib.sync.outgoing import HTTP_CONFLICT
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.exceptions import (
    BadRequestSyncException,
    NotFoundSyncException,
    ObjectExistsSyncException,
    StopSync,
    TransientSyncException,
)


def get_request_adapter(
    credentials: ClientSecretCredential, scopes: list[str] | None = None
) -> GraphRequestAdapter:
    if scopes:
        auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials, scopes=scopes)
    else:
        auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials)

    return GraphRequestAdapter(
        auth_provider=auth_provider,
        client=GraphClientFactory.create_with_default_middleware(
            options=options, client=KiotaClientFactory.get_default_client()
        ),
    )


class MicrosoftEntraSyncClient[TModel: Model, TConnection: Model, TSchema: dict](
    BaseOutgoingSyncClient[TModel, TConnection, TSchema, MicrosoftEntraProvider]
):
    """Base client for syncing to microsoft entra"""

    domains: list

    def __init__(self, provider: MicrosoftEntraProvider) -> None:
        super().__init__(provider)
        self.credentials = provider.microsoft_credentials()
        self.__prefetch_domains()

    @property
    def client(self):
        return GraphServiceClient(request_adapter=get_request_adapter(**self.credentials))

    def _request[T](self, request: Coroutine[Any, Any, T]) -> T:
        try:
            return run(request)
        except ClientAuthenticationError as exc:
            raise StopSync(exc, None, None) from exc
        except ODataError as exc:
            raise StopSync(exc, None, None) from exc
        except (ServiceRequestError, ServiceResponseError) as exc:
            raise TransientSyncException("Failed to send request") from exc
        except APIError as exc:
            if exc.response_status_code == HttpResponseNotFound.status_code:
                raise NotFoundSyncException("Object not found") from exc
            if exc.response_status_code == HttpResponseBadRequest.status_code:
                raise BadRequestSyncException("Bad request", exc.response_headers) from exc
            if exc.response_status_code == HTTP_CONFLICT:
                raise ObjectExistsSyncException("Object exists", exc.response_headers) from exc
            raise exc

    def __prefetch_domains(self):
        self.domains = []
        organizations = self._request(self.client.organization.get())
        next_link = True
        while next_link:
            for org in organizations.value:
                self.domains.extend([x.name for x in org.verified_domains])
            next_link = organizations.odata_next_link
            if not next_link:
                break
            organizations = self._request(self.client.organization.with_url(next_link).get())

    def check_email_valid(self, *emails: str):
        for email in emails:
            if not any(email.endswith(f"@{domain_name}") for domain_name in self.domains):
                raise BadRequestSyncException(f"Invalid email domain: {email}")

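Note: the `_request` helper above runs each async Graph call to completion and maps transport and API errors onto authentik's sync exceptions (404 → NotFoundSyncException, 400 → BadRequestSyncException, 409 → ObjectExistsSyncException, network errors → TransientSyncException). A minimal illustrative sketch of how a sync client built on this base surfaces those exceptions; the helper function, variable names, and the `by_user_id(...).get()` call are assumptions for illustration, not taken verbatim from this diff:

    # Illustrative only: consuming the error mapping provided by _request above.
    from authentik.lib.sync.outgoing.exceptions import (
        NotFoundSyncException,
        TransientSyncException,
    )

    def fetch_user_safely(sync_client, microsoft_id: str):
        """Return the Graph user, or None if Entra reports 404; re-raise transient errors."""
        try:
            return sync_client._request(sync_client.client.users.by_user_id(microsoft_id).get())
        except NotFoundSyncException:
            # a Graph 404 was already translated by _request
            return None
        except TransientSyncException:
            # network-level failures bubble up so Celery's autoretry can re-run the task
            raise
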
authentik/enterprise/providers/microsoft_entra/clients/groups.py (new file, 241 lines)
@@ -0,0 +1,241 @@
from deepmerge import always_merger
from django.db import transaction
from msgraph.generated.groups.groups_request_builder import GroupsRequestBuilder
from msgraph.generated.models.group import Group as MSGroup
from msgraph.generated.models.reference_create import ReferenceCreate

from authentik.core.expression.exceptions import (
    PropertyMappingExpressionException,
    SkipObjectException,
)
from authentik.core.models import Group
from authentik.enterprise.providers.microsoft_entra.clients.base import MicrosoftEntraSyncClient
from authentik.enterprise.providers.microsoft_entra.models import (
    MicrosoftEntraProviderGroup,
    MicrosoftEntraProviderMapping,
    MicrosoftEntraProviderUser,
)
from authentik.events.models import Event, EventAction
from authentik.lib.sync.outgoing.base import Direction
from authentik.lib.sync.outgoing.exceptions import (
    NotFoundSyncException,
    ObjectExistsSyncException,
    StopSync,
    TransientSyncException,
)
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
from authentik.lib.utils.errors import exception_to_string


class MicrosoftEntraGroupClient(
    MicrosoftEntraSyncClient[Group, MicrosoftEntraProviderGroup, MSGroup]
):
    """Microsoft client for groups"""

    connection_type = MicrosoftEntraProviderGroup
    connection_type_query = "group"
    can_discover = True

    def to_schema(self, obj: Group, creating: bool) -> MSGroup:
        """Convert authentik group"""
        raw_microsoft_group = {}
        for mapping in (
            self.provider.property_mappings_group.all().order_by("name").select_subclasses()
        ):
            if not isinstance(mapping, MicrosoftEntraProviderMapping):
                continue
            try:
                value = mapping.evaluate(
                    user=None,
                    request=None,
                    group=obj,
                    provider=self.provider,
                    creating=creating,
                )
                if value is None:
                    continue
                always_merger.merge(raw_microsoft_group, value)
            except SkipObjectException as exc:
                raise exc from exc
            except (PropertyMappingExpressionException, ValueError) as exc:
                # Value error can be raised when assigning invalid data to an attribute
                Event.new(
                    EventAction.CONFIGURATION_ERROR,
                    message=f"Failed to evaluate property-mapping {exception_to_string(exc)}",
                    mapping=mapping,
                ).save()
                raise StopSync(exc, obj, mapping) from exc
        if not raw_microsoft_group:
            raise StopSync(ValueError("No group mappings configured"), obj)
        try:
            return MSGroup(**raw_microsoft_group)
        except TypeError as exc:
            raise StopSync(exc, obj) from exc

    def delete(self, obj: Group):
        """Delete group"""
        microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
            provider=self.provider, group=obj
        ).first()
        if not microsoft_group:
            self.logger.debug("Group does not exist in Microsoft, skipping")
            return None
        with transaction.atomic():
            if self.provider.group_delete_action == OutgoingSyncDeleteAction.DELETE:
                self._request(self.client.groups.by_group_id(microsoft_group.microsoft_id).delete())
            microsoft_group.delete()

    def create(self, group: Group):
        """Create group from scratch and create a connection object"""
        microsoft_group = self.to_schema(group, True)
        with transaction.atomic():
            try:
                response = self._request(self.client.groups.post(microsoft_group))
            except ObjectExistsSyncException:
                # group already exists in microsoft entra, so we can connect them manually
                # for groups we need to fetch the group from microsoft as we connect on
                # ID and not group email
                query_params = GroupsRequestBuilder.GroupsRequestBuilderGetQueryParameters(
                    filter=f"displayName eq '{microsoft_group.display_name}'",
                )
                request_configuration = (
                    GroupsRequestBuilder.GroupsRequestBuilderGetRequestConfiguration(
                        query_parameters=query_params,
                    )
                )
                group_data = self._request(self.client.groups.get(request_configuration))
                if group_data.odata_count < 1:
                    self.logger.warning(
                        "Group which could not be created also does not exist", group=group
                    )
                    return
                return MicrosoftEntraProviderGroup.objects.create(
                    provider=self.provider, group=group, microsoft_id=group_data.value[0].id
                )
            else:
                return MicrosoftEntraProviderGroup.objects.create(
                    provider=self.provider, group=group, microsoft_id=response.id
                )

    def update(self, group: Group, connection: MicrosoftEntraProviderGroup):
        """Update existing group"""
        microsoft_group = self.to_schema(group, False)
        microsoft_group.id = connection.microsoft_id
        try:
            return self._request(
                self.client.groups.by_group_id(connection.microsoft_id).patch(microsoft_group)
            )
        except NotFoundSyncException:
            # Resource missing is handled by self.write, which will re-create the group
            raise

    def write(self, obj: Group):
        microsoft_group, created = super().write(obj)
        self.create_sync_members(obj, microsoft_group)
        return microsoft_group, created

    def create_sync_members(self, obj: Group, microsoft_group: MicrosoftEntraProviderGroup):
        """Sync all members after a group was created"""
        users = list(obj.users.order_by("id").values_list("id", flat=True))
        connections = MicrosoftEntraProviderUser.objects.filter(
            provider=self.provider, user__pk__in=users
        ).values_list("microsoft_id", flat=True)
        self._patch(microsoft_group.microsoft_id, Direction.add, connections)

    def update_group(self, group: Group, action: Direction, users_set: set[int]):
        """Update a groups members"""
        if action == Direction.add:
            return self._patch_add_users(group, users_set)
        if action == Direction.remove:
            return self._patch_remove_users(group, users_set)

    def _patch(self, microsoft_group_id: str, direction: Direction, members: list[str]):
        for user in members:
            try:
                if direction == Direction.add:
                    request_body = ReferenceCreate(
                        odata_id=f"https://graph.microsoft.com/v1.0/directoryObjects/{user}",
                    )
                    self._request(
                        self.client.groups.by_group_id(microsoft_group_id).members.ref.post(
                            request_body
                        )
                    )
                if direction == Direction.remove:
                    self._request(
                        self.client.groups.by_group_id(microsoft_group_id)
                        .members.by_directory_object_id(user)
                        .ref.delete()
                    )
            except ObjectExistsSyncException:
                pass
            except TransientSyncException:
                raise

    def _patch_add_users(self, group: Group, users_set: set[int]):
        """Add users in users_set to group"""
        if len(users_set) < 1:
            return
        microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
            provider=self.provider, group=group
        ).first()
        if not microsoft_group:
            self.logger.warning(
                "could not sync group membership, group does not exist", group=group
            )
            return
        user_ids = list(
            MicrosoftEntraProviderUser.objects.filter(
                user__pk__in=users_set, provider=self.provider
            ).values_list("microsoft_id", flat=True)
        )
        if len(user_ids) < 1:
            return
        self._patch(microsoft_group.microsoft_id, Direction.add, user_ids)

    def _patch_remove_users(self, group: Group, users_set: set[int]):
        """Remove users in users_set from group"""
        if len(users_set) < 1:
            return
        microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
            provider=self.provider, group=group
        ).first()
        if not microsoft_group:
            self.logger.warning(
                "could not sync group membership, group does not exist", group=group
            )
            return
        user_ids = list(
            MicrosoftEntraProviderUser.objects.filter(
                user__pk__in=users_set, provider=self.provider
            ).values_list("microsoft_id", flat=True)
        )
        if len(user_ids) < 1:
            return
        self._patch(microsoft_group.microsoft_id, Direction.remove, user_ids)

    def discover(self):
        """Iterate through all groups and connect them with authentik groups if possible"""
        groups = self._request(self.client.groups.get())
        next_link = True
        while next_link:
            for group in groups.value:
                self._discover_single_group(group)
            next_link = groups.odata_next_link
            if not next_link:
                break
            groups = self._request(self.client.groups.with_url(next_link).get())

    def _discover_single_group(self, group: MSGroup):
        """handle discovery of a single group"""
        microsoft_name = group.unique_name
        matching_authentik_group = (
            self.provider.get_object_qs(Group).filter(name=microsoft_name).first()
        )
        if not matching_authentik_group:
            return
        MicrosoftEntraProviderGroup.objects.get_or_create(
            provider=self.provider,
            group=matching_authentik_group,
            microsoft_id=group.id,
        )

authentik/enterprise/providers/microsoft_entra/clients/users.py (new file, 150 lines)
@@ -0,0 +1,150 @@
from deepmerge import always_merger
from django.db import transaction
from msgraph.generated.models.user import User as MSUser
from msgraph.generated.users.users_request_builder import UsersRequestBuilder

from authentik.core.expression.exceptions import (
    PropertyMappingExpressionException,
    SkipObjectException,
)
from authentik.core.models import User
from authentik.enterprise.providers.microsoft_entra.clients.base import MicrosoftEntraSyncClient
from authentik.enterprise.providers.microsoft_entra.models import (
    MicrosoftEntraProviderMapping,
    MicrosoftEntraProviderUser,
)
from authentik.events.models import Event, EventAction
from authentik.lib.sync.outgoing.exceptions import (
    ObjectExistsSyncException,
    StopSync,
    TransientSyncException,
)
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
from authentik.lib.utils.errors import exception_to_string
from authentik.policies.utils import delete_none_values


class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProviderUser, MSUser]):
    """Sync authentik users into microsoft entra"""

    connection_type = MicrosoftEntraProviderUser
    connection_type_query = "user"
    can_discover = True

    def to_schema(self, obj: User, creating: bool) -> MSUser:
        """Convert authentik user"""
        raw_microsoft_user = {}
        for mapping in self.provider.property_mappings.all().order_by("name").select_subclasses():
            if not isinstance(mapping, MicrosoftEntraProviderMapping):
                continue
            try:
                value = mapping.evaluate(
                    user=obj,
                    request=None,
                    provider=self.provider,
                    creating=creating,
                )
                if value is None:
                    continue
                always_merger.merge(raw_microsoft_user, value)
            except SkipObjectException as exc:
                raise exc from exc
            except (PropertyMappingExpressionException, ValueError) as exc:
                # Value error can be raised when assigning invalid data to an attribute
                Event.new(
                    EventAction.CONFIGURATION_ERROR,
                    message=f"Failed to evaluate property-mapping {exception_to_string(exc)}",
                    mapping=mapping,
                ).save()
                raise StopSync(exc, obj, mapping) from exc
        if not raw_microsoft_user:
            raise StopSync(ValueError("No user mappings configured"), obj)
        try:
            return MSUser(**delete_none_values(raw_microsoft_user))
        except TypeError as exc:
            raise StopSync(exc, obj) from exc

    def delete(self, obj: User):
        """Delete user"""
        microsoft_user = MicrosoftEntraProviderUser.objects.filter(
            provider=self.provider, user=obj
        ).first()
        if not microsoft_user:
            self.logger.debug("User does not exist in Microsoft, skipping")
            return None
        with transaction.atomic():
            response = None
            if self.provider.user_delete_action == OutgoingSyncDeleteAction.DELETE:
                response = self._request(
                    self.client.users.by_user_id(microsoft_user.microsoft_id).delete()
                )
            elif self.provider.user_delete_action == OutgoingSyncDeleteAction.SUSPEND:
                response = self._request(
                    self.client.users.by_user_id(microsoft_user.microsoft_id).patch(
                        MSUser(account_enabled=False)
                    )
                )
            microsoft_user.delete()
            return response

    def create(self, user: User):
        """Create user from scratch and create a connection object"""
        microsoft_user = self.to_schema(user, True)
        self.check_email_valid(microsoft_user.user_principal_name)
        with transaction.atomic():
            try:
                response = self._request(self.client.users.post(microsoft_user))
            except ObjectExistsSyncException:
                # user already exists in microsoft entra, so we can connect them manually
                query_params = UsersRequestBuilder.UsersRequestBuilderGetQueryParameters(
                    filter=f"mail eq '{microsoft_user.mail}'",
                )
                request_configuration = (
                    UsersRequestBuilder.UsersRequestBuilderGetRequestConfiguration(
                        query_parameters=query_params,
                    )
                )
                user_data = self._request(self.client.users.get(request_configuration))
                if user_data.odata_count < 1:
                    self.logger.warning(
                        "User which could not be created also does not exist", user=user
                    )
                    return
                return MicrosoftEntraProviderUser.objects.create(
                    provider=self.provider, user=user, microsoft_id=user_data.value[0].id
                )
            except TransientSyncException as exc:
                raise exc
            else:
                return MicrosoftEntraProviderUser.objects.create(
                    provider=self.provider, user=user, microsoft_id=response.id
                )

    def update(self, user: User, connection: MicrosoftEntraProviderUser):
        """Update existing user"""
        microsoft_user = self.to_schema(user, False)
        self.check_email_valid(microsoft_user.user_principal_name)
        self._request(self.client.users.by_user_id(connection.microsoft_id).patch(microsoft_user))

    def discover(self):
        """Iterate through all users and connect them with authentik users if possible"""
        users = self._request(self.client.users.get())
        next_link = True
        while next_link:
            for user in users.value:
                self._discover_single_user(user)
            next_link = users.odata_next_link
            if not next_link:
                break
            users = self._request(self.client.users.with_url(next_link).get())

    def _discover_single_user(self, user: MSUser):
        """handle discovery of a single user"""
        matching_authentik_user = self.provider.get_object_qs(User).filter(email=user.mail).first()
        if not matching_authentik_user:
            return
        MicrosoftEntraProviderUser.objects.get_or_create(
            provider=self.provider,
            user=matching_authentik_user,
            microsoft_id=user.id,
        )

@@ -0,0 +1,165 @@
# Generated by Django 5.0.6 on 2024-05-08 14:35

import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("authentik_core", "0035_alter_group_options_and_more"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="MicrosoftEntraProviderMapping",
            fields=[
                (
                    "propertymapping_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.propertymapping",
                    ),
                ),
            ],
            options={
                "verbose_name": "Microsoft Entra Provider Mapping",
                "verbose_name_plural": "Microsoft Entra Provider Mappings",
            },
            bases=("authentik_core.propertymapping",),
        ),
        migrations.CreateModel(
            name="MicrosoftEntraProvider",
            fields=[
                (
                    "provider_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.provider",
                    ),
                ),
                ("client_id", models.TextField()),
                ("client_secret", models.TextField()),
                ("tenant_id", models.TextField()),
                ("exclude_users_service_account", models.BooleanField(default=False)),
                (
                    "user_delete_action",
                    models.TextField(
                        choices=[
                            ("do_nothing", "Do Nothing"),
                            ("delete", "Delete"),
                            ("suspend", "Suspend"),
                        ],
                        default="delete",
                    ),
                ),
                (
                    "group_delete_action",
                    models.TextField(
                        choices=[
                            ("do_nothing", "Do Nothing"),
                            ("delete", "Delete"),
                            ("suspend", "Suspend"),
                        ],
                        default="delete",
                    ),
                ),
                (
                    "filter_group",
                    models.ForeignKey(
                        default=None,
                        null=True,
                        on_delete=django.db.models.deletion.SET_DEFAULT,
                        to="authentik_core.group",
                    ),
                ),
                (
                    "property_mappings_group",
                    models.ManyToManyField(
                        blank=True,
                        default=None,
                        help_text="Property mappings used for group creation/updating.",
                        to="authentik_core.propertymapping",
                    ),
                ),
            ],
            options={
                "verbose_name": "Microsoft Entra Provider",
                "verbose_name_plural": "Microsoft Entra Providers",
            },
            bases=("authentik_core.provider", models.Model),
        ),
        migrations.CreateModel(
            name="MicrosoftEntraProviderGroup",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
                    ),
                ),
                ("microsoft_id", models.TextField()),
                (
                    "group",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="authentik_core.group"
                    ),
                ),
                (
                    "provider",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_providers_microsoft_entra.microsoftentraprovider",
                    ),
                ),
            ],
            options={
                "verbose_name": "Microsoft Entra Provider Group",
                "verbose_name_plural": "Microsoft Entra Provider Groups",
                "unique_together": {("microsoft_id", "group", "provider")},
            },
        ),
        migrations.CreateModel(
            name="MicrosoftEntraProviderUser",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
                    ),
                ),
                ("microsoft_id", models.TextField()),
                (
                    "provider",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_providers_microsoft_entra.microsoftentraprovider",
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
            options={
                "verbose_name": "Microsoft Entra Provider User",
                "verbose_name_plural": "Microsoft Entra Provider User",
                "unique_together": {("microsoft_id", "user", "provider")},
            },
        ),
    ]

authentik/enterprise/providers/microsoft_entra/models.py (new file, 180 lines)
@@ -0,0 +1,180 @@
"""Microsoft Entra sync provider"""

from typing import Any, Self
from uuid import uuid4

from azure.identity.aio import ClientSecretCredential
from django.db import models
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from rest_framework.serializers import Serializer

from authentik.core.models import (
    BackchannelProvider,
    Group,
    PropertyMapping,
    User,
    UserTypes,
)
from authentik.lib.models import SerializerModel
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction, OutgoingSyncProvider


class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider):
    """Sync users from authentik into Microsoft Entra."""

    client_id = models.TextField()
    client_secret = models.TextField()
    tenant_id = models.TextField()

    exclude_users_service_account = models.BooleanField(default=False)
    user_delete_action = models.TextField(
        choices=OutgoingSyncDeleteAction.choices, default=OutgoingSyncDeleteAction.DELETE
    )
    group_delete_action = models.TextField(
        choices=OutgoingSyncDeleteAction.choices, default=OutgoingSyncDeleteAction.DELETE
    )
    filter_group = models.ForeignKey(
        "authentik_core.group", on_delete=models.SET_DEFAULT, default=None, null=True
    )

    property_mappings_group = models.ManyToManyField(
        PropertyMapping,
        default=None,
        blank=True,
        help_text=_("Property mappings used for group creation/updating."),
    )

    def client_for_model(
        self, model: type[User | Group]
    ) -> BaseOutgoingSyncClient[User | Group, Any, Any, Self]:
        if issubclass(model, User):
            from authentik.enterprise.providers.microsoft_entra.clients.users import (
                MicrosoftEntraUserClient,
            )

            return MicrosoftEntraUserClient(self)
        if issubclass(model, Group):
            from authentik.enterprise.providers.microsoft_entra.clients.groups import (
                MicrosoftEntraGroupClient,
            )

            return MicrosoftEntraGroupClient(self)
        raise ValueError(f"Invalid model {model}")

    def get_object_qs(self, type: type[User | Group]) -> QuerySet[User | Group]:
        if type == User:
            # Get queryset of all users with consistent ordering
            # according to the provider's settings
            base = User.objects.all().exclude_anonymous()
            if self.exclude_users_service_account:
                base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
                    type=UserTypes.INTERNAL_SERVICE_ACCOUNT
                )
            if self.filter_group:
                base = base.filter(ak_groups__in=[self.filter_group])
            return base.order_by("pk")
        if type == Group:
            # Get queryset of all groups with consistent ordering
            return Group.objects.all().order_by("pk")
        raise ValueError(f"Invalid type {type}")

    def microsoft_credentials(self):
        return {
            "credentials": ClientSecretCredential(
                self.tenant_id, self.client_id, self.client_secret
            )
        }

    @property
    def component(self) -> str:
        return "ak-provider-microsoft-entra-form"

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.enterprise.providers.microsoft_entra.api.providers import (
            MicrosoftEntraProviderSerializer,
        )

        return MicrosoftEntraProviderSerializer

    def __str__(self):
        return f"Microsoft Entra Provider {self.name}"

    class Meta:
        verbose_name = _("Microsoft Entra Provider")
        verbose_name_plural = _("Microsoft Entra Providers")


class MicrosoftEntraProviderMapping(PropertyMapping):
    """Map authentik data to outgoing Microsoft requests"""

    @property
    def component(self) -> str:
        return "ak-property-mapping-microsoft-entra-form"

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.enterprise.providers.microsoft_entra.api.property_mappings import (
            MicrosoftEntraProviderMappingSerializer,
        )

        return MicrosoftEntraProviderMappingSerializer

    def __str__(self):
        return f"Microsoft Entra Provider Mapping {self.name}"

    class Meta:
        verbose_name = _("Microsoft Entra Provider Mapping")
        verbose_name_plural = _("Microsoft Entra Provider Mappings")


class MicrosoftEntraProviderUser(SerializerModel):
    """Mapping of a user and provider to a Microsoft user ID"""

    id = models.UUIDField(primary_key=True, editable=False, default=uuid4)
    microsoft_id = models.TextField()
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    provider = models.ForeignKey(MicrosoftEntraProvider, on_delete=models.CASCADE)

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.enterprise.providers.microsoft_entra.api.users import (
            MicrosoftEntraProviderUserSerializer,
        )

        return MicrosoftEntraProviderUserSerializer

    class Meta:
        verbose_name = _("Microsoft Entra Provider User")
        verbose_name_plural = _("Microsoft Entra Provider User")
        unique_together = (("microsoft_id", "user", "provider"),)

    def __str__(self) -> str:
        return f"Microsoft Entra Provider User {self.user_id} to {self.provider_id}"


class MicrosoftEntraProviderGroup(SerializerModel):
    """Mapping of a group and provider to a Microsoft group ID"""

    id = models.UUIDField(primary_key=True, editable=False, default=uuid4)
    microsoft_id = models.TextField()
    group = models.ForeignKey(Group, on_delete=models.CASCADE)
    provider = models.ForeignKey(MicrosoftEntraProvider, on_delete=models.CASCADE)

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.enterprise.providers.microsoft_entra.api.groups import (
            MicrosoftEntraProviderGroupSerializer,
        )

        return MicrosoftEntraProviderGroupSerializer

    class Meta:
        verbose_name = _("Microsoft Entra Provider Group")
        verbose_name_plural = _("Microsoft Entra Provider Groups")
        unique_together = (("microsoft_id", "group", "provider"),)

    def __str__(self) -> str:
        return f"Microsoft Entra Provider Group {self.group_id} to {self.provider_id}"

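Note: the provider model above dispatches to the user or group sync client via `client_for_model` and scopes the synced objects via `get_object_qs`. A minimal illustrative sketch of that flow; the provider lookup is hypothetical, and `write()` is assumed to exist on the shared base sync client (the group client's `super().write(obj)` call earlier in this diff implies it):

    # Illustrative only: pushing all in-scope users of one provider to Entra,
    # based on client_for_model/get_object_qs defined above.
    from authentik.core.models import User
    from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider

    provider = MicrosoftEntraProvider.objects.first()  # assumes at least one provider exists
    if provider:
        user_client = provider.client_for_model(User)  # returns a MicrosoftEntraUserClient
        for user in provider.get_object_qs(User):
            user_client.write(user)  # create-or-update the user in Microsoft Entra
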
authentik/enterprise/providers/microsoft_entra/settings.py (new file, 13 lines)
@@ -0,0 +1,13 @@
"""Microsoft Entra provider task Settings"""

from celery.schedules import crontab

from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
    "providers_microsoft_entra_sync": {
        "task": "authentik.enterprise.providers.microsoft_entra.tasks.microsoft_entra_sync_all",
        "schedule": crontab(minute=fqdn_rand("microsoft_entra_sync_all"), hour="*/4"),
        "options": {"queue": "authentik_scheduled"},
    },
}

authentik/enterprise/providers/microsoft_entra/signals.py (new file, 16 lines)
@@ -0,0 +1,16 @@
"""Microsoft provider signals"""

from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
from authentik.enterprise.providers.microsoft_entra.tasks import (
    microsoft_entra_sync,
    microsoft_entra_sync_direct,
    microsoft_entra_sync_m2m,
)
from authentik.lib.sync.outgoing.signals import register_signals

register_signals(
    MicrosoftEntraProvider,
    task_sync_single=microsoft_entra_sync,
    task_sync_direct=microsoft_entra_sync_direct,
    task_sync_m2m=microsoft_entra_sync_m2m,
)

authentik/enterprise/providers/microsoft_entra/tasks.py (new file, 37 lines)
@@ -0,0 +1,37 @@
"""Microsoft Entra Provider tasks"""

from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
from authentik.events.system_tasks import SystemTask
from authentik.lib.sync.outgoing.exceptions import TransientSyncException
from authentik.lib.sync.outgoing.tasks import SyncTasks
from authentik.root.celery import CELERY_APP

sync_tasks = SyncTasks(MicrosoftEntraProvider)


@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def microsoft_entra_sync_objects(*args, **kwargs):
    return sync_tasks.sync_objects(*args, **kwargs)


@CELERY_APP.task(
    base=SystemTask, bind=True, autoretry_for=(TransientSyncException,), retry_backoff=True
)
def microsoft_entra_sync(self, provider_pk: int, *args, **kwargs):
    """Run full sync for Microsoft Entra provider"""
    return sync_tasks.sync_single(self, provider_pk, microsoft_entra_sync_objects)


@CELERY_APP.task()
def microsoft_entra_sync_all():
    return sync_tasks.sync_all(microsoft_entra_sync)


@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def microsoft_entra_sync_direct(*args, **kwargs):
    return sync_tasks.sync_signal_direct(*args, **kwargs)


@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def microsoft_entra_sync_m2m(*args, **kwargs):
    return sync_tasks.sync_signal_m2m(*args, **kwargs)

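Note: these Celery tasks mirror the Google Workspace ones exercised earlier in this diff, where the test calls `google_workspace_sync.delay(self.provider.pk).get()`. A minimal sketch of queueing a full Entra sync the same way; the provider lookup is illustrative only:

    # Illustrative only: manually queueing a full sync for one Entra provider.
    from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
    from authentik.enterprise.providers.microsoft_entra.tasks import microsoft_entra_sync

    provider = MicrosoftEntraProvider.objects.first()  # assumes at least one provider exists
    if provider:
        # .delay() queues the task; .get() blocks until the sync finishes (fine in a shell, not in a request)
        microsoft_entra_sync.delay(provider.pk).get()
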
@@ -0,0 +1,392 @@
"""Microsoft Entra Group tests"""

from unittest.mock import AsyncMock, MagicMock, patch

from azure.identity.aio import ClientSecretCredential
from django.test import TestCase
from msgraph.generated.models.group import Group as MSGroup
from msgraph.generated.models.group_collection_response import GroupCollectionResponse
from msgraph.generated.models.organization import Organization
from msgraph.generated.models.organization_collection_response import OrganizationCollectionResponse
from msgraph.generated.models.user import User as MSUser
from msgraph.generated.models.user_collection_response import UserCollectionResponse
from msgraph.generated.models.verified_domain import VerifiedDomain

from authentik.blueprints.tests import apply_blueprint
from authentik.core.models import Application, Group, User
from authentik.core.tests.utils import create_test_user
from authentik.enterprise.providers.microsoft_entra.models import (
    MicrosoftEntraProvider,
    MicrosoftEntraProviderGroup,
    MicrosoftEntraProviderMapping,
    MicrosoftEntraProviderUser,
)
from authentik.enterprise.providers.microsoft_entra.tasks import microsoft_entra_sync
from authentik.events.models import Event, EventAction
from authentik.lib.generators import generate_id
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
from authentik.tenants.models import Tenant


class MicrosoftEntraGroupTests(TestCase):
    """Microsoft Entra Group tests"""

    @apply_blueprint("system/providers-microsoft-entra.yaml")
    def setUp(self) -> None:
        # Delete all users and groups as the mocked HTTP responses only return one ID
        # which will cause errors with multiple groups
        Tenant.objects.update(avatars="none")
        User.objects.all().exclude_anonymous().delete()
        Group.objects.all().delete()
        self.provider: MicrosoftEntraProvider = MicrosoftEntraProvider.objects.create(
            name=generate_id(),
            client_id=generate_id(),
            client_secret=generate_id(),
            tenant_id=generate_id(),
            exclude_users_service_account=True,
        )
        self.app: Application = Application.objects.create(
            name=generate_id(),
            slug=generate_id(),
        )
        self.app.backchannel_providers.add(self.provider)
        self.provider.property_mappings.add(
            MicrosoftEntraProviderMapping.objects.get(
                managed="goauthentik.io/providers/microsoft_entra/user"
            )
        )
        self.provider.property_mappings_group.add(
            MicrosoftEntraProviderMapping.objects.get(
                managed="goauthentik.io/providers/microsoft_entra/group"
            )
        )
        self.creds = ClientSecretCredential(generate_id(), generate_id(), generate_id())

    def test_group_create(self):
        """Test group creation"""
        uid = generate_id()
        with (
            patch(
                "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
                MagicMock(return_value={"credentials": self.creds}),
            ),
            patch(
                "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
                AsyncMock(
                    return_value=OrganizationCollectionResponse(
                        value=[
                            Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                        ]
                    )
                ),
            ),
            patch(
                "msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
                AsyncMock(return_value=MSGroup(id=generate_id())),
            ) as group_create,
        ):
            group = Group.objects.create(name=uid)
            microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
                provider=self.provider, group=group
            ).first()
            self.assertIsNotNone(microsoft_group)
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            group_create.assert_called_once()

    def test_group_create_update(self):
        """Test group updating"""
        uid = generate_id()
        ext_id = generate_id()
        with (
            patch(
                "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
                MagicMock(return_value={"credentials": self.creds}),
            ),
            patch(
                "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
                AsyncMock(
                    return_value=OrganizationCollectionResponse(
                        value=[
                            Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                        ]
                    )
                ),
            ),
            patch(
                "msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
                AsyncMock(return_value=MSGroup(id=ext_id)),
            ) as group_create,
            patch(
                "msgraph.generated.groups.item.group_item_request_builder.GroupItemRequestBuilder.patch",
                AsyncMock(return_value=MSGroup(id=ext_id)),
            ) as group_patch,
        ):
            group = Group.objects.create(name=uid)
            microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
                provider=self.provider, group=group
            ).first()
            self.assertIsNotNone(microsoft_group)

            group.name = "new name"
            group.save()
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            group_create.assert_called_once()
            group_patch.assert_called_once()

    def test_group_create_delete(self):
        """Test group deletion"""
        uid = generate_id()
        ext_id = generate_id()
        with (
            patch(
                "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
                AsyncMock(
                    return_value=OrganizationCollectionResponse(
                        value=[
                            Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                        ]
                    )
                ),
            ),
            patch(
                "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
                MagicMock(return_value={"credentials": self.creds}),
            ),
            patch(
                "msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
                AsyncMock(return_value=MSGroup(id=ext_id)),
            ) as group_create,
            patch(
                "msgraph.generated.groups.item.group_item_request_builder.GroupItemRequestBuilder.delete",
                AsyncMock(return_value=MSGroup(id=ext_id)),
            ) as group_delete,
        ):
            group = Group.objects.create(name=uid)
            microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
                provider=self.provider, group=group
            ).first()
            self.assertIsNotNone(microsoft_group)

            group.delete()
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            group_create.assert_called_once()
            group_delete.assert_called_once()

    def test_group_create_member_add(self):
        """Test group creation"""
        uid = generate_id()
        with (
            patch(
                "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
                MagicMock(return_value={"credentials": self.creds}),
            ),
            patch(
                "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
                AsyncMock(
                    return_value=OrganizationCollectionResponse(
                        value=[
                            Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                        ]
                    )
                ),
            ),
            patch(
                "msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
                AsyncMock(return_value=MSUser(id=generate_id())),
            ) as user_create,
            patch(
                "msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.patch",
                AsyncMock(return_value=MSUser(id=generate_id())),
            ),
            patch(
                "msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
                AsyncMock(return_value=MSGroup(id=uid)),
            ) as group_create,
            patch(
                "msgraph.generated.groups.item.members.ref.ref_request_builder.RefRequestBuilder.post",
                AsyncMock(),
            ) as member_add,
        ):
            user = create_test_user(uid)
            group = Group.objects.create(name=uid)
            group.users.add(user)
            microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
                provider=self.provider, group=group
            ).first()
            self.assertIsNotNone(microsoft_group)
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            user_create.assert_called_once()
            group_create.assert_called_once()
            member_add.assert_called_once()
            self.assertEqual(
                member_add.call_args[0][0].odata_id,
                f"https://graph.microsoft.com/v1.0/directoryObjects/{MicrosoftEntraProviderUser.objects.filter(
                    provider=self.provider,
                ).first().microsoft_id}",
            )

    def test_group_create_member_remove(self):
        """Test group creation"""
        uid = generate_id()
        with (
            patch(
                "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
                MagicMock(return_value={"credentials": self.creds}),
            ),
            patch(
                "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
                AsyncMock(
                    return_value=OrganizationCollectionResponse(
                        value=[
                            Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                        ]
                    )
                ),
            ),
            patch(
                "msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
                AsyncMock(return_value=MSUser(id=generate_id())),
            ) as user_create,
            patch(
                "msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.patch",
                AsyncMock(return_value=MSUser(id=generate_id())),
            ),
            patch(
                "msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
                AsyncMock(return_value=MSGroup(id=uid)),
            ) as group_create,
            patch(
                "msgraph.generated.groups.item.members.ref.ref_request_builder.RefRequestBuilder.post",
                AsyncMock(),
            ) as member_add,
            patch(
                "msgraph.generated.groups.item.members.item.ref.ref_request_builder.RefRequestBuilder.delete",
                AsyncMock(),
            ) as member_remove,
        ):
            user = create_test_user(uid)
            group = Group.objects.create(name=uid)
            group.users.add(user)
            microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
                provider=self.provider, group=group
            ).first()
            self.assertIsNotNone(microsoft_group)
            group.users.remove(user)

            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            user_create.assert_called_once()
            group_create.assert_called_once()
            member_add.assert_called_once()
            self.assertEqual(
                member_add.call_args[0][0].odata_id,
                f"https://graph.microsoft.com/v1.0/directoryObjects/{MicrosoftEntraProviderUser.objects.filter(
                    provider=self.provider,
                ).first().microsoft_id}",
            )
            member_remove.assert_called_once()

    def test_group_create_delete_do_nothing(self):
        """Test group deletion (delete action = do nothing)"""
        self.provider.group_delete_action = OutgoingSyncDeleteAction.DO_NOTHING
        self.provider.save()
        uid = generate_id()
        with (
            patch(
                "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
                MagicMock(return_value={"credentials": self.creds}),
            ),
            patch(
                "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
                AsyncMock(
                    return_value=OrganizationCollectionResponse(
                        value=[
                            Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                        ]
                    )
                ),
            ),
            patch(
                "msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
                AsyncMock(return_value=MSGroup(id=uid)),
            ) as group_create,
            patch(
                "msgraph.generated.groups.item.group_item_request_builder.GroupItemRequestBuilder.delete",
                AsyncMock(return_value=MSGroup(id=uid)),
            ) as group_delete,
        ):
            group = Group.objects.create(name=uid)
            microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
                provider=self.provider, group=group
            ).first()
            self.assertIsNotNone(microsoft_group)

            group.delete()
            self.assertFalse(
                MicrosoftEntraProviderGroup.objects.filter(
                    provider=self.provider, group__name=uid
                ).exists()
            )
            group_create.assert_called_once()
            group_delete.assert_not_called()

    def test_sync_task(self):
        """Test group discovery"""
        uid = generate_id()
        self.app.backchannel_providers.remove(self.provider)
        different_group = Group.objects.create(
            name=uid,
        )
        self.app.backchannel_providers.add(self.provider)
        with (
            patch(
                "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
                MagicMock(return_value={"credentials": self.creds}),
            ),
            patch(
                "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
                AsyncMock(
                    return_value=OrganizationCollectionResponse(
                        value=[
                            Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                        ]
                    )
                ),
            ),
            patch(
                "msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.patch",
                AsyncMock(return_value=MSUser(id=generate_id())),
            ),
            patch(
                "msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
                AsyncMock(return_value=MSGroup(id=generate_id())),
            ),
            patch(
                "msgraph.generated.groups.item.group_item_request_builder.GroupItemRequestBuilder.patch",
                AsyncMock(return_value=MSGroup(id=uid)),
            ),
            patch(
                "msgraph.generated.users.users_request_builder.UsersRequestBuilder.get",
                AsyncMock(
                    return_value=UserCollectionResponse(
                        value=[MSUser(mail=f"{uid}@goauthentik.io", id=uid)]
                    )
                ),
            ) as user_list,
            patch(
                "msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.get",
                AsyncMock(
                    return_value=GroupCollectionResponse(
                        value=[MSGroup(display_name=uid, unique_name=uid, id=uid)]
                    )
                ),
            ) as group_list,
        ):
            microsoft_entra_sync.delay(self.provider.pk).get()
            self.assertTrue(
                MicrosoftEntraProviderGroup.objects.filter(
                    group=different_group, provider=self.provider
                ).exists()
            )
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            user_list.assert_called_once()
            group_list.assert_called_once()
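Editor's note: every test above repeats the same two patches for the provider credentials and the organization lookup. A hypothetical helper (not part of this diff) could bundle them; `creds` corresponds to the `self.creds` fixture used in the tests:

from contextlib import contextmanager
from unittest.mock import AsyncMock, MagicMock, patch

from msgraph.generated.models.organization import Organization
from msgraph.generated.models.organization_collection_response import OrganizationCollectionResponse
from msgraph.generated.models.verified_domain import VerifiedDomain


@contextmanager
def mocked_entra_tenant(creds):
    """Bundle the credential and organization mocks repeated in each test."""
    with (
        patch(
            "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
            MagicMock(return_value={"credentials": creds}),
        ),
        patch(
            "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
            AsyncMock(
                return_value=OrganizationCollectionResponse(
                    value=[Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])]
                )
            ),
        ),
    ):
        yield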
@@ -0,0 +1,337 @@
"""Microsoft Entra User tests"""

from unittest.mock import AsyncMock, MagicMock, patch

from azure.identity.aio import ClientSecretCredential
from django.test import TestCase
from msgraph.generated.models.group_collection_response import GroupCollectionResponse
from msgraph.generated.models.organization import Organization
from msgraph.generated.models.organization_collection_response import OrganizationCollectionResponse
from msgraph.generated.models.user import User as MSUser
from msgraph.generated.models.user_collection_response import UserCollectionResponse
from msgraph.generated.models.verified_domain import VerifiedDomain

from authentik.blueprints.tests import apply_blueprint
from authentik.core.models import Application, Group, User
from authentik.enterprise.providers.microsoft_entra.models import (
    MicrosoftEntraProvider,
    MicrosoftEntraProviderMapping,
    MicrosoftEntraProviderUser,
)
from authentik.enterprise.providers.microsoft_entra.tasks import microsoft_entra_sync
from authentik.events.models import Event, EventAction
from authentik.lib.generators import generate_id
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
from authentik.tenants.models import Tenant


class MicrosoftEntraUserTests(TestCase):
    """Microsoft Entra User tests"""

    @apply_blueprint("system/providers-microsoft-entra.yaml")
    def setUp(self) -> None:
        # Delete all users and groups as the mocked HTTP responses only return one ID
        # which will cause errors with multiple users
        Tenant.objects.update(avatars="none")
        User.objects.all().exclude_anonymous().delete()
        Group.objects.all().delete()
        self.provider: MicrosoftEntraProvider = MicrosoftEntraProvider.objects.create(
            name=generate_id(),
            client_id=generate_id(),
            client_secret=generate_id(),
            tenant_id=generate_id(),
            exclude_users_service_account=True,
        )
        self.app: Application = Application.objects.create(
            name=generate_id(),
            slug=generate_id(),
        )
        self.app.backchannel_providers.add(self.provider)
        self.provider.property_mappings.add(
            MicrosoftEntraProviderMapping.objects.get(
                managed="goauthentik.io/providers/microsoft_entra/user"
            )
        )
        self.provider.property_mappings_group.add(
            MicrosoftEntraProviderMapping.objects.get(
                managed="goauthentik.io/providers/microsoft_entra/group"
            )
        )
        self.creds = ClientSecretCredential(generate_id(), generate_id(), generate_id())

    def test_user_create(self):
        """Test user creation"""
        uid = generate_id()
        with (
            patch(
                "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
                MagicMock(return_value={"credentials": self.creds}),
            ),
            patch(
                "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
                AsyncMock(
                    return_value=OrganizationCollectionResponse(
                        value=[
                            Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                        ]
                    )
                ),
            ),
            patch(
                "msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
                AsyncMock(return_value=MSUser(id=generate_id())),
            ) as user_create,
        ):
            user = User.objects.create(
                username=uid,
                name=f"{uid} {uid}",
                email=f"{uid}@goauthentik.io",
            )
            microsoft_user = MicrosoftEntraProviderUser.objects.filter(
                provider=self.provider, user=user
            ).first()
            self.assertIsNotNone(microsoft_user)
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            user_create.assert_called_once()

    def test_user_create_update(self):
        """Test user updating"""
        uid = generate_id()
        with (
            patch(
                "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
                MagicMock(return_value={"credentials": self.creds}),
            ),
            patch(
                "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
                AsyncMock(
                    return_value=OrganizationCollectionResponse(
                        value=[
                            Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                        ]
                    )
                ),
            ),
            patch(
                "msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
                AsyncMock(return_value=MSUser(id=generate_id())),
            ) as user_create,
            patch(
                "msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.patch",
                AsyncMock(return_value=MSUser(id=generate_id())),
            ) as user_patch,
        ):
            user = User.objects.create(
                username=uid,
                name=f"{uid} {uid}",
                email=f"{uid}@goauthentik.io",
            )
            microsoft_user = MicrosoftEntraProviderUser.objects.filter(
                provider=self.provider, user=user
            ).first()
            self.assertIsNotNone(microsoft_user)

            user.name = "new name"
            user.save()
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            user_create.assert_called_once()
            user_patch.assert_called_once()

    def test_user_create_delete(self):
        """Test user deletion"""
        uid = generate_id()
        with (
            patch(
                "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
                MagicMock(return_value={"credentials": self.creds}),
            ),
            patch(
                "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
                AsyncMock(
                    return_value=OrganizationCollectionResponse(
                        value=[
                            Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                        ]
                    )
                ),
            ),
            patch(
                "msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
                AsyncMock(return_value=MSUser(id=generate_id())),
            ) as user_create,
            patch(
                "msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.delete",
                AsyncMock(),
            ) as user_delete,
        ):
            user = User.objects.create(
                username=uid,
                name=f"{uid} {uid}",
                email=f"{uid}@goauthentik.io",
            )
            microsoft_user = MicrosoftEntraProviderUser.objects.filter(
                provider=self.provider, user=user
            ).first()
            self.assertIsNotNone(microsoft_user)

            user.delete()
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            user_create.assert_called_once()
            user_delete.assert_called_once()

    def test_user_create_delete_suspend(self):
        """Test user deletion (delete action = Suspend)"""
        self.provider.user_delete_action = OutgoingSyncDeleteAction.SUSPEND
        self.provider.save()
        uid = generate_id()
        with (
            patch(
                "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
                MagicMock(return_value={"credentials": self.creds}),
            ),
            patch(
                "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
                AsyncMock(
                    return_value=OrganizationCollectionResponse(
                        value=[
                            Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                        ]
                    )
                ),
            ),
            patch(
                "msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
                AsyncMock(return_value=MSUser(id=generate_id())),
            ) as user_create,
            patch(
                "msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.patch",
                AsyncMock(return_value=MSUser(id=generate_id())),
            ) as user_patch,
            patch(
                "msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.delete",
                AsyncMock(),
            ) as user_delete,
        ):
            user = User.objects.create(
                username=uid,
                name=f"{uid} {uid}",
                email=f"{uid}@goauthentik.io",
            )
            microsoft_user = MicrosoftEntraProviderUser.objects.filter(
                provider=self.provider, user=user
            ).first()
            self.assertIsNotNone(microsoft_user)

            user.delete()
            self.assertFalse(
                MicrosoftEntraProviderUser.objects.filter(
                    provider=self.provider, user__username=uid
                ).exists()
            )
            user_create.assert_called_once()
            user_patch.assert_called_once()
            self.assertFalse(user_patch.call_args[0][0].account_enabled)
            user_delete.assert_not_called()

    def test_user_create_delete_do_nothing(self):
        """Test user deletion (delete action = do nothing)"""
        self.provider.user_delete_action = OutgoingSyncDeleteAction.DO_NOTHING
        self.provider.save()
        uid = generate_id()
        with (
            patch(
                "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
                MagicMock(return_value={"credentials": self.creds}),
            ),
            patch(
                "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
                AsyncMock(
                    return_value=OrganizationCollectionResponse(
                        value=[
                            Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                        ]
                    )
                ),
            ),
            patch(
                "msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
                AsyncMock(return_value=MSUser(id=generate_id())),
            ) as user_create,
            patch(
                "msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.patch",
                AsyncMock(return_value=MSUser(id=generate_id())),
            ) as user_patch,
            patch(
                "msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.delete",
                AsyncMock(),
            ) as user_delete,
        ):
            user = User.objects.create(
                username=uid,
                name=f"{uid} {uid}",
                email=f"{uid}@goauthentik.io",
            )
            microsoft_user = MicrosoftEntraProviderUser.objects.filter(
                provider=self.provider, user=user
            ).first()
            self.assertIsNotNone(microsoft_user)

            user.delete()
            self.assertFalse(
                MicrosoftEntraProviderUser.objects.filter(
                    provider=self.provider, user__username=uid
                ).exists()
            )
            user_create.assert_called_once()
            user_patch.assert_not_called()
            user_delete.assert_not_called()

    def test_sync_task(self):
        """Test user discovery"""
        uid = generate_id()
        self.app.backchannel_providers.remove(self.provider)
        different_user = User.objects.create(
            username=uid,
            email=f"{uid}@goauthentik.io",
        )
        self.app.backchannel_providers.add(self.provider)
        with (
            patch(
                "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
                MagicMock(return_value={"credentials": self.creds}),
            ),
            patch(
                "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
                AsyncMock(
                    return_value=OrganizationCollectionResponse(
                        value=[
                            Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                        ]
                    )
                ),
            ),
            patch(
                "msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.patch",
                AsyncMock(return_value=MSUser(id=generate_id())),
            ),
            patch(
                "msgraph.generated.users.users_request_builder.UsersRequestBuilder.get",
                AsyncMock(
                    return_value=UserCollectionResponse(
                        value=[MSUser(mail=f"{uid}@goauthentik.io", id=uid)]
                    )
                ),
            ) as user_list,
            patch(
                "msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.get",
                AsyncMock(return_value=GroupCollectionResponse(value=[])),
            ),
        ):
            microsoft_entra_sync.delay(self.provider.pk).get()
            self.assertTrue(
                MicrosoftEntraProviderUser.objects.filter(
                    user=different_user, provider=self.provider
                ).exists()
            )
            self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
            user_list.assert_called_once()
21 authentik/enterprise/providers/microsoft_entra/urls.py Normal file
@@ -0,0 +1,21 @@
"""microsoft provider urls"""

from authentik.enterprise.providers.microsoft_entra.api.groups import (
    MicrosoftEntraProviderGroupViewSet,
)
from authentik.enterprise.providers.microsoft_entra.api.property_mappings import (
    MicrosoftEntraProviderMappingViewSet,
)
from authentik.enterprise.providers.microsoft_entra.api.providers import (
    MicrosoftEntraProviderViewSet,
)
from authentik.enterprise.providers.microsoft_entra.api.users import (
    MicrosoftEntraProviderUserViewSet,
)

api_urlpatterns = [
    ("providers/microsoft_entra", MicrosoftEntraProviderViewSet),
    ("providers/microsoft_entra_users", MicrosoftEntraProviderUserViewSet),
    ("providers/microsoft_entra_groups", MicrosoftEntraProviderGroupViewSet),
    ("propertymappings/provider/microsoft_entra", MicrosoftEntraProviderMappingViewSet),
]
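Editor's note: authentik collects `api_urlpatterns` tuples like these centrally; purely to illustrate the shape of the data, this is how such (prefix, viewset) pairs would map onto a plain DRF router. The router wiring here is an assumption, not how authentik itself mounts them:

from rest_framework.routers import DefaultRouter

router = DefaultRouter()
# api_urlpatterns is the list defined in the file above
for prefix, viewset in api_urlpatterns:
    router.register(prefix, viewset, basename=prefix.replace("/", "-"))

urlpatterns = router.urls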
@@ -11,7 +11,7 @@ from django.utils.translation import gettext as _
 from rest_framework.serializers import Serializer
 from structlog.stdlib import get_logger
 
-from authentik.core.exceptions import PropertyMappingExpressionException
+from authentik.core.expression.exceptions import PropertyMappingExpressionException
 from authentik.core.models import ExpiringModel, PropertyMapping, Provider, User, default_token_key
 from authentik.events.models import Event, EventAction
 from authentik.lib.models import SerializerModel
@@ -14,6 +14,8 @@ CELERY_BEAT_SCHEDULE = {
 
 TENANT_APPS = [
     "authentik.enterprise.audit",
+    "authentik.enterprise.providers.google_workspace",
+    "authentik.enterprise.providers.microsoft_entra",
     "authentik.enterprise.providers.rac",
     "authentik.enterprise.stages.source",
 ]
@@ -60,6 +60,8 @@ class SystemTaskSerializer(ModelSerializer):
             "duration",
             "status",
             "messages",
+            "expires",
+            "expiring",
         ]
 
 
@@ -10,7 +10,7 @@ from django.db import migrations, models
 from django.db.backends.base.schema import BaseDatabaseSchemaEditor
 
 import authentik.events.models
-import authentik.lib.models
+import authentik.lib.validators
 from authentik.lib.migrations import progress_bar
 
 
@@ -377,7 +377,7 @@ class Migration(migrations.Migration):
             model_name="notificationtransport",
             name="webhook_url",
             field=models.TextField(
-                blank=True, validators=[authentik.lib.models.DomainlessURLValidator()]
+                blank=True, validators=[authentik.lib.validators.DomainlessURLValidator()]
             ),
         ),
     ]
@@ -41,10 +41,11 @@ from authentik.events.utils import (
     sanitize_dict,
     sanitize_item,
 )
-from authentik.lib.models import DomainlessURLValidator, SerializerModel
+from authentik.lib.models import SerializerModel
 from authentik.lib.sentry import SentryIgnoredException
 from authentik.lib.utils.http import get_http_session
 from authentik.lib.utils.time import timedelta_from_string
+from authentik.lib.validators import DomainlessURLValidator
 from authentik.policies.models import PolicyBindingModel
 from authentik.root.middleware import ClientIPMiddleware
 from authentik.stages.email.utils import TemplateEmailMessage
@@ -6,7 +6,7 @@ from typing import Any
 
 from django.utils.timezone import now
 from django.utils.translation import gettext_lazy as _
-from structlog.stdlib import get_logger
+from structlog.stdlib import BoundLogger, get_logger
 from tenant_schemas_celery.task import TenantTask
 
 from authentik.events.logs import LogEvent
@@ -15,12 +15,12 @@ from authentik.events.models import SystemTask as DBSystemTask
 from authentik.events.utils import sanitize_item
 from authentik.lib.utils.errors import exception_to_string
 
-LOGGER = get_logger()
-
 
 class SystemTask(TenantTask):
     """Task which can save its state to the cache"""
 
+    logger: BoundLogger
+
     # For tasks that should only be listed if they failed, set this to False
     save_on_success: bool
 
@@ -63,6 +63,7 @@ class SystemTask(TenantTask):
     def before_start(self, task_id, args, kwargs):
         self._start_precise = perf_counter()
         self._start = now()
+        self.logger = get_logger().bind(task_id=task_id)
         return super().before_start(task_id, args, kwargs)
 
     def db(self) -> DBSystemTask | None:
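Editor's note: with the module-level LOGGER gone, every SystemTask run gets its own logger bound to the Celery task id in before_start(). A sketch of a task using it; the task name and registration here are illustrative, not part of this diff:

from authentik.events.models import TaskStatus
from authentik.events.system_tasks import SystemTask
from authentik.root.celery import CELERY_APP


@CELERY_APP.task(bind=True, base=SystemTask)
def example_cleanup(self: SystemTask):
    # self.logger already carries task_id=<current run's id>, so log lines
    # from concurrent runs can be told apart.
    self.logger.info("starting cleanup")
    self.set_status(TaskStatus.SUCCESSFUL)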
@@ -4,7 +4,7 @@ from django.db.models.query_utils import Q
 from guardian.shortcuts import get_anonymous_user
 from structlog.stdlib import get_logger
 
-from authentik.core.exceptions import PropertyMappingExpressionException
+from authentik.core.expression.exceptions import PropertyMappingExpressionException
 from authentik.core.models import User
 from authentik.events.models import (
     Event,
@@ -33,7 +33,6 @@ from authentik.lib.utils.file import (
 )
 from authentik.lib.views import bad_request_message
 from authentik.rbac.decorators import permission_required
-from authentik.rbac.filters import ObjectFilter
 
 LOGGER = get_logger()
 
@@ -278,7 +277,7 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
             400: OpenApiResponse(description="Flow not applicable"),
         },
     )
-    @action(detail=True, pagination_class=None, filter_backends=[ObjectFilter])
+    @action(detail=True, pagination_class=None, filter_backends=[])
    def execute(self, request: Request, slug: str):
        """Execute flow for current user"""
        # Because we pre-plan the flow here, and not in the planner, we need to manually clear
@@ -9,6 +9,7 @@ from typing import Any
 
 from cachetools import TLRUCache, cached
 from django.core.exceptions import FieldError
+from django.utils.text import slugify
 from guardian.shortcuts import get_anonymous_user
 from rest_framework.serializers import ValidationError
 from sentry_sdk.hub import Hub
@@ -56,6 +57,7 @@ class BaseEvaluator:
             "requests": get_http_session(),
             "resolve_dns": BaseEvaluator.expr_resolve_dns,
             "reverse_dns": BaseEvaluator.expr_reverse_dns,
+            "slugify": slugify,
         }
         self._context = {}
 
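Editor's note: slugify is now exposed inside the expression sandbox. An illustrative property-mapping expression body (this is the snippet stored on a mapping and evaluated by BaseEvaluator, not a standalone module; a `user` object is assumed to be in the evaluation context):

# expression body as stored on a property mapping
return {
    "displayName": user.name,
    # slugify comes from the sandbox globals added above
    "mailNickname": slugify(user.username),
}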
@@ -100,6 +100,8 @@ def get_logger_config():
         "fsevents": "WARNING",
         "uvicorn": "WARNING",
         "gunicorn": "INFO",
+        "requests_mock": "WARNING",
+        "hpack": "WARNING",
     }
     for handler_name, level in handler_level_map.items():
         base_config["loggers"][handler_name] = {
@@ -1,13 +1,16 @@
 """Generic models"""
 
-import re
+from typing import Any
 
-from django.core.validators import URLValidator
 from django.db import models
-from django.utils.regex_helper import _lazy_re_compile
+from django.dispatch import Signal
+from django.utils import timezone
 from model_utils.managers import InheritanceManager
 from rest_framework.serializers import BaseSerializer
 
+pre_soft_delete = Signal()
+post_soft_delete = Signal()
+
 
 class SerializerModel(models.Model):
     """Base Abstract Model which has a serializer"""
@@ -51,46 +54,57 @@ class InheritanceForeignKey(models.ForeignKey):
     forward_related_accessor_class = InheritanceForwardManyToOneDescriptor
 
 
-class DomainlessURLValidator(URLValidator):
-    """Subclass of URLValidator which doesn't check the domain
-    (to allow hostnames without domain)"""
-
-    def __init__(self, *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
-        self.host_re = "(" + self.hostname_re + self.domain_re + "|localhost)"
-        self.regex = _lazy_re_compile(
-            r"^(?:[a-z0-9.+-]*)://"  # scheme is validated separately
-            r"(?:[^\s:@/]+(?::[^\s:@/]*)?@)?"  # user:pass authentication
-            r"(?:" + self.ipv4_re + "|" + self.ipv6_re + "|" + self.host_re + ")"
-            r"(?::\d{2,5})?"  # port
-            r"(?:[/?#][^\s]*)?"  # resource path
-            r"\Z",
-            re.IGNORECASE,
-        )
-        self.schemes = ["http", "https", "blank"] + list(self.schemes)
-
-    def __call__(self, value: str):
-        # Check if the scheme is valid.
-        scheme = value.split("://")[0].lower()
-        if scheme not in self.schemes:
-            value = "default" + value
-        super().__call__(value)
-
-
-class DomainlessFormattedURLValidator(DomainlessURLValidator):
-    """URL validator which allows for python format strings"""
-
-    def __init__(self, *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
-        self.formatter_re = r"([%\(\)a-zA-Z])*"
-        self.host_re = "(" + self.formatter_re + self.hostname_re + self.domain_re + "|localhost)"
-        self.regex = _lazy_re_compile(
-            r"^(?:[a-z0-9.+-]*)://"  # scheme is validated separately
-            r"(?:[^\s:@/]+(?::[^\s:@/]*)?@)?"  # user:pass authentication
-            r"(?:" + self.ipv4_re + "|" + self.ipv6_re + "|" + self.host_re + ")"
-            r"(?::\d{2,5})?"  # port
-            r"(?:[/?#][^\s]*)?"  # resource path
-            r"\Z",
-            re.IGNORECASE,
-        )
-        self.schemes = ["http", "https", "blank"] + list(self.schemes)
+class SoftDeleteQuerySet(models.QuerySet):
+
+    def delete(self):
+        for obj in self.all():
+            obj.delete()
+
+    def hard_delete(self):
+        return super().delete()
+
+
+class SoftDeleteManager(models.Manager):
+
+    def get_queryset(self):
+        return SoftDeleteQuerySet(self.model, using=self._db).filter(deleted_at__isnull=True)
+
+
+class DeletedSoftDeleteManager(models.Manager):
+
+    def get_queryset(self):
+        return super().get_queryset().exclude(deleted_at__isnull=True)
+
+
+class SoftDeleteModel(models.Model):
+    """Model which doesn't fully delete itself, but rather saves the delete status
+    so cleanup events can run."""
+
+    deleted_at = models.DateTimeField(blank=True, null=True)
+
+    objects = SoftDeleteManager()
+    deleted = DeletedSoftDeleteManager()
+
+    class Meta:
+        abstract = True
+
+    @property
+    def is_deleted(self):
+        return self.deleted_at is not None
+
+    def delete(self, using: Any = ..., keep_parents: bool = ...) -> tuple[int, dict[str, int]]:
+        pre_soft_delete.send(sender=self.__class__, instance=self)
+        now = timezone.now()
+        self.deleted_at = now
+        self.save(
+            update_fields=[
+                "deleted_at",
+            ]
+        )
+        post_soft_delete.send(sender=self.__class__, instance=self)
+        return tuple()
+
+    def force_delete(self, using: Any = ...):
+        if not self.deleted_at:
+            raise models.ProtectedError("Refusing to force delete non-deleted model", {self})
+        return super().delete(using=using)
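Editor's note: a minimal sketch of how a model would opt into the soft-delete behaviour added above; `Invitation` is purely illustrative, not a model touched by this diff:

from django.db import models

from authentik.lib.models import SoftDeleteModel


class Invitation(SoftDeleteModel):
    name = models.TextField()


# obj.delete() only stamps deleted_at and fires pre/post_soft_delete;
# the default manager then hides the row, while `.deleted` still finds it:
#   Invitation.objects.filter(pk=obj.pk).exists()  -> False
#   Invitation.deleted.filter(pk=obj.pk).exists()  -> True
# obj.force_delete() removes the row for real, and refuses to run unless the
# object was soft-deleted first.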
0 authentik/lib/sync/__init__.py Normal file
54 authentik/lib/sync/outgoing/__init__.py Normal file
5 authentik/lib/sync/outgoing/__init__.py Normal file
@@ -0,0 +1,5 @@
"""Sync constants"""

PAGE_SIZE = 100
PAGE_TIMEOUT = 60 * 60 * 0.5  # Half an hour
HTTP_CONFLICT = 409
54 authentik/lib/sync/outgoing/api.py Normal file
@@ -0,0 +1,54 @@
from collections.abc import Callable

from django.utils.text import slugify
from drf_spectacular.utils import OpenApiResponse, extend_schema
from guardian.shortcuts import get_objects_for_user
from rest_framework.decorators import action
from rest_framework.fields import BooleanField
from rest_framework.request import Request
from rest_framework.response import Response

from authentik.core.api.utils import PassiveSerializer
from authentik.events.api.tasks import SystemTaskSerializer
from authentik.lib.sync.outgoing.models import OutgoingSyncProvider


class SyncStatusSerializer(PassiveSerializer):
    """Provider sync status"""

    is_running = BooleanField(read_only=True)
    tasks = SystemTaskSerializer(many=True, read_only=True)


class OutgoingSyncProviderStatusMixin:
    """Common API Endpoints for Outgoing sync providers"""

    sync_single_task: Callable = None

    @extend_schema(
        responses={
            200: SyncStatusSerializer(),
            404: OpenApiResponse(description="Task not found"),
        }
    )
    @action(
        methods=["GET"],
        detail=True,
        pagination_class=None,
        url_path="sync/status",
        filter_backends=[],
    )
    def sync_status(self, request: Request, pk: int) -> Response:
        """Get provider's sync status"""
        provider: OutgoingSyncProvider = self.get_object()
        tasks = list(
            get_objects_for_user(request.user, "authentik_events.view_systemtask").filter(
                name=self.sync_single_task.__name__,
                uid=slugify(provider.name),
            )
        )
        status = {
            "tasks": tasks,
            "is_running": provider.sync_lock.locked(),
        }
        return Response(SyncStatusSerializer(status).data)
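Editor's note: a sketch of how a provider ViewSet would pick up the /sync/status endpoint from this mixin. The Microsoft Entra names are taken from elsewhere in this diff; the ModelViewSet base and the omitted serializer wiring are abbreviations, not the exact upstream implementation:

from rest_framework.viewsets import ModelViewSet

from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
from authentik.enterprise.providers.microsoft_entra.tasks import microsoft_entra_sync
from authentik.lib.sync.outgoing.api import OutgoingSyncProviderStatusMixin


class MicrosoftEntraProviderViewSet(OutgoingSyncProviderStatusMixin, ModelViewSet):
    queryset = MicrosoftEntraProvider.objects.all()
    # serializer_class etc. omitted in this sketch.
    # sync_status() looks up SystemTask rows by this task's function name and
    # the slugified provider name.
    sync_single_task = microsoft_entra_sync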
82 authentik/lib/sync/outgoing/base.py Normal file
@@ -0,0 +1,82 @@
"""Basic outgoing sync Client"""

from enum import StrEnum
from typing import TYPE_CHECKING

from django.db import DatabaseError
from structlog.stdlib import get_logger

from authentik.lib.sync.outgoing.exceptions import NotFoundSyncException

if TYPE_CHECKING:
    from django.db.models import Model

    from authentik.lib.sync.outgoing.models import OutgoingSyncProvider


class Direction(StrEnum):

    add = "add"
    remove = "remove"


class BaseOutgoingSyncClient[
    TModel: "Model", TConnection: "Model", TSchema: dict, TProvider: "OutgoingSyncProvider"
]:
    """Basic outgoing sync client"""

    provider: TProvider
    connection_type: type[TConnection]
    connection_type_query: str

    can_discover = False

    def __init__(self, provider: TProvider):
        self.logger = get_logger().bind(provider=provider.name)
        self.provider = provider

    def create(self, obj: TModel) -> TConnection:
        """Create object in remote destination"""
        raise NotImplementedError()

    def update(self, obj: TModel, connection: TConnection):
        """Update object in remote destination"""
        raise NotImplementedError()

    def write(self, obj: TModel) -> tuple[TConnection, bool]:
        """Write object to destination. Uses self.create and self.update, but
        can be overwritten for further logic"""
        connection = self.connection_type.objects.filter(
            provider=self.provider, **{self.connection_type_query: obj}
        ).first()
        try:
            if not connection:
                connection = self.create(obj)
                return connection, True
            try:
                self.update(obj, connection)
                return connection, False
            except NotFoundSyncException:
                connection.delete()
                connection = self.create(obj)
                return connection, True
        except DatabaseError as exc:
            self.logger.warning("Failed to write object", obj=obj, exc=exc)
            if connection:
                connection.delete()
        return None, False

    def delete(self, obj: TModel):
        """Delete object from destination"""
        raise NotImplementedError()

    def to_schema(self, obj: TModel, creating: bool) -> TSchema:
        """Convert object to destination schema"""
        raise NotImplementedError()

    def discover(self):
        """Optional method. Can be used to implement a "discovery" where
        upon creation of this provider, this function will be called and can
        pre-link any users/groups in the remote system with the respective
        object in authentik based on a common identifier"""
        raise NotImplementedError()
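Editor's note: a minimal sketch of a concrete client built on the class above. Only the class contract (create/update/delete/to_schema, connection_type, connection_type_query) comes from the diff; the remote API calls are stand-ins and this is not the real Microsoft Entra client:

from authentik.core.models import User
from authentik.enterprise.providers.microsoft_entra.models import (
    MicrosoftEntraProvider,
    MicrosoftEntraProviderUser,
)
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient


class ExampleUserClient(
    BaseOutgoingSyncClient[User, MicrosoftEntraProviderUser, dict, MicrosoftEntraProvider]
):
    connection_type = MicrosoftEntraProviderUser
    connection_type_query = "user"

    def to_schema(self, obj: User, creating: bool) -> dict:
        return {"displayName": obj.name, "mail": obj.email}

    def create(self, obj: User) -> MicrosoftEntraProviderUser:
        remote_id = "placeholder"  # a real client would create the remote object here
        return MicrosoftEntraProviderUser.objects.create(
            provider=self.provider, user=obj, microsoft_id=remote_id
        )

    def update(self, obj: User, connection: MicrosoftEntraProviderUser):
        pass  # a real client would PATCH the remote object here

    def delete(self, obj: User):
        pass  # a real client would delete or suspend the remote object here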
41 authentik/lib/sync/outgoing/exceptions.py Normal file
@@ -0,0 +1,41 @@
from authentik.lib.sentry import SentryIgnoredException


class BaseSyncException(SentryIgnoredException):
    """Base class for all sync exceptions"""


class TransientSyncException(BaseSyncException):
    """Transient sync exception which may be caused by network blips, etc"""


class NotFoundSyncException(BaseSyncException):
    """Exception when an object was not found in the remote system"""


class ObjectExistsSyncException(BaseSyncException):
    """Exception when an object already exists in the remote system"""


class BadRequestSyncException(BaseSyncException):
    """Exception when invalid data was sent to the remote system"""


class StopSync(BaseSyncException):
    """Exception raised when a configuration error should stop the sync process"""

    def __init__(
        self, exc: Exception, obj: object | None = None, mapping: object | None = None
    ) -> None:
        self.exc = exc
        self.obj = obj
        self.mapping = mapping

    def detail(self) -> str:
        """Get human readable details of this error"""
        msg = f"Error {str(self.exc)}"
        if self.obj:
            msg += f", caused by {self.obj}"
        if self.mapping:
            msg += f" (mapping {self.mapping})"
        return msg
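Editor's note: a sketch of the intended division of labour between these exceptions when wrapping remote calls; the `fetch` callable and the specific caught errors are hypothetical:

from authentik.lib.sync.outgoing.exceptions import StopSync, TransientSyncException


def guarded(fetch):
    try:
        return fetch()
    except TimeoutError as exc:
        # transient: tasks declared with autoretry_for=(TransientSyncException,)
        # simply retry with backoff
        raise TransientSyncException("remote temporarily unreachable") from exc
    except ValueError as exc:
        # unrecoverable configuration problem: abort the run; StopSync.detail()
        # is what ends up in the task messages
        raise StopSync(exc) from exc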
41 authentik/lib/sync/outgoing/models.py Normal file
@@ -0,0 +1,41 @@
from typing import Any, Self

from django.core.cache import cache
from django.db.models import Model, QuerySet, TextChoices
from redis.lock import Lock

from authentik.core.models import Group, User
from authentik.lib.sync.outgoing import PAGE_TIMEOUT
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient


class OutgoingSyncDeleteAction(TextChoices):
    """Action taken when a user/group is deleted in authentik. Suspend is not available for groups,
    and will be treated as `do_nothing`"""

    DO_NOTHING = "do_nothing"
    DELETE = "delete"
    SUSPEND = "suspend"


class OutgoingSyncProvider(Model):

    class Meta:
        abstract = True

    def client_for_model[
        T: User | Group
    ](self, model: type[T]) -> BaseOutgoingSyncClient[T, Any, Any, Self]:
        raise NotImplementedError

    def get_object_qs[T: User | Group](self, type: type[T]) -> QuerySet[T]:
        raise NotImplementedError

    @property
    def sync_lock(self) -> Lock:
        """Redis lock to prevent multiple parallel syncs happening"""
        return Lock(
            cache.client.get_client(),
            name=f"goauthentik.io/providers/outgoing-sync/{str(self.pk)}",
            timeout=(60 * 60 * PAGE_TIMEOUT) * 3,
        )
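Editor's note: a sketch of the two hooks a concrete provider has to fill in; the queryset filters are illustrative, only the method signatures come from the class above:

from django.db.models import QuerySet

from authentik.core.models import Group, User
from authentik.lib.sync.outgoing.models import OutgoingSyncProvider


class ExampleProvider(OutgoingSyncProvider):
    class Meta:
        abstract = True  # kept abstract so this sketch creates no table

    def client_for_model[T: User | Group](self, model: type[T]):
        raise NotImplementedError  # would return a BaseOutgoingSyncClient subclass

    def get_object_qs[T: User | Group](self, type: type[T]) -> QuerySet[T]:
        if type == User:
            return User.objects.all().exclude_anonymous()
        return Group.objects.all()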
71 authentik/lib/sync/outgoing/signals.py Normal file
@@ -0,0 +1,71 @@
from collections.abc import Callable

from django.core.paginator import Paginator
from django.db.models import Model
from django.db.models.signals import m2m_changed, post_save, pre_delete

from authentik.core.models import Group, User
from authentik.lib.sync.outgoing import PAGE_SIZE, PAGE_TIMEOUT
from authentik.lib.sync.outgoing.base import Direction
from authentik.lib.sync.outgoing.models import OutgoingSyncProvider
from authentik.lib.utils.reflection import class_to_path


def register_signals(
    provider_type: type[OutgoingSyncProvider],
    task_sync_single: Callable[[int], None],
    task_sync_direct: Callable[[int], None],
    task_sync_m2m: Callable[[int], None],
):
    """Register sync signals"""
    uid = class_to_path(provider_type)

    def post_save_provider(sender: type[Model], instance: OutgoingSyncProvider, created: bool, **_):
        """Trigger sync when Provider is saved"""
        users_paginator = Paginator(instance.get_object_qs(User), PAGE_SIZE)
        groups_paginator = Paginator(instance.get_object_qs(Group), PAGE_SIZE)
        soft_time_limit = (users_paginator.num_pages + groups_paginator.num_pages) * PAGE_TIMEOUT
        time_limit = soft_time_limit * 1.5
        task_sync_single.apply_async(
            (instance.pk,), time_limit=int(time_limit), soft_time_limit=int(soft_time_limit)
        )

    post_save.connect(post_save_provider, provider_type, dispatch_uid=uid, weak=False)

    def model_post_save(sender: type[Model], instance: User | Group, created: bool, **_):
        """Post save handler"""
        if not provider_type.objects.filter(backchannel_application__isnull=False).exists():
            return
        task_sync_direct.delay(class_to_path(instance.__class__), instance.pk, Direction.add.value)

    post_save.connect(model_post_save, User, dispatch_uid=uid, weak=False)
    post_save.connect(model_post_save, Group, dispatch_uid=uid, weak=False)

    def model_pre_delete(sender: type[Model], instance: User | Group, **_):
        """Pre-delete handler"""
        if not provider_type.objects.filter(backchannel_application__isnull=False).exists():
            return
        task_sync_direct.delay(
            class_to_path(instance.__class__), instance.pk, Direction.remove.value
        ).get(propagate=False)

    pre_delete.connect(model_pre_delete, User, dispatch_uid=uid, weak=False)
    pre_delete.connect(model_pre_delete, Group, dispatch_uid=uid, weak=False)

    def model_m2m_changed(
        sender: type[Model], instance, action: str, pk_set: set, reverse: bool, **kwargs
    ):
        """Sync group membership"""
        if action not in ["post_add", "post_remove"]:
            return
        if not provider_type.objects.filter(backchannel_application__isnull=False).exists():
            return
        # reverse: instance is a Group, pk_set is a list of user pks
        # non-reverse: instance is a User, pk_set is a list of groups
        if reverse:
            task_sync_m2m.delay(str(instance.pk), action, list(pk_set))
        else:
            for group_pk in pk_set:
                task_sync_m2m.delay(group_pk, action, [instance.pk])

    m2m_changed.connect(model_m2m_changed, User.ak_groups.through, dispatch_uid=uid, weak=False)
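Editor's note: a sketch of wiring register_signals() from a provider app. microsoft_entra_sync and microsoft_entra_sync_m2m appear in this diff; microsoft_entra_sync_direct is assumed here for illustration and may be named differently upstream:

from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
from authentik.enterprise.providers.microsoft_entra.tasks import (
    microsoft_entra_sync,
    microsoft_entra_sync_direct,  # assumed name
    microsoft_entra_sync_m2m,
)
from authentik.lib.sync.outgoing.signals import register_signals

register_signals(
    MicrosoftEntraProvider,
    task_sync_single=microsoft_entra_sync,
    task_sync_direct=microsoft_entra_sync_direct,
    task_sync_m2m=microsoft_entra_sync_m2m,
)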
authentik/lib/sync/outgoing/tasks.py (new file, 245 lines)
@@ -0,0 +1,245 @@
from collections.abc import Callable

from celery.exceptions import Retry
from celery.result import allow_join_result
from django.core.paginator import Paginator
from django.db.models import Model, QuerySet
from django.utils.text import slugify
from django.utils.translation import gettext_lazy as _
from structlog.stdlib import BoundLogger, get_logger

from authentik.core.expression.exceptions import SkipObjectException
from authentik.core.models import Group, User
from authentik.events.logs import LogEvent
from authentik.events.models import TaskStatus
from authentik.events.system_tasks import SystemTask
from authentik.lib.sync.outgoing import PAGE_SIZE, PAGE_TIMEOUT
from authentik.lib.sync.outgoing.base import Direction
from authentik.lib.sync.outgoing.exceptions import (
    BadRequestSyncException,
    StopSync,
    TransientSyncException,
)
from authentik.lib.sync.outgoing.models import OutgoingSyncProvider
from authentik.lib.utils.reflection import class_to_path, path_to_class


class SyncTasks:
    """Container for all sync 'tasks' (this class doesn't actually contain celery
    tasks due to celery's magic, however exposes a number of functions to be called from tasks)"""

    logger: BoundLogger

    def __init__(self, provider_model: type[OutgoingSyncProvider]) -> None:
        super().__init__()
        self._provider_model = provider_model

    def sync_all(self, single_sync: Callable[[int], None]):
        for provider in self._provider_model.objects.filter(backchannel_application__isnull=False):
            self.trigger_single_task(provider, single_sync)

    def trigger_single_task(self, provider: OutgoingSyncProvider, sync_task: Callable[[int], None]):
        """Wrapper single sync task that correctly sets time limits based
        on the amount of objects that will be synced"""
        users_paginator = Paginator(provider.get_object_qs(User), PAGE_SIZE)
        groups_paginator = Paginator(provider.get_object_qs(Group), PAGE_SIZE)
        soft_time_limit = (users_paginator.num_pages + groups_paginator.num_pages) * PAGE_TIMEOUT
        time_limit = soft_time_limit * 1.5
        return sync_task.apply_async(
            (provider.pk,), time_limit=int(time_limit), soft_time_limit=int(soft_time_limit)
        )

    def sync_single(
        self,
        task: SystemTask,
        provider_pk: int,
        sync_objects: Callable[[int, int], list[str]],
    ):
        self.logger = get_logger().bind(
            provider_type=class_to_path(self._provider_model),
            provider_pk=provider_pk,
        )
        provider = self._provider_model.objects.filter(
            pk=provider_pk, backchannel_application__isnull=False
        ).first()
        if not provider:
            return
        lock = provider.sync_lock
        if lock.locked():
            self.logger.debug("Sync locked, skipping task", source=provider.name)
            return
        task.set_uid(slugify(provider.name))
        messages = []
        messages.append(_("Starting full provider sync"))
        self.logger.debug("Starting provider sync")
        users_paginator = Paginator(provider.get_object_qs(User), PAGE_SIZE)
        groups_paginator = Paginator(provider.get_object_qs(Group), PAGE_SIZE)
        with allow_join_result(), lock:
            try:
                for page in users_paginator.page_range:
                    messages.append(_("Syncing page %(page)d of users" % {"page": page}))
                    for msg in sync_objects.apply_async(
                        args=(class_to_path(User), page, provider_pk),
                        time_limit=PAGE_TIMEOUT,
                        soft_time_limit=PAGE_TIMEOUT,
                    ).get():
                        messages.append(msg)
                for page in groups_paginator.page_range:
                    messages.append(_("Syncing page %(page)d of groups" % {"page": page}))
                    for msg in sync_objects.apply_async(
                        args=(class_to_path(Group), page, provider_pk),
                        time_limit=PAGE_TIMEOUT,
                        soft_time_limit=PAGE_TIMEOUT,
                    ).get():
                        messages.append(msg)
            except TransientSyncException as exc:
                self.logger.warning("transient sync exception", exc=exc)
                raise task.retry(exc=exc) from exc
            except StopSync as exc:
                task.set_error(exc)
                return
        task.set_status(TaskStatus.SUCCESSFUL, *messages)

    def sync_objects(self, object_type: str, page: int, provider_pk: int):
        _object_type = path_to_class(object_type)
        self.logger = get_logger().bind(
            provider_type=class_to_path(self._provider_model),
            provider_pk=provider_pk,
            object_type=object_type,
        )
        messages = []
        provider = self._provider_model.objects.filter(pk=provider_pk).first()
        if not provider:
            return messages
        try:
            client = provider.client_for_model(_object_type)
        except TransientSyncException:
            return messages
        paginator = Paginator(provider.get_object_qs(_object_type), PAGE_SIZE)
        if client.can_discover:
            self.logger.debug("starting discover")
            client.discover()
        self.logger.debug("starting sync for page", page=page)
        for obj in paginator.page(page).object_list:
            obj: Model
            try:
                client.write(obj)
            except SkipObjectException:
                continue
            except BadRequestSyncException as exc:
                self.logger.warning("failed to sync object", exc=exc, obj=obj)
                messages.append(
                    LogEvent(
                        _(
                            (
                                "Failed to sync {object_type} {object_name} "
                                "due to error: {error}"
                            ).format_map(
                                {
                                    "object_type": obj._meta.verbose_name,
                                    "object_name": str(obj),
                                    "error": str(exc),
                                }
                            )
                        ),
                        log_level="warning",
                        logger="",
                        attributes={"arguments": exc.args[1:]},
                    )
                )
            except TransientSyncException as exc:
                self.logger.warning("failed to sync object", exc=exc, user=obj)
                messages.append(
                    LogEvent(
                        _(
                            (
                                "Failed to sync {object_type} {object_name} "
                                "due to transient error: {error}"
                            ).format_map(
                                {
                                    "object_type": obj._meta.verbose_name,
                                    "object_name": str(obj),
                                    "error": str(exc),
                                }
                            )
                        ),
                        log_level="warning",
                        logger="",
                    )
                )
            except StopSync as exc:
                self.logger.warning("Stopping sync", exc=exc)
                messages.append(
                    LogEvent(
                        _(
                            "Stopping sync due to error: {error}".format_map(
                                {
                                    "error": exc.detail(),
                                }
                            )
                        ),
                        log_level="warning",
                        logger="",
                    )
                )
                break
        return messages

    def sync_signal_direct(self, model: str, pk: str | int, raw_op: str):
        self.logger = get_logger().bind(
            provider_type=class_to_path(self._provider_model),
        )
        model_class: type[Model] = path_to_class(model)
        instance = model_class.objects.filter(pk=pk).first()
        if not instance:
            return
        operation = Direction(raw_op)
        for provider in self._provider_model.objects.filter(backchannel_application__isnull=False):
            client = provider.client_for_model(instance.__class__)
            # Check if the object is allowed within the provider's restrictions
            queryset = provider.get_object_qs(instance.__class__)
            if not queryset:
                continue

            # The queryset we get from the provider must include the instance we've got given
            # otherwise ignore this provider
            if not queryset.filter(pk=instance.pk).exists():
                continue

            try:
                if operation == Direction.add:
                    client.write(instance)
                if operation == Direction.remove:
                    client.delete(instance)
            except TransientSyncException as exc:
                raise Retry() from exc
            except StopSync as exc:
                self.logger.warning(exc, provider_pk=provider.pk)

    def sync_signal_m2m(self, group_pk: str, action: str, pk_set: list[int]):
        self.logger = get_logger().bind(
            provider_type=class_to_path(self._provider_model),
        )
        group = Group.objects.filter(pk=group_pk).first()
        if not group:
            return
        for provider in self._provider_model.objects.filter(backchannel_application__isnull=False):
            # Check if the object is allowed within the provider's restrictions
            queryset: QuerySet = provider.get_object_qs(Group)
            # The queryset we get from the provider must include the instance we've got given
            # otherwise ignore this provider
            if not queryset.filter(pk=group_pk).exists():
                continue

            client = provider.client_for_model(Group)
            try:
                operation = None
                if action == "post_add":
                    operation = Direction.add
                if action == "post_remove":
                    operation = Direction.remove
                client.update_group(group, operation, pk_set)
            except TransientSyncException as exc:
                raise Retry() from exc
            except StopSync as exc:
                self.logger.warning(exc, provider_pk=provider.pk)
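SyncTasks only provides the task bodies; as its docstring notes, each outgoing-sync provider is expected to bind these methods to its own Celery tasks. A minimal sketch of such wiring, assuming a hypothetical provider package (the module path myprovider, the model MyOutgoingProvider and the task names are illustrative, not taken from this diff):

```python
# Hypothetical wiring; MyOutgoingProvider and the myprovider package are placeholders.
from authentik.events.system_tasks import SystemTask
from authentik.lib.sync.outgoing.tasks import SyncTasks
from authentik.root.celery import CELERY_APP

from myprovider.models import MyOutgoingProvider

sync_tasks = SyncTasks(MyOutgoingProvider)


@CELERY_APP.task()
def myprovider_sync_objects(*args, **kwargs):
    # Invoked once per page of users/groups by sync_single() via apply_async().get()
    return sync_tasks.sync_objects(*args, **kwargs)


@CELERY_APP.task(bind=True, base=SystemTask)
def myprovider_sync(self, provider_pk: int, *args, **kwargs):
    """Full sync for a single provider instance"""
    return sync_tasks.sync_single(self, provider_pk, sync_objects=myprovider_sync_objects)


@CELERY_APP.task()
def myprovider_sync_all():
    # Fans out one myprovider_sync task per provider, via trigger_single_task()
    return sync_tasks.sync_all(myprovider_sync)
```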
@@ -24,7 +24,7 @@ def load_fixture(path: str, **kwargs) -> str:
         fixture = _fixture.read()
         try:
             return fixture % kwargs
-        except TypeError:
+        except (TypeError, ValueError):
             return fixture
 
 
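Catching ValueError here matters because %-style formatting raises ValueError, not TypeError, when a fixture contains a stray % that is not a valid conversion specifier, so such fixtures previously escaped the fallback. For example:

```python
# A literal "%" followed by a non-specifier character raises ValueError under %-formatting.
try:
    "progress: 100% for %(user)s" % {"user": "akadmin"}
except ValueError as exc:
    print(exc)  # e.g. unsupported format character ' ' (0x20)
```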
@@ -1,5 +1,9 @@
 """Serializer validators"""
 
+import re
+
+from django.core.validators import URLValidator
+from django.utils.regex_helper import _lazy_re_compile
 from django.utils.translation import gettext_lazy as _
 from rest_framework.exceptions import ValidationError
 from rest_framework.serializers import Serializer
@@ -29,3 +33,48 @@ class RequiredTogetherValidator:
 
     def __repr__(self):
         return f"<{self.__class__.__name__}(fields={smart_repr(self.fields)})>"
+
+
+class DomainlessURLValidator(URLValidator):
+    """Subclass of URLValidator which doesn't check the domain
+    (to allow hostnames without domain)"""
+
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+        self.host_re = "(" + self.hostname_re + self.domain_re + "|localhost)"
+        self.regex = _lazy_re_compile(
+            r"^(?:[a-z0-9.+-]*)://"  # scheme is validated separately
+            r"(?:[^\s:@/]+(?::[^\s:@/]*)?@)?"  # user:pass authentication
+            r"(?:" + self.ipv4_re + "|" + self.ipv6_re + "|" + self.host_re + ")"
+            r"(?::\d{2,5})?"  # port
+            r"(?:[/?#][^\s]*)?"  # resource path
+            r"\Z",
+            re.IGNORECASE,
+        )
+        self.schemes = ["http", "https", "blank"] + list(self.schemes)
+
+    def __call__(self, value: str):
+        # Check if the scheme is valid.
+        scheme = value.split("://")[0].lower()
+        if scheme not in self.schemes:
+            value = "default" + value
+        super().__call__(value)
+
+
+class DomainlessFormattedURLValidator(DomainlessURLValidator):
+    """URL validator which allows for python format strings"""
+
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+        self.formatter_re = r"([%\(\)a-zA-Z])*"
+        self.host_re = "(" + self.formatter_re + self.hostname_re + self.domain_re + "|localhost)"
+        self.regex = _lazy_re_compile(
+            r"^(?:[a-z0-9.+-]*)://"  # scheme is validated separately
+            r"(?:[^\s:@/]+(?::[^\s:@/]*)?@)?"  # user:pass authentication
+            r"(?:" + self.ipv4_re + "|" + self.ipv6_re + "|" + self.host_re + ")"
+            r"(?::\d{2,5})?"  # port
+            r"(?:[/?#][^\s]*)?"  # resource path
+            r"\Z",
+            re.IGNORECASE,
+        )
+        self.schemes = ["http", "https", "blank"] + list(self.schemes)
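The relaxed host pattern means single-label hosts (no dot, no TLD) validate, which Django's stock URLValidator rejects, and the Formatted variant additionally tolerates %()-style placeholders in the host. A quick sketch of the intended behaviour (the import path of these validators is whatever module the hunk above belongs to, which this excerpt does not name; the URLs are illustrative):

```python
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator

# Assumes DomainlessURLValidator / DomainlessFormattedURLValidator are imported
# from the validators module shown in the hunk above.
DomainlessURLValidator()("https://my-internal-host:8443/api/")  # passes: bare hostname is fine

try:
    URLValidator()("https://my-internal-host:8443/api/")        # stock validator wants a domain
except ValidationError:
    pass

# %()-style placeholders in the host are accepted by the formatted variant:
DomainlessFormattedURLValidator()("https://%(hostname)s/some/path")
```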
@@ -23,7 +23,6 @@ from authentik.outposts.models import (
     KubernetesServiceConnection,
     OutpostServiceConnection,
 )
-from authentik.rbac.filters import ObjectFilter
 
 
 class ServiceConnectionSerializer(ModelSerializer, MetaNameSerializer):
@@ -89,7 +88,7 @@ class ServiceConnectionViewSet(
         return Response(TypeCreateSerializer(data, many=True).data)
 
     @extend_schema(responses={200: ServiceConnectionStateSerializer(many=False)})
-    @action(detail=True, pagination_class=None, filter_backends=[ObjectFilter])
+    @action(detail=True, pagination_class=None, filter_backends=[])
     def state(self, request: Request, pk: str) -> Response:
         """Get the service connection's state"""
         connection = self.get_object()
authentik/outposts/migrations/0022_outpost_deleted_at.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 5.0.4 on 2024-04-23 21:00

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_outposts", "0021_alter_outpost_type"),
    ]

    operations = [
        migrations.AddField(
            model_name="outpost",
            name="deleted_at",
            field=models.DateTimeField(blank=True, null=True),
        ),
    ]
@@ -33,7 +33,7 @@ from authentik.core.models import (
 from authentik.crypto.models import CertificateKeyPair
 from authentik.events.models import Event, EventAction
 from authentik.lib.config import CONFIG
-from authentik.lib.models import InheritanceForeignKey, SerializerModel
+from authentik.lib.models import InheritanceForeignKey, SerializerModel, SoftDeleteModel
 from authentik.lib.sentry import SentryIgnoredException
 from authentik.lib.utils.errors import exception_to_string
 from authentik.outposts.controllers.k8s.utils import get_namespace
@@ -131,7 +131,7 @@ class OutpostServiceConnection(models.Model):
         verbose_name = _("Outpost Service-Connection")
         verbose_name_plural = _("Outpost Service-Connections")
 
-    def __str__(self) -> __version__:
+    def __str__(self):
         return f"Outpost service connection {self.name}"
 
     @property
@@ -241,7 +241,7 @@ class KubernetesServiceConnection(SerializerModel, OutpostServiceConnection):
         return "ak-service-connection-kubernetes-form"
 
 
-class Outpost(SerializerModel, ManagedModel):
+class Outpost(SoftDeleteModel, SerializerModel, ManagedModel):
     """Outpost instance which manages a service user and token"""
 
     uuid = models.UUIDField(default=uuid4, editable=False, primary_key=True)
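The Outpost changes in this diff lean on a SoftDeleteModel base class, a post_soft_delete signal and a force_delete() method from authentik.lib.models, none of which are shown in this excerpt. A minimal sketch of what such a base class could look like, consistent with how deleted_at, Outpost.deleted and force_delete() are used in the hunks here; the actual implementation may differ:

```python
# Sketch only: the real SoftDeleteModel lives in authentik/lib/models.py and is not shown here.
from django.db import models
from django.dispatch import Signal
from django.utils.timezone import now

post_soft_delete = Signal()


class SoftDeleteManager(models.Manager):
    """Default manager: hide rows that have been soft-deleted"""

    def get_queryset(self):
        return super().get_queryset().filter(deleted_at__isnull=True)


class DeletedManager(models.Manager):
    """`Model.deleted`: only soft-deleted rows (as used by outpost_controller below)"""

    def get_queryset(self):
        return super().get_queryset().filter(deleted_at__isnull=False)


class SoftDeleteModel(models.Model):
    """Flag rows as deleted instead of removing them immediately"""

    deleted_at = models.DateTimeField(blank=True, null=True)

    objects = SoftDeleteManager()
    deleted = DeletedManager()

    class Meta:
        abstract = True

    def delete(self, *args, **kwargs):
        # Mark the row, then notify listeners such as the outpost signal handler
        self.deleted_at = now()
        self.save(update_fields=["deleted_at"])
        post_soft_delete.send(sender=self.__class__, instance=self)

    def force_delete(self, *args, **kwargs):
        # Remove the row for real once cleanup (e.g. controller teardown) has run
        return super().delete(*args, **kwargs)
```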
@@ -2,13 +2,14 @@
 
 from django.core.cache import cache
 from django.db.models import Model
-from django.db.models.signals import m2m_changed, post_save, pre_delete, pre_save
+from django.db.models.signals import m2m_changed, post_save, pre_save
 from django.dispatch import receiver
 from structlog.stdlib import get_logger
 
 from authentik.brands.models import Brand
 from authentik.core.models import Provider
 from authentik.crypto.models import CertificateKeyPair
+from authentik.lib.models import post_soft_delete
 from authentik.lib.utils.reflection import class_to_path
 from authentik.outposts.models import Outpost, OutpostServiceConnection
 from authentik.outposts.tasks import CACHE_KEY_OUTPOST_DOWN, outpost_controller, outpost_post_save
@@ -67,9 +68,7 @@ def post_save_update(sender, instance: Model, created: bool, **_):
     outpost_post_save.delay(class_to_path(instance.__class__), instance.pk)
 
 
-@receiver(pre_delete, sender=Outpost)
-def pre_delete_cleanup(sender, instance: Outpost, **_):
+@receiver(post_soft_delete, sender=Outpost)
+def outpost_cleanup(sender, instance: Outpost, **_):
     """Ensure that Outpost's user is deleted (which will delete the token through cascade)"""
-    instance.user.delete()
-    cache.set(CACHE_KEY_OUTPOST_DOWN % instance.pk.hex, instance)
-    outpost_controller.delay(instance.pk.hex, action="down", from_cache=True)
+    outpost_controller.delay(instance.pk.hex, action="down")
@@ -129,17 +129,14 @@ def outpost_controller_all():
 
 
 @CELERY_APP.task(bind=True, base=SystemTask)
-def outpost_controller(
-    self: SystemTask, outpost_pk: str, action: str = "up", from_cache: bool = False
-):
+def outpost_controller(self: SystemTask, outpost_pk: str, action: str = "up"):
     """Create/update/monitor/delete the deployment of an Outpost"""
     logs = []
-    if from_cache:
-        outpost: Outpost = cache.get(CACHE_KEY_OUTPOST_DOWN % outpost_pk)
-        LOGGER.debug("Getting outpost from cache to delete")
-    else:
-        outpost: Outpost = Outpost.objects.filter(pk=outpost_pk).first()
-        LOGGER.debug("Getting outpost from DB")
+    outpost: Outpost = None
+    if action == "up":
+        outpost = Outpost.objects.filter(pk=outpost_pk).first()
+    elif action == "down":
+        outpost = Outpost.deleted.filter(pk=outpost_pk).first()
     if not outpost:
         LOGGER.warning("No outpost")
         return
@@ -155,9 +152,10 @@
     except (ControllerException, ServiceConnectionInvalid) as exc:
         self.set_error(exc)
     else:
-        if from_cache:
-            cache.delete(CACHE_KEY_OUTPOST_DOWN % outpost_pk)
         self.set_status(TaskStatus.SUCCESSFUL, *logs)
+    finally:
+        if outpost.deleted_at:
+            outpost.force_delete()
 
 
 @CELERY_APP.task(bind=True, base=SystemTask)
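Taken together (and assuming SoftDeleteModel behaves roughly like the sketch earlier), the cache-based teardown is gone: deleting an Outpost now only flags it, and the controller task removes the row once the "down" action has run. A narrated walkthrough using the names from the hunks above:

```python
# Assumed end-to-end flow after this change; step details follow the hunks above.
outpost = Outpost.objects.get(name="my-outpost")   # "my-outpost" is a placeholder
outpost.delete()
# 1. The soft delete sets deleted_at instead of removing the row and emits post_soft_delete.
# 2. outpost_cleanup() in authentik/outposts/signals.py receives the signal and queues
#    outpost_controller.delay(outpost.pk.hex, action="down").
# 3. outpost_controller() resolves the instance via Outpost.deleted (soft-deleted rows only),
#    runs the controller "down" action to tear down the deployment, and in its `finally`
#    block calls outpost.force_delete() because deleted_at is set.
```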
Some files were not shown because too many files have changed in this diff.