Compare commits
274 Commits
web/legibi
...
5165-passw
| Author | SHA1 | Date | |
|---|---|---|---|
| 5b9bb12822 | |||
| 8f09955d58 | |||
| 8f82dac84e | |||
| 89c08f2588 | |||
| 113d6cc45f | |||
| 05cfbca5f2 | |||
| 385f9bcbac | |||
| 86bc5b4cdb | |||
| 8d5d9f35ef | |||
| 439169d5e7 | |||
| e405b33f9f | |||
| a9c13d4d10 | |||
| 12b16b17a2 | |||
| 3473abee32 | |||
| b5a0b3a521 | |||
| 837a0325ca | |||
| b1050e8825 | |||
| 7bb90b1661 | |||
| 8f755785ea | |||
| adc0ec8ee4 | |||
| 9379b3d178 | |||
| 18eef8ae05 | |||
| f0e22fd08b | |||
| ce5297a6cd | |||
| 74c8df8782 | |||
| 2d897fac48 | |||
| 2eb47c9efa | |||
| 9852041f38 | |||
| 7f74936212 | |||
| 4f40b1e27c | |||
| 041e407153 | |||
| 31b891428e | |||
| 4ab8247847 | |||
| af24edf8c1 | |||
| f8bfd12e31 | |||
| 993fc4b77b | |||
| 3ee2be09bf | |||
| b3e3948f44 | |||
| 117a5cd88d | |||
| 342a40212e | |||
| 15ae11d9d5 | |||
| e11df56f21 | |||
| 3771af5282 | |||
| 0718053c56 | |||
| 3e5014bfea | |||
| 78f49ddc04 | |||
| 744bc54231 | |||
| 44ba30ad75 | |||
| 63b991f137 | |||
| 7f9acb8a2b | |||
| ce3ba32044 | |||
| 25e6a69331 | |||
| 78d07cc355 | |||
| 3c4df47fe3 | |||
| 8ed1805cb8 | |||
| 4d23db73ca | |||
| 72783953fb | |||
| 3b0fdb3dbd | |||
| 23161eed12 | |||
| 8918427588 | |||
| 5d858020f6 | |||
| 198e8b98a8 | |||
| 88e9c9b669 | |||
| 0c652a210d | |||
| 105a90d2e7 | |||
| 68f5abe318 | |||
| 59d4c18636 | |||
| b67e2a1144 | |||
| fc025651ce | |||
| 7cedc840b0 | |||
| 5ba731e48b | |||
| c792534a50 | |||
| a136fd8b54 | |||
| fb63c1f3e9 | |||
| 0b15ab3f27 | |||
| f9fd67c2b8 | |||
| 4ac19f9d44 | |||
| 74d3e92bac | |||
| 207fa13405 | |||
| 208e4a8bed | |||
| ee245ab390 | |||
| 60c8837082 | |||
| 6cf418a37e | |||
| 254761e930 | |||
| d85f8758fe | |||
| 84bfb37b60 | |||
| 578ff13868 | |||
| d12acb5bcd | |||
| 0e8b9a6409 | |||
| 6171443e61 | |||
| 5fedd616d9 | |||
| 5dd6498694 | |||
| cf5102ed20 | |||
| d3b2032c33 | |||
| 1e5df1c405 | |||
| 96eabe269c | |||
| 3e869a0ec7 | |||
| 7276a416f6 | |||
| a989390533 | |||
| 562c52a48b | |||
| c3cb9bc778 | |||
| 5f65a7c6cc | |||
| 95d26563e7 | |||
| 1cac1492d7 | |||
| 6c1ac48bd9 | |||
| 97f11f7aa8 | |||
| 6db763f7dc | |||
| 16b5f692ee | |||
| 80c1bd690c | |||
| 040dcaa9d6 | |||
| 66a16752e4 | |||
| 70c0e1be99 | |||
| 5beea4624f | |||
| 50fffa72cc | |||
| dae4bf0d6b | |||
| 823851652e | |||
| ae7f7c9930 | |||
| 5ce4ed4dd3 | |||
| 5582cc7745 | |||
| c384ed5f52 | |||
| 02e2ba8971 | |||
| 925d5c80df | |||
| 1de69a7bd6 | |||
| c6979a48e0 | |||
| 6e73d60305 | |||
| f388cac07c | |||
| cf593e5cb9 | |||
| c3a98e5d5f | |||
| 1048729599 | |||
| 72442b37e5 | |||
| 211cdb3a21 | |||
| 4cca16750e | |||
| b2d261dd1c | |||
| 0663100429 | |||
| 66c3261eeb | |||
| bf7570bc36 | |||
| 20b52d0dbd | |||
| a1f5e284c4 | |||
| 0e4737d38f | |||
| 609b10f7f8 | |||
| 2cff3d15e7 | |||
| 4f1d49417c | |||
| 0766a47b4f | |||
| bd1ddfebd6 | |||
| a841743c74 | |||
| 0974456ac8 | |||
| d44d5a44a1 | |||
| edf5c8686a | |||
| 70ace8b209 | |||
| c3509e63af | |||
| 89b8206176 | |||
| 908d87c142 | |||
| 4ab4e81fb0 | |||
| 6dae1a4fe7 | |||
| d11de73e95 | |||
| b08fb5fdf1 | |||
| 3c9e8c7287 | |||
| 691d0be41e | |||
| dfbaccbab6 | |||
| f3bdb189f6 | |||
| 85b3523639 | |||
| 9ff61a7120 | |||
| f742b986a7 | |||
| 177bdfa689 | |||
| c3445374c2 | |||
| c2da6822dc | |||
| 493294ef9f | |||
| 17f807e8b0 | |||
| 96eb98500c | |||
| ddd75f6d09 | |||
| fbad02fac1 | |||
| fbab822db1 | |||
| d8316eea9b | |||
| 8182c9f7c2 | |||
| 5d94b97e97 | |||
| 35ddbb6d75 | |||
| 2b8bc38fc3 | |||
| 9b0b504531 | |||
| c312430007 | |||
| 4e65c205e3 | |||
| 372a66c876 | |||
| 3630349388 | |||
| 347746cbcd | |||
| ef2e1ad27b | |||
| 8a6b34eb5c | |||
| 26f72bcac4 | |||
| f04466b3be | |||
| 4ba53d2f08 | |||
| 7a13046a27 | |||
| 939e2c1edd | |||
| cf06b4177a | |||
| f8079d63fa | |||
| 576a56c562 | |||
| cf9b14213e | |||
| 73cbdb77ed | |||
| fd66be9fa2 | |||
| 96bf9ee898 | |||
| 6c4c535d57 | |||
| 0ed4bba5a5 | |||
| 6e31e5b889 | |||
| a5467c6e19 | |||
| 09832355e3 | |||
| 6ffef878f0 | |||
| 644090dc58 | |||
| d07508b9a4 | |||
| 44d7e81a93 | |||
| 2e91b9d035 | |||
| 964c6a1050 | |||
| 90a1c5ab85 | |||
| 8162c1ec86 | |||
| ab46610d9b | |||
| 6909b58279 | |||
| 6d7a06227f | |||
| 1459a13991 | |||
| 1921ce39f6 | |||
| 263cff6393 | |||
| 5a61688472 | |||
| 919b56c466 | |||
| db7bc8b7ad | |||
| 5768cb5858 | |||
| 5b77bc33c7 | |||
| 93650e925a | |||
| 83823068fe | |||
| d922f41438 | |||
| ef3d545d7a | |||
| e9efbb2161 | |||
| a3634ab21d | |||
| f28209548b | |||
| 9589e04072 | |||
| 6490703ba3 | |||
| ca2fce05f5 | |||
| 9dc813d9ab | |||
| 833c66a9dd | |||
| 5d54f696d4 | |||
| 0746652995 | |||
| a1a55c644a | |||
| fce57d258e | |||
| 17e30b7adc | |||
| c5b3f8a578 | |||
| 3a3619fa18 | |||
| 21b933efff | |||
| 97fc2cba69 | |||
| 7ef627d476 | |||
| d16c603499 | |||
| 99a69bb52f | |||
| ac8192d660 | |||
| cdf3449230 | |||
| ef2a40ed7d | |||
| 09cacbd76b | |||
| cb33f0d1e2 | |||
| 90af4b29a6 | |||
| 6b9158591e | |||
| 9c15cda191 | |||
| 046b8d5cbf | |||
| 8b74b83983 | |||
| 8de038b387 | |||
| 2edc651582 | |||
| 85594a119c | |||
| 1a97ccea03 | |||
| 99ad492951 | |||
| ff4ec6f9b4 | |||
| 0c49de67b8 | |||
| 0d73528ec7 | |||
| 80ca4e5722 | |||
| 1ebe200a46 | |||
| 5683c81f27 | |||
| f0477309d5 | |||
| 465820b002 | |||
| a75c9434d9 | |||
| 4ea9b69ab5 | |||
| c48eee0ebf | |||
| 0d94373f10 | |||
| 1c85dc512f | |||
| a71778651f |
2
.github/actions/setup/docker-compose.yml
vendored
2
.github/actions/setup/docker-compose.yml
vendored
@ -1,5 +1,3 @@
|
|||||||
version: "3.7"
|
|
||||||
|
|
||||||
services:
|
services:
|
||||||
postgresql:
|
postgresql:
|
||||||
image: docker.io/library/postgres:${PSQL_TAG:-16}
|
image: docker.io/library/postgres:${PSQL_TAG:-16}
|
||||||
|
|||||||
1
.github/codespell-words.txt
vendored
1
.github/codespell-words.txt
vendored
@ -4,3 +4,4 @@ hass
|
|||||||
warmup
|
warmup
|
||||||
ontext
|
ontext
|
||||||
singed
|
singed
|
||||||
|
assertIn
|
||||||
|
|||||||
6
.github/workflows/ci-main.yml
vendored
6
.github/workflows/ci-main.yml
vendored
@ -50,7 +50,6 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
psql:
|
psql:
|
||||||
- 12-alpine
|
|
||||||
- 15-alpine
|
- 15-alpine
|
||||||
- 16-alpine
|
- 16-alpine
|
||||||
steps:
|
steps:
|
||||||
@ -104,7 +103,6 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
psql:
|
psql:
|
||||||
- 12-alpine
|
|
||||||
- 15-alpine
|
- 15-alpine
|
||||||
- 16-alpine
|
- 16-alpine
|
||||||
steps:
|
steps:
|
||||||
@ -252,8 +250,8 @@ jobs:
|
|||||||
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
||||||
build-args: |
|
build-args: |
|
||||||
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
|
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
|
||||||
cache-from: type=gha
|
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache
|
||||||
cache-to: type=gha,mode=max
|
cache-to: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max
|
||||||
platforms: linux/${{ matrix.arch }}
|
platforms: linux/${{ matrix.arch }}
|
||||||
pr-comment:
|
pr-comment:
|
||||||
needs:
|
needs:
|
||||||
|
|||||||
4
.github/workflows/ci-outpost.yml
vendored
4
.github/workflows/ci-outpost.yml
vendored
@ -105,8 +105,8 @@ jobs:
|
|||||||
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
|
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
|
||||||
platforms: linux/amd64,linux/arm64
|
platforms: linux/amd64,linux/arm64
|
||||||
context: .
|
context: .
|
||||||
cache-from: type=gha
|
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
|
||||||
cache-to: type=gha,mode=max
|
cache-to: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache,mode=max
|
||||||
build-binary:
|
build-binary:
|
||||||
timeout-minutes: 120
|
timeout-minutes: 120
|
||||||
needs:
|
needs:
|
||||||
|
|||||||
15
.vscode/settings.json
vendored
15
.vscode/settings.json
vendored
@ -4,20 +4,21 @@
|
|||||||
"asgi",
|
"asgi",
|
||||||
"authentik",
|
"authentik",
|
||||||
"authn",
|
"authn",
|
||||||
|
"entra",
|
||||||
"goauthentik",
|
"goauthentik",
|
||||||
"jwks",
|
"jwks",
|
||||||
|
"kubernetes",
|
||||||
"oidc",
|
"oidc",
|
||||||
"openid",
|
"openid",
|
||||||
|
"passwordless",
|
||||||
"plex",
|
"plex",
|
||||||
"saml",
|
"saml",
|
||||||
"totp",
|
|
||||||
"webauthn",
|
|
||||||
"traefik",
|
|
||||||
"passwordless",
|
|
||||||
"kubernetes",
|
|
||||||
"sso",
|
|
||||||
"slo",
|
|
||||||
"scim",
|
"scim",
|
||||||
|
"slo",
|
||||||
|
"sso",
|
||||||
|
"totp",
|
||||||
|
"traefik",
|
||||||
|
"webauthn",
|
||||||
],
|
],
|
||||||
"todo-tree.tree.showCountsInTree": true,
|
"todo-tree.tree.showCountsInTree": true,
|
||||||
"todo-tree.tree.showBadges": true,
|
"todo-tree.tree.showBadges": true,
|
||||||
|
|||||||
30
Dockerfile
30
Dockerfile
@ -38,7 +38,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
|
|||||||
RUN npm run build
|
RUN npm run build
|
||||||
|
|
||||||
# Stage 3: Build go proxy
|
# Stage 3: Build go proxy
|
||||||
FROM --platform=${BUILDPLATFORM} docker.io/golang:1.22.3-bookworm AS go-builder
|
FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.22-fips-bookworm AS go-builder
|
||||||
|
|
||||||
ARG TARGETOS
|
ARG TARGETOS
|
||||||
ARG TARGETARCH
|
ARG TARGETARCH
|
||||||
@ -49,6 +49,11 @@ ARG GOARCH=$TARGETARCH
|
|||||||
|
|
||||||
WORKDIR /go/src/goauthentik.io
|
WORKDIR /go/src/goauthentik.io
|
||||||
|
|
||||||
|
RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
|
||||||
|
dpkg --add-architecture arm64 && \
|
||||||
|
apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends crossbuild-essential-arm64 gcc-aarch64-linux-gnu
|
||||||
|
|
||||||
RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
|
RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
|
||||||
--mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \
|
--mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \
|
||||||
--mount=type=cache,target=/go/pkg/mod \
|
--mount=type=cache,target=/go/pkg/mod \
|
||||||
@ -63,11 +68,11 @@ COPY ./internal /go/src/goauthentik.io/internal
|
|||||||
COPY ./go.mod /go/src/goauthentik.io/go.mod
|
COPY ./go.mod /go/src/goauthentik.io/go.mod
|
||||||
COPY ./go.sum /go/src/goauthentik.io/go.sum
|
COPY ./go.sum /go/src/goauthentik.io/go.sum
|
||||||
|
|
||||||
ENV CGO_ENABLED=0
|
|
||||||
|
|
||||||
RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
|
RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
|
||||||
--mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
|
--mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
|
||||||
GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server
|
if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \
|
||||||
|
CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \
|
||||||
|
go build -o /go/authentik ./cmd/server
|
||||||
|
|
||||||
# Stage 4: MaxMind GeoIP
|
# Stage 4: MaxMind GeoIP
|
||||||
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 as geoip
|
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 as geoip
|
||||||
@ -84,7 +89,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
|
|||||||
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
|
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
|
||||||
|
|
||||||
# Stage 5: Python dependencies
|
# Stage 5: Python dependencies
|
||||||
FROM docker.io/python:3.12.3-slim-bookworm AS python-deps
|
FROM ghcr.io/goauthentik/fips-python:3.12.3-slim-bookworm-fips-full AS python-deps
|
||||||
|
|
||||||
WORKDIR /ak-root/poetry
|
WORKDIR /ak-root/poetry
|
||||||
|
|
||||||
@ -97,7 +102,7 @@ RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloa
|
|||||||
RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
|
RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
|
||||||
apt-get update && \
|
apt-get update && \
|
||||||
# Required for installing pip packages
|
# Required for installing pip packages
|
||||||
apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev
|
apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev
|
||||||
|
|
||||||
RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
|
RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
|
||||||
--mount=type=bind,target=./poetry.lock,src=./poetry.lock \
|
--mount=type=bind,target=./poetry.lock,src=./poetry.lock \
|
||||||
@ -105,12 +110,13 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
|
|||||||
--mount=type=cache,target=/root/.cache/pypoetry \
|
--mount=type=cache,target=/root/.cache/pypoetry \
|
||||||
python -m venv /ak-root/venv/ && \
|
python -m venv /ak-root/venv/ && \
|
||||||
bash -c "source ${VENV_PATH}/bin/activate && \
|
bash -c "source ${VENV_PATH}/bin/activate && \
|
||||||
pip3 install --upgrade pip && \
|
pip3 install --upgrade pip && \
|
||||||
pip3 install poetry && \
|
pip3 install poetry && \
|
||||||
poetry install --only=main --no-ansi --no-interaction --no-root"
|
poetry install --only=main --no-ansi --no-interaction --no-root && \
|
||||||
|
pip install --force-reinstall /wheels/*"
|
||||||
|
|
||||||
# Stage 6: Run
|
# Stage 6: Run
|
||||||
FROM docker.io/python:3.12.3-slim-bookworm AS final-image
|
FROM ghcr.io/goauthentik/fips-python:3.12.3-slim-bookworm-fips-full AS final-image
|
||||||
|
|
||||||
ARG GIT_BUILD_HASH
|
ARG GIT_BUILD_HASH
|
||||||
ARG VERSION
|
ARG VERSION
|
||||||
@ -127,7 +133,7 @@ WORKDIR /
|
|||||||
# We cannot cache this layer otherwise we'll end up with a bigger image
|
# We cannot cache this layer otherwise we'll end up with a bigger image
|
||||||
RUN apt-get update && \
|
RUN apt-get update && \
|
||||||
# Required for runtime
|
# Required for runtime
|
||||||
apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 ca-certificates && \
|
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates && \
|
||||||
# Required for bootstrap & healtcheck
|
# Required for bootstrap & healtcheck
|
||||||
apt-get install -y --no-install-recommends runit && \
|
apt-get install -y --no-install-recommends runit && \
|
||||||
apt-get clean && \
|
apt-get clean && \
|
||||||
@ -163,6 +169,8 @@ ENV TMPDIR=/dev/shm/ \
|
|||||||
VENV_PATH="/ak-root/venv" \
|
VENV_PATH="/ak-root/venv" \
|
||||||
POETRY_VIRTUALENVS_CREATE=false
|
POETRY_VIRTUALENVS_CREATE=false
|
||||||
|
|
||||||
|
ENV GOFIPS=1
|
||||||
|
|
||||||
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]
|
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]
|
||||||
|
|
||||||
ENTRYPOINT [ "dumb-init", "--", "ak" ]
|
ENTRYPOINT [ "dumb-init", "--", "ak" ]
|
||||||
|
|||||||
1
Makefile
1
Makefile
@ -253,6 +253,7 @@ website-watch: ## Build and watch the documentation website, updating automatic
|
|||||||
#########################
|
#########################
|
||||||
|
|
||||||
docker: ## Build a docker image of the current source tree
|
docker: ## Build a docker image of the current source tree
|
||||||
|
mkdir -p ${GEN_API_TS}
|
||||||
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
|
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
|
||||||
|
|
||||||
#########################
|
#########################
|
||||||
|
|||||||
@ -2,17 +2,19 @@
|
|||||||
|
|
||||||
import platform
|
import platform
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
from ssl import OPENSSL_VERSION
|
||||||
from sys import version as python_version
|
from sys import version as python_version
|
||||||
from typing import TypedDict
|
from typing import TypedDict
|
||||||
|
|
||||||
|
from cryptography.hazmat.backends.openssl.backend import backend
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
from drf_spectacular.utils import extend_schema
|
from drf_spectacular.utils import extend_schema
|
||||||
from gunicorn import version_info as gunicorn_version
|
|
||||||
from rest_framework.fields import SerializerMethodField
|
from rest_framework.fields import SerializerMethodField
|
||||||
from rest_framework.request import Request
|
from rest_framework.request import Request
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from rest_framework.views import APIView
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
|
from authentik import get_full_version
|
||||||
from authentik.core.api.utils import PassiveSerializer
|
from authentik.core.api.utils import PassiveSerializer
|
||||||
from authentik.lib.config import CONFIG
|
from authentik.lib.config import CONFIG
|
||||||
from authentik.lib.utils.reflection import get_env
|
from authentik.lib.utils.reflection import get_env
|
||||||
@ -25,11 +27,13 @@ class RuntimeDict(TypedDict):
|
|||||||
"""Runtime information"""
|
"""Runtime information"""
|
||||||
|
|
||||||
python_version: str
|
python_version: str
|
||||||
gunicorn_version: str
|
|
||||||
environment: str
|
environment: str
|
||||||
architecture: str
|
architecture: str
|
||||||
platform: str
|
platform: str
|
||||||
uname: str
|
uname: str
|
||||||
|
openssl_version: str
|
||||||
|
openssl_fips_mode: bool
|
||||||
|
authentik_version: str
|
||||||
|
|
||||||
|
|
||||||
class SystemInfoSerializer(PassiveSerializer):
|
class SystemInfoSerializer(PassiveSerializer):
|
||||||
@ -64,11 +68,13 @@ class SystemInfoSerializer(PassiveSerializer):
|
|||||||
def get_runtime(self, request: Request) -> RuntimeDict:
|
def get_runtime(self, request: Request) -> RuntimeDict:
|
||||||
"""Get versions"""
|
"""Get versions"""
|
||||||
return {
|
return {
|
||||||
"python_version": python_version,
|
|
||||||
"gunicorn_version": ".".join(str(x) for x in gunicorn_version),
|
|
||||||
"environment": get_env(),
|
|
||||||
"architecture": platform.machine(),
|
"architecture": platform.machine(),
|
||||||
|
"authentik_version": get_full_version(),
|
||||||
|
"environment": get_env(),
|
||||||
|
"openssl_fips_enabled": backend._fips_enabled,
|
||||||
|
"openssl_version": OPENSSL_VERSION,
|
||||||
"platform": platform.platform(),
|
"platform": platform.platform(),
|
||||||
|
"python_version": python_version,
|
||||||
"uname": " ".join(platform.uname()),
|
"uname": " ".join(platform.uname()),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -75,7 +75,7 @@ class BlueprintEntry:
|
|||||||
_state: BlueprintEntryState = field(default_factory=BlueprintEntryState)
|
_state: BlueprintEntryState = field(default_factory=BlueprintEntryState)
|
||||||
|
|
||||||
def __post_init__(self, *args, **kwargs) -> None:
|
def __post_init__(self, *args, **kwargs) -> None:
|
||||||
self.__tag_contexts: list["YAMLTagContext"] = []
|
self.__tag_contexts: list[YAMLTagContext] = []
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry":
|
def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry":
|
||||||
|
|||||||
@ -39,6 +39,14 @@ from authentik.core.models import (
|
|||||||
)
|
)
|
||||||
from authentik.enterprise.license import LicenseKey
|
from authentik.enterprise.license import LicenseKey
|
||||||
from authentik.enterprise.models import LicenseUsage
|
from authentik.enterprise.models import LicenseUsage
|
||||||
|
from authentik.enterprise.providers.google_workspace.models import (
|
||||||
|
GoogleWorkspaceProviderGroup,
|
||||||
|
GoogleWorkspaceProviderUser,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.models import (
|
||||||
|
MicrosoftEntraProviderGroup,
|
||||||
|
MicrosoftEntraProviderUser,
|
||||||
|
)
|
||||||
from authentik.enterprise.providers.rac.models import ConnectionToken
|
from authentik.enterprise.providers.rac.models import ConnectionToken
|
||||||
from authentik.events.logs import LogEvent, capture_logs
|
from authentik.events.logs import LogEvent, capture_logs
|
||||||
from authentik.events.models import SystemTask
|
from authentik.events.models import SystemTask
|
||||||
@ -50,7 +58,7 @@ from authentik.outposts.models import OutpostServiceConnection
|
|||||||
from authentik.policies.models import Policy, PolicyBindingModel
|
from authentik.policies.models import Policy, PolicyBindingModel
|
||||||
from authentik.policies.reputation.models import Reputation
|
from authentik.policies.reputation.models import Reputation
|
||||||
from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
|
from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
|
||||||
from authentik.providers.scim.models import SCIMGroup, SCIMUser
|
from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
|
||||||
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
|
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
|
||||||
from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
|
from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
|
||||||
from authentik.tenants.models import Tenant
|
from authentik.tenants.models import Tenant
|
||||||
@ -86,10 +94,11 @@ def excluded_models() -> list[type[Model]]:
|
|||||||
# Classes that have other dependencies
|
# Classes that have other dependencies
|
||||||
AuthenticatedSession,
|
AuthenticatedSession,
|
||||||
# Classes which are only internally managed
|
# Classes which are only internally managed
|
||||||
|
# FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin
|
||||||
FlowToken,
|
FlowToken,
|
||||||
LicenseUsage,
|
LicenseUsage,
|
||||||
SCIMGroup,
|
SCIMProviderGroup,
|
||||||
SCIMUser,
|
SCIMProviderUser,
|
||||||
Tenant,
|
Tenant,
|
||||||
SystemTask,
|
SystemTask,
|
||||||
ConnectionToken,
|
ConnectionToken,
|
||||||
@ -100,6 +109,10 @@ def excluded_models() -> list[type[Model]]:
|
|||||||
WebAuthnDeviceType,
|
WebAuthnDeviceType,
|
||||||
SCIMSourceUser,
|
SCIMSourceUser,
|
||||||
SCIMSourceGroup,
|
SCIMSourceGroup,
|
||||||
|
GoogleWorkspaceProviderUser,
|
||||||
|
GoogleWorkspaceProviderGroup,
|
||||||
|
MicrosoftEntraProviderUser,
|
||||||
|
MicrosoftEntraProviderGroup,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -2,6 +2,7 @@
|
|||||||
|
|
||||||
from json import loads
|
from json import loads
|
||||||
|
|
||||||
|
from django.db.models import Prefetch
|
||||||
from django.http import Http404
|
from django.http import Http404
|
||||||
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
|
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
|
||||||
from django_filters.filterset import FilterSet
|
from django_filters.filterset import FilterSet
|
||||||
@ -166,8 +167,14 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
|
|||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
base_qs = Group.objects.all().select_related("parent").prefetch_related("roles")
|
base_qs = Group.objects.all().select_related("parent").prefetch_related("roles")
|
||||||
|
|
||||||
if self.serializer_class(context={"request": self.request})._should_include_users:
|
if self.serializer_class(context={"request": self.request})._should_include_users:
|
||||||
base_qs = base_qs.prefetch_related("users")
|
base_qs = base_qs.prefetch_related("users")
|
||||||
|
else:
|
||||||
|
base_qs = base_qs.prefetch_related(
|
||||||
|
Prefetch("users", queryset=User.objects.all().only("id"))
|
||||||
|
)
|
||||||
|
|
||||||
return base_qs
|
return base_qs
|
||||||
|
|
||||||
@extend_schema(
|
@extend_schema(
|
||||||
@ -178,6 +185,14 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
|
|||||||
def list(self, request, *args, **kwargs):
|
def list(self, request, *args, **kwargs):
|
||||||
return super().list(request, *args, **kwargs)
|
return super().list(request, *args, **kwargs)
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
parameters=[
|
||||||
|
OpenApiParameter("include_users", bool, default=True),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
def retrieve(self, request, *args, **kwargs):
|
||||||
|
return super().retrieve(request, *args, **kwargs)
|
||||||
|
|
||||||
@permission_required("authentik_core.add_user_to_group")
|
@permission_required("authentik_core.add_user_to_group")
|
||||||
@extend_schema(
|
@extend_schema(
|
||||||
request=UserAccountSerializer,
|
request=UserAccountSerializer,
|
||||||
|
|||||||
79
authentik/core/api/object_types.py
Normal file
79
authentik/core/api/object_types.py
Normal file
@ -0,0 +1,79 @@
|
|||||||
|
"""API Utilities"""
|
||||||
|
|
||||||
|
from drf_spectacular.utils import extend_schema
|
||||||
|
from rest_framework.decorators import action
|
||||||
|
from rest_framework.fields import (
|
||||||
|
BooleanField,
|
||||||
|
CharField,
|
||||||
|
)
|
||||||
|
from rest_framework.request import Request
|
||||||
|
from rest_framework.response import Response
|
||||||
|
|
||||||
|
from authentik.core.api.utils import PassiveSerializer
|
||||||
|
from authentik.enterprise.apps import EnterpriseConfig
|
||||||
|
from authentik.lib.utils.reflection import all_subclasses
|
||||||
|
|
||||||
|
|
||||||
|
class TypeCreateSerializer(PassiveSerializer):
|
||||||
|
"""Types of an object that can be created"""
|
||||||
|
|
||||||
|
name = CharField(required=True)
|
||||||
|
description = CharField(required=True)
|
||||||
|
component = CharField(required=True)
|
||||||
|
model_name = CharField(required=True)
|
||||||
|
|
||||||
|
icon_url = CharField(required=False)
|
||||||
|
requires_enterprise = BooleanField(default=False)
|
||||||
|
|
||||||
|
|
||||||
|
class CreatableType:
|
||||||
|
"""Class to inherit from to mark a model as creatable, even if the model itself is marked
|
||||||
|
as abstract"""
|
||||||
|
|
||||||
|
|
||||||
|
class NonCreatableType:
|
||||||
|
"""Class to inherit from to mark a model as non-creatable even if it is not abstract"""
|
||||||
|
|
||||||
|
|
||||||
|
class TypesMixin:
|
||||||
|
"""Mixin which adds an API endpoint to list all possible types that can be created"""
|
||||||
|
|
||||||
|
@extend_schema(responses={200: TypeCreateSerializer(many=True)})
|
||||||
|
@action(detail=False, pagination_class=None, filter_backends=[])
|
||||||
|
def types(self, request: Request, additional: list[dict] | None = None) -> Response:
|
||||||
|
"""Get all creatable types"""
|
||||||
|
data = []
|
||||||
|
for subclass in all_subclasses(self.queryset.model):
|
||||||
|
instance = None
|
||||||
|
if subclass._meta.abstract:
|
||||||
|
if not issubclass(subclass, CreatableType):
|
||||||
|
continue
|
||||||
|
# Circumvent the django protection for not being able to instantiate
|
||||||
|
# abstract models. We need a model instance to access .component
|
||||||
|
# and further down .icon_url
|
||||||
|
instance = subclass.__new__(subclass)
|
||||||
|
# Django re-sets abstract = False so we need to override that
|
||||||
|
instance.Meta.abstract = True
|
||||||
|
else:
|
||||||
|
if issubclass(subclass, NonCreatableType):
|
||||||
|
continue
|
||||||
|
instance = subclass()
|
||||||
|
try:
|
||||||
|
data.append(
|
||||||
|
{
|
||||||
|
"name": subclass._meta.verbose_name,
|
||||||
|
"description": subclass.__doc__,
|
||||||
|
"component": instance.component,
|
||||||
|
"model_name": subclass._meta.model_name,
|
||||||
|
"icon_url": getattr(instance, "icon_url", None),
|
||||||
|
"requires_enterprise": isinstance(
|
||||||
|
subclass._meta.app_config, EnterpriseConfig
|
||||||
|
),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except NotImplementedError:
|
||||||
|
continue
|
||||||
|
if additional:
|
||||||
|
data.extend(additional)
|
||||||
|
data = sorted(data, key=lambda x: x["name"])
|
||||||
|
return Response(TypeCreateSerializer(data, many=True).data)
|
||||||
@ -9,18 +9,22 @@ from rest_framework import mixins
|
|||||||
from rest_framework.decorators import action
|
from rest_framework.decorators import action
|
||||||
from rest_framework.exceptions import PermissionDenied
|
from rest_framework.exceptions import PermissionDenied
|
||||||
from rest_framework.fields import BooleanField, CharField
|
from rest_framework.fields import BooleanField, CharField
|
||||||
|
from rest_framework.relations import PrimaryKeyRelatedField
|
||||||
from rest_framework.request import Request
|
from rest_framework.request import Request
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from rest_framework.serializers import ModelSerializer, SerializerMethodField
|
from rest_framework.serializers import ModelSerializer, SerializerMethodField
|
||||||
from rest_framework.viewsets import GenericViewSet
|
from rest_framework.viewsets import GenericViewSet
|
||||||
|
|
||||||
from authentik.blueprints.api import ManagedSerializer
|
from authentik.blueprints.api import ManagedSerializer
|
||||||
|
from authentik.core.api.object_types import TypesMixin
|
||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
from authentik.core.api.utils import MetaNameSerializer, PassiveSerializer, TypeCreateSerializer
|
from authentik.core.api.utils import (
|
||||||
|
MetaNameSerializer,
|
||||||
|
PassiveSerializer,
|
||||||
|
)
|
||||||
from authentik.core.expression.evaluator import PropertyMappingEvaluator
|
from authentik.core.expression.evaluator import PropertyMappingEvaluator
|
||||||
from authentik.core.models import PropertyMapping
|
from authentik.core.models import Group, PropertyMapping, User
|
||||||
from authentik.events.utils import sanitize_item
|
from authentik.events.utils import sanitize_item
|
||||||
from authentik.lib.utils.reflection import all_subclasses
|
|
||||||
from authentik.policies.api.exec import PolicyTestSerializer
|
from authentik.policies.api.exec import PolicyTestSerializer
|
||||||
from authentik.rbac.decorators import permission_required
|
from authentik.rbac.decorators import permission_required
|
||||||
|
|
||||||
@ -64,6 +68,7 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri
|
|||||||
|
|
||||||
|
|
||||||
class PropertyMappingViewSet(
|
class PropertyMappingViewSet(
|
||||||
|
TypesMixin,
|
||||||
mixins.RetrieveModelMixin,
|
mixins.RetrieveModelMixin,
|
||||||
mixins.DestroyModelMixin,
|
mixins.DestroyModelMixin,
|
||||||
UsedByMixin,
|
UsedByMixin,
|
||||||
@ -72,7 +77,15 @@ class PropertyMappingViewSet(
|
|||||||
):
|
):
|
||||||
"""PropertyMapping Viewset"""
|
"""PropertyMapping Viewset"""
|
||||||
|
|
||||||
queryset = PropertyMapping.objects.none()
|
class PropertyMappingTestSerializer(PolicyTestSerializer):
|
||||||
|
"""Test property mapping execution for a user/group with context"""
|
||||||
|
|
||||||
|
user = PrimaryKeyRelatedField(queryset=User.objects.all(), required=False, allow_null=True)
|
||||||
|
group = PrimaryKeyRelatedField(
|
||||||
|
queryset=Group.objects.all(), required=False, allow_null=True
|
||||||
|
)
|
||||||
|
|
||||||
|
queryset = PropertyMapping.objects.select_subclasses()
|
||||||
serializer_class = PropertyMappingSerializer
|
serializer_class = PropertyMappingSerializer
|
||||||
search_fields = [
|
search_fields = [
|
||||||
"name",
|
"name",
|
||||||
@ -80,29 +93,9 @@ class PropertyMappingViewSet(
|
|||||||
filterset_fields = {"managed": ["isnull"]}
|
filterset_fields = {"managed": ["isnull"]}
|
||||||
ordering = ["name"]
|
ordering = ["name"]
|
||||||
|
|
||||||
def get_queryset(self): # pragma: no cover
|
|
||||||
return PropertyMapping.objects.select_subclasses()
|
|
||||||
|
|
||||||
@extend_schema(responses={200: TypeCreateSerializer(many=True)})
|
|
||||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
|
||||||
def types(self, request: Request) -> Response:
|
|
||||||
"""Get all creatable property-mapping types"""
|
|
||||||
data = []
|
|
||||||
for subclass in all_subclasses(self.queryset.model):
|
|
||||||
subclass: PropertyMapping
|
|
||||||
data.append(
|
|
||||||
{
|
|
||||||
"name": subclass._meta.verbose_name,
|
|
||||||
"description": subclass.__doc__,
|
|
||||||
"component": subclass().component,
|
|
||||||
"model_name": subclass._meta.model_name,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
return Response(TypeCreateSerializer(data, many=True).data)
|
|
||||||
|
|
||||||
@permission_required("authentik_core.view_propertymapping")
|
@permission_required("authentik_core.view_propertymapping")
|
||||||
@extend_schema(
|
@extend_schema(
|
||||||
request=PolicyTestSerializer(),
|
request=PropertyMappingTestSerializer(),
|
||||||
responses={
|
responses={
|
||||||
200: PropertyMappingTestResultSerializer,
|
200: PropertyMappingTestResultSerializer,
|
||||||
400: OpenApiResponse(description="Invalid parameters"),
|
400: OpenApiResponse(description="Invalid parameters"),
|
||||||
@ -120,29 +113,39 @@ class PropertyMappingViewSet(
|
|||||||
"""Test Property Mapping"""
|
"""Test Property Mapping"""
|
||||||
_mapping: PropertyMapping = self.get_object()
|
_mapping: PropertyMapping = self.get_object()
|
||||||
# Use `get_subclass` to get correct class and correct `.evaluate` implementation
|
# Use `get_subclass` to get correct class and correct `.evaluate` implementation
|
||||||
mapping = PropertyMapping.objects.get_subclass(pk=_mapping.pk)
|
mapping: PropertyMapping = PropertyMapping.objects.get_subclass(pk=_mapping.pk)
|
||||||
# FIXME: when we separate policy mappings between ones for sources
|
# FIXME: when we separate policy mappings between ones for sources
|
||||||
# and ones for providers, we need to make the user field optional for the source mapping
|
# and ones for providers, we need to make the user field optional for the source mapping
|
||||||
test_params = PolicyTestSerializer(data=request.data)
|
test_params = self.PropertyMappingTestSerializer(data=request.data)
|
||||||
if not test_params.is_valid():
|
if not test_params.is_valid():
|
||||||
return Response(test_params.errors, status=400)
|
return Response(test_params.errors, status=400)
|
||||||
|
|
||||||
format_result = str(request.GET.get("format_result", "false")).lower() == "true"
|
format_result = str(request.GET.get("format_result", "false")).lower() == "true"
|
||||||
|
|
||||||
# User permission check, only allow mapping testing for users that are readable
|
context: dict = test_params.validated_data.get("context", {})
|
||||||
users = get_objects_for_user(request.user, "authentik_core.view_user").filter(
|
context.setdefault("user", None)
|
||||||
pk=test_params.validated_data["user"].pk
|
|
||||||
)
|
if user := test_params.validated_data.get("user"):
|
||||||
if not users.exists():
|
# User permission check, only allow mapping testing for users that are readable
|
||||||
raise PermissionDenied()
|
users = get_objects_for_user(request.user, "authentik_core.view_user").filter(
|
||||||
|
pk=user.pk
|
||||||
|
)
|
||||||
|
if not users.exists():
|
||||||
|
raise PermissionDenied()
|
||||||
|
context["user"] = user
|
||||||
|
if group := test_params.validated_data.get("group"):
|
||||||
|
# Group permission check, only allow mapping testing for groups that are readable
|
||||||
|
groups = get_objects_for_user(request.user, "authentik_core.view_group").filter(
|
||||||
|
pk=group.pk
|
||||||
|
)
|
||||||
|
if not groups.exists():
|
||||||
|
raise PermissionDenied()
|
||||||
|
context["group"] = group
|
||||||
|
context["request"] = self.request
|
||||||
|
|
||||||
response_data = {"successful": True, "result": ""}
|
response_data = {"successful": True, "result": ""}
|
||||||
try:
|
try:
|
||||||
result = mapping.evaluate(
|
result = mapping.evaluate(**context)
|
||||||
users.first(),
|
|
||||||
self.request,
|
|
||||||
**test_params.validated_data.get("context", {}),
|
|
||||||
)
|
|
||||||
response_data["result"] = dumps(
|
response_data["result"] = dumps(
|
||||||
sanitize_item(result), indent=(4 if format_result else None)
|
sanitize_item(result), indent=(4 if format_result else None)
|
||||||
)
|
)
|
||||||
@ -5,20 +5,15 @@ from django.db.models.query import Q
|
|||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from django_filters.filters import BooleanFilter
|
from django_filters.filters import BooleanFilter
|
||||||
from django_filters.filterset import FilterSet
|
from django_filters.filterset import FilterSet
|
||||||
from drf_spectacular.utils import extend_schema
|
|
||||||
from rest_framework import mixins
|
from rest_framework import mixins
|
||||||
from rest_framework.decorators import action
|
|
||||||
from rest_framework.fields import ReadOnlyField
|
from rest_framework.fields import ReadOnlyField
|
||||||
from rest_framework.request import Request
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework.serializers import ModelSerializer, SerializerMethodField
|
from rest_framework.serializers import ModelSerializer, SerializerMethodField
|
||||||
from rest_framework.viewsets import GenericViewSet
|
from rest_framework.viewsets import GenericViewSet
|
||||||
|
|
||||||
|
from authentik.core.api.object_types import TypesMixin
|
||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
|
from authentik.core.api.utils import MetaNameSerializer
|
||||||
from authentik.core.models import Provider
|
from authentik.core.models import Provider
|
||||||
from authentik.enterprise.apps import EnterpriseConfig
|
|
||||||
from authentik.lib.utils.reflection import all_subclasses
|
|
||||||
|
|
||||||
|
|
||||||
class ProviderSerializer(ModelSerializer, MetaNameSerializer):
|
class ProviderSerializer(ModelSerializer, MetaNameSerializer):
|
||||||
@ -63,8 +58,12 @@ class ProviderFilter(FilterSet):
|
|||||||
"""Filter for providers"""
|
"""Filter for providers"""
|
||||||
|
|
||||||
application__isnull = BooleanFilter(method="filter_application__isnull")
|
application__isnull = BooleanFilter(method="filter_application__isnull")
|
||||||
backchannel_only = BooleanFilter(
|
backchannel = BooleanFilter(
|
||||||
method="filter_backchannel_only",
|
method="filter_backchannel",
|
||||||
|
label=_(
|
||||||
|
"When not set all providers are returned. When set to true, only backchannel "
|
||||||
|
"providers are returned. When set to false, backchannel providers are excluded"
|
||||||
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
def filter_application__isnull(self, queryset: QuerySet, name, value):
|
def filter_application__isnull(self, queryset: QuerySet, name, value):
|
||||||
@ -75,12 +74,14 @@ class ProviderFilter(FilterSet):
|
|||||||
| Q(application__isnull=value)
|
| Q(application__isnull=value)
|
||||||
)
|
)
|
||||||
|
|
||||||
def filter_backchannel_only(self, queryset: QuerySet, name, value):
|
def filter_backchannel(self, queryset: QuerySet, name, value):
|
||||||
"""Only return backchannel providers"""
|
"""By default all providers are returned. When set to true, only backchannel providers are
|
||||||
|
returned. When set to false, backchannel providers are excluded"""
|
||||||
return queryset.filter(is_backchannel=value)
|
return queryset.filter(is_backchannel=value)
|
||||||
|
|
||||||
|
|
||||||
class ProviderViewSet(
|
class ProviderViewSet(
|
||||||
|
TypesMixin,
|
||||||
mixins.RetrieveModelMixin,
|
mixins.RetrieveModelMixin,
|
||||||
mixins.DestroyModelMixin,
|
mixins.DestroyModelMixin,
|
||||||
UsedByMixin,
|
UsedByMixin,
|
||||||
@ -99,31 +100,3 @@ class ProviderViewSet(
|
|||||||
|
|
||||||
def get_queryset(self): # pragma: no cover
|
def get_queryset(self): # pragma: no cover
|
||||||
return Provider.objects.select_subclasses()
|
return Provider.objects.select_subclasses()
|
||||||
|
|
||||||
@extend_schema(responses={200: TypeCreateSerializer(many=True)})
|
|
||||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
|
||||||
def types(self, request: Request) -> Response:
|
|
||||||
"""Get all creatable provider types"""
|
|
||||||
data = []
|
|
||||||
for subclass in all_subclasses(self.queryset.model):
|
|
||||||
subclass: Provider
|
|
||||||
if subclass._meta.abstract:
|
|
||||||
continue
|
|
||||||
data.append(
|
|
||||||
{
|
|
||||||
"name": subclass._meta.verbose_name,
|
|
||||||
"description": subclass.__doc__,
|
|
||||||
"component": subclass().component,
|
|
||||||
"model_name": subclass._meta.model_name,
|
|
||||||
"requires_enterprise": isinstance(subclass._meta.app_config, EnterpriseConfig),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
data.append(
|
|
||||||
{
|
|
||||||
"name": _("SAML Provider from Metadata"),
|
|
||||||
"description": _("Create a SAML Provider by importing its Metadata."),
|
|
||||||
"component": "ak-provider-saml-import-form",
|
|
||||||
"model_name": "",
|
|
||||||
}
|
|
||||||
)
|
|
||||||
return Response(TypeCreateSerializer(data, many=True).data)
|
|
||||||
|
|||||||
@ -17,8 +17,9 @@ from structlog.stdlib import get_logger
|
|||||||
|
|
||||||
from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
|
from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
|
||||||
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
|
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
|
||||||
|
from authentik.core.api.object_types import TypesMixin
|
||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
|
from authentik.core.api.utils import MetaNameSerializer
|
||||||
from authentik.core.models import Source, UserSourceConnection
|
from authentik.core.models import Source, UserSourceConnection
|
||||||
from authentik.core.types import UserSettingSerializer
|
from authentik.core.types import UserSettingSerializer
|
||||||
from authentik.lib.utils.file import (
|
from authentik.lib.utils.file import (
|
||||||
@ -27,7 +28,6 @@ from authentik.lib.utils.file import (
|
|||||||
set_file,
|
set_file,
|
||||||
set_file_url,
|
set_file_url,
|
||||||
)
|
)
|
||||||
from authentik.lib.utils.reflection import all_subclasses
|
|
||||||
from authentik.policies.engine import PolicyEngine
|
from authentik.policies.engine import PolicyEngine
|
||||||
from authentik.rbac.decorators import permission_required
|
from authentik.rbac.decorators import permission_required
|
||||||
|
|
||||||
@ -74,6 +74,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class SourceViewSet(
|
class SourceViewSet(
|
||||||
|
TypesMixin,
|
||||||
mixins.RetrieveModelMixin,
|
mixins.RetrieveModelMixin,
|
||||||
mixins.DestroyModelMixin,
|
mixins.DestroyModelMixin,
|
||||||
UsedByMixin,
|
UsedByMixin,
|
||||||
@ -132,30 +133,6 @@ class SourceViewSet(
|
|||||||
source: Source = self.get_object()
|
source: Source = self.get_object()
|
||||||
return set_file_url(request, source, "icon")
|
return set_file_url(request, source, "icon")
|
||||||
|
|
||||||
@extend_schema(responses={200: TypeCreateSerializer(many=True)})
|
|
||||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
|
||||||
def types(self, request: Request) -> Response:
|
|
||||||
"""Get all creatable source types"""
|
|
||||||
data = []
|
|
||||||
for subclass in all_subclasses(self.queryset.model):
|
|
||||||
subclass: Source
|
|
||||||
component = ""
|
|
||||||
if len(subclass.__subclasses__()) > 0:
|
|
||||||
continue
|
|
||||||
if subclass._meta.abstract:
|
|
||||||
component = subclass.__bases__[0]().component
|
|
||||||
else:
|
|
||||||
component = subclass().component
|
|
||||||
data.append(
|
|
||||||
{
|
|
||||||
"name": subclass._meta.verbose_name,
|
|
||||||
"description": subclass.__doc__,
|
|
||||||
"component": component,
|
|
||||||
"model_name": subclass._meta.model_name,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
return Response(TypeCreateSerializer(data, many=True).data)
|
|
||||||
|
|
||||||
@extend_schema(responses={200: UserSettingSerializer(many=True)})
|
@extend_schema(responses={200: UserSettingSerializer(many=True)})
|
||||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
@action(detail=False, pagination_class=None, filter_backends=[])
|
||||||
def user_settings(self, request: Request) -> Response:
|
def user_settings(self, request: Request) -> Response:
|
||||||
|
|||||||
@ -39,12 +39,12 @@ def get_delete_action(manager: Manager) -> str:
|
|||||||
"""Get the delete action from the Foreign key, falls back to cascade"""
|
"""Get the delete action from the Foreign key, falls back to cascade"""
|
||||||
if hasattr(manager, "field"):
|
if hasattr(manager, "field"):
|
||||||
if manager.field.remote_field.on_delete.__name__ == SET_NULL.__name__:
|
if manager.field.remote_field.on_delete.__name__ == SET_NULL.__name__:
|
||||||
return DeleteAction.SET_NULL.name
|
return DeleteAction.SET_NULL.value
|
||||||
if manager.field.remote_field.on_delete.__name__ == SET_DEFAULT.__name__:
|
if manager.field.remote_field.on_delete.__name__ == SET_DEFAULT.__name__:
|
||||||
return DeleteAction.SET_DEFAULT.name
|
return DeleteAction.SET_DEFAULT.value
|
||||||
if hasattr(manager, "source_field"):
|
if hasattr(manager, "source_field"):
|
||||||
return DeleteAction.CASCADE_MANY.name
|
return DeleteAction.CASCADE_MANY.value
|
||||||
return DeleteAction.CASCADE.name
|
return DeleteAction.CASCADE.value
|
||||||
|
|
||||||
|
|
||||||
class UsedByMixin:
|
class UsedByMixin:
|
||||||
|
|||||||
@ -6,8 +6,16 @@ from django.db.models import Model
|
|||||||
from drf_spectacular.extensions import OpenApiSerializerFieldExtension
|
from drf_spectacular.extensions import OpenApiSerializerFieldExtension
|
||||||
from drf_spectacular.plumbing import build_basic_type
|
from drf_spectacular.plumbing import build_basic_type
|
||||||
from drf_spectacular.types import OpenApiTypes
|
from drf_spectacular.types import OpenApiTypes
|
||||||
from rest_framework.fields import BooleanField, CharField, IntegerField, JSONField
|
from rest_framework.fields import (
|
||||||
from rest_framework.serializers import Serializer, SerializerMethodField, ValidationError
|
CharField,
|
||||||
|
IntegerField,
|
||||||
|
JSONField,
|
||||||
|
SerializerMethodField,
|
||||||
|
)
|
||||||
|
from rest_framework.serializers import (
|
||||||
|
Serializer,
|
||||||
|
ValidationError,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def is_dict(value: Any):
|
def is_dict(value: Any):
|
||||||
@ -68,16 +76,6 @@ class MetaNameSerializer(PassiveSerializer):
|
|||||||
return f"{obj._meta.app_label}.{obj._meta.model_name}"
|
return f"{obj._meta.app_label}.{obj._meta.model_name}"
|
||||||
|
|
||||||
|
|
||||||
class TypeCreateSerializer(PassiveSerializer):
|
|
||||||
"""Types of an object that can be created"""
|
|
||||||
|
|
||||||
name = CharField(required=True)
|
|
||||||
description = CharField(required=True)
|
|
||||||
component = CharField(required=True)
|
|
||||||
model_name = CharField(required=True)
|
|
||||||
requires_enterprise = BooleanField(default=False)
|
|
||||||
|
|
||||||
|
|
||||||
class CacheSerializer(PassiveSerializer):
|
class CacheSerializer(PassiveSerializer):
|
||||||
"""Generic cache stats for an object"""
|
"""Generic cache stats for an object"""
|
||||||
|
|
||||||
|
|||||||
@ -31,8 +31,9 @@ class InbuiltBackend(ModelBackend):
|
|||||||
# Since we can't directly pass other variables to signals, and we want to log the method
|
# Since we can't directly pass other variables to signals, and we want to log the method
|
||||||
# and the token used, we assume we're running in a flow and set a variable in the context
|
# and the token used, we assume we're running in a flow and set a variable in the context
|
||||||
flow_plan: FlowPlan = request.session.get(SESSION_KEY_PLAN, FlowPlan(""))
|
flow_plan: FlowPlan = request.session.get(SESSION_KEY_PLAN, FlowPlan(""))
|
||||||
flow_plan.context[PLAN_CONTEXT_METHOD] = method
|
flow_plan.context.setdefault(PLAN_CONTEXT_METHOD, method)
|
||||||
flow_plan.context[PLAN_CONTEXT_METHOD_ARGS] = cleanse_dict(sanitize_dict(kwargs))
|
flow_plan.context.setdefault(PLAN_CONTEXT_METHOD_ARGS, {})
|
||||||
|
flow_plan.context[PLAN_CONTEXT_METHOD_ARGS].update(cleanse_dict(sanitize_dict(kwargs)))
|
||||||
request.session[SESSION_KEY_PLAN] = flow_plan
|
request.session[SESSION_KEY_PLAN] = flow_plan
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -1,5 +1,6 @@
|
|||||||
"""Property Mapping Evaluator"""
|
"""Property Mapping Evaluator"""
|
||||||
|
|
||||||
|
from types import CodeType
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from django.db.models import Model
|
from django.db.models import Model
|
||||||
@ -24,6 +25,8 @@ class PropertyMappingEvaluator(BaseEvaluator):
|
|||||||
"""Custom Evaluator that adds some different context variables."""
|
"""Custom Evaluator that adds some different context variables."""
|
||||||
|
|
||||||
dry_run: bool
|
dry_run: bool
|
||||||
|
model: Model
|
||||||
|
_compiled: CodeType | None = None
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
@ -33,23 +36,32 @@ class PropertyMappingEvaluator(BaseEvaluator):
|
|||||||
dry_run: bool | None = False,
|
dry_run: bool | None = False,
|
||||||
**kwargs,
|
**kwargs,
|
||||||
):
|
):
|
||||||
|
self.model = model
|
||||||
if hasattr(model, "name"):
|
if hasattr(model, "name"):
|
||||||
_filename = model.name
|
_filename = model.name
|
||||||
else:
|
else:
|
||||||
_filename = str(model)
|
_filename = str(model)
|
||||||
super().__init__(filename=_filename)
|
super().__init__(filename=_filename)
|
||||||
|
self.dry_run = dry_run
|
||||||
|
self.set_context(user, request, **kwargs)
|
||||||
|
|
||||||
|
def set_context(
|
||||||
|
self,
|
||||||
|
user: User | None = None,
|
||||||
|
request: HttpRequest | None = None,
|
||||||
|
**kwargs,
|
||||||
|
):
|
||||||
req = PolicyRequest(user=User())
|
req = PolicyRequest(user=User())
|
||||||
req.obj = model
|
req.obj = self.model
|
||||||
if user:
|
if user:
|
||||||
req.user = user
|
req.user = user
|
||||||
self._context["user"] = user
|
self._context["user"] = user
|
||||||
if request:
|
if request:
|
||||||
req.http_request = request
|
req.http_request = request
|
||||||
self._context["request"] = req
|
|
||||||
req.context.update(**kwargs)
|
req.context.update(**kwargs)
|
||||||
|
self._context["request"] = req
|
||||||
self._context.update(**kwargs)
|
self._context.update(**kwargs)
|
||||||
self._globals["SkipObject"] = SkipObjectException
|
self._globals["SkipObject"] = SkipObjectException
|
||||||
self.dry_run = dry_run
|
|
||||||
|
|
||||||
def handle_error(self, exc: Exception, expression_source: str):
|
def handle_error(self, exc: Exception, expression_source: str):
|
||||||
"""Exception Handler"""
|
"""Exception Handler"""
|
||||||
@ -71,3 +83,9 @@ class PropertyMappingEvaluator(BaseEvaluator):
|
|||||||
def evaluate(self, *args, **kwargs) -> Any:
|
def evaluate(self, *args, **kwargs) -> Any:
|
||||||
with PROPERTY_MAPPING_TIME.labels(mapping_name=self._filename).time():
|
with PROPERTY_MAPPING_TIME.labels(mapping_name=self._filename).time():
|
||||||
return super().evaluate(*args, **kwargs)
|
return super().evaluate(*args, **kwargs)
|
||||||
|
|
||||||
|
def compile(self, expression: str | None = None) -> Any:
|
||||||
|
if not self._compiled:
|
||||||
|
compiled = super().compile(expression or self.model.expression)
|
||||||
|
self._compiled = compiled
|
||||||
|
return self._compiled
|
||||||
|
|||||||
@ -6,6 +6,11 @@ from authentik.lib.sentry import SentryIgnoredException
|
|||||||
class PropertyMappingExpressionException(SentryIgnoredException):
|
class PropertyMappingExpressionException(SentryIgnoredException):
|
||||||
"""Error when a PropertyMapping Exception expression could not be parsed or evaluated."""
|
"""Error when a PropertyMapping Exception expression could not be parsed or evaluated."""
|
||||||
|
|
||||||
|
def __init__(self, exc: Exception, mapping) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.exc = exc
|
||||||
|
self.mapping = mapping
|
||||||
|
|
||||||
|
|
||||||
class SkipObjectException(PropertyMappingExpressionException):
|
class SkipObjectException(PropertyMappingExpressionException):
|
||||||
"""Exception which can be raised in a property mapping to skip syncing an object.
|
"""Exception which can be raised in a property mapping to skip syncing an object.
|
||||||
|
|||||||
@ -15,6 +15,7 @@ from django.http import HttpRequest
|
|||||||
from django.utils.functional import SimpleLazyObject, cached_property
|
from django.utils.functional import SimpleLazyObject, cached_property
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from django_cte import CTEQuerySet, With
|
||||||
from guardian.conf import settings
|
from guardian.conf import settings
|
||||||
from guardian.mixins import GuardianUserMixin
|
from guardian.mixins import GuardianUserMixin
|
||||||
from model_utils.managers import InheritanceManager
|
from model_utils.managers import InheritanceManager
|
||||||
@ -56,6 +57,8 @@ options.DEFAULT_NAMES = options.DEFAULT_NAMES + (
|
|||||||
"authentik_used_by_shadows",
|
"authentik_used_by_shadows",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
GROUP_RECURSION_LIMIT = 20
|
||||||
|
|
||||||
|
|
||||||
def default_token_duration() -> datetime:
|
def default_token_duration() -> datetime:
|
||||||
"""Default duration a Token is valid"""
|
"""Default duration a Token is valid"""
|
||||||
@ -96,6 +99,40 @@ class UserTypes(models.TextChoices):
|
|||||||
INTERNAL_SERVICE_ACCOUNT = "internal_service_account"
|
INTERNAL_SERVICE_ACCOUNT = "internal_service_account"
|
||||||
|
|
||||||
|
|
||||||
|
class GroupQuerySet(CTEQuerySet):
|
||||||
|
def with_children_recursive(self):
|
||||||
|
"""Recursively get all groups that have the current queryset as parents
|
||||||
|
or are indirectly related."""
|
||||||
|
|
||||||
|
def make_cte(cte):
|
||||||
|
"""Build the query that ends up in WITH RECURSIVE"""
|
||||||
|
# Start from self, aka the current query
|
||||||
|
# Add a depth attribute to limit the recursion
|
||||||
|
return self.annotate(
|
||||||
|
relative_depth=models.Value(0, output_field=models.IntegerField())
|
||||||
|
).union(
|
||||||
|
# Here is the recursive part of the query. cte refers to the previous iteration
|
||||||
|
# Only select groups for which the parent is part of the previous iteration
|
||||||
|
# and increase the depth
|
||||||
|
# Finally, limit the depth
|
||||||
|
cte.join(Group, group_uuid=cte.col.parent_id)
|
||||||
|
.annotate(
|
||||||
|
relative_depth=models.ExpressionWrapper(
|
||||||
|
cte.col.relative_depth
|
||||||
|
+ models.Value(1, output_field=models.IntegerField()),
|
||||||
|
output_field=models.IntegerField(),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.filter(relative_depth__lt=GROUP_RECURSION_LIMIT),
|
||||||
|
all=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Build the recursive query, see above
|
||||||
|
cte = With.recursive(make_cte)
|
||||||
|
# Return the result, as a usable queryset for Group.
|
||||||
|
return cte.join(Group, group_uuid=cte.col.group_uuid).with_cte(cte)
|
||||||
|
|
||||||
|
|
||||||
class Group(SerializerModel):
|
class Group(SerializerModel):
|
||||||
"""Group model which supports a basic hierarchy and has attributes"""
|
"""Group model which supports a basic hierarchy and has attributes"""
|
||||||
|
|
||||||
@ -118,6 +155,8 @@ class Group(SerializerModel):
|
|||||||
)
|
)
|
||||||
attributes = models.JSONField(default=dict, blank=True)
|
attributes = models.JSONField(default=dict, blank=True)
|
||||||
|
|
||||||
|
objects = GroupQuerySet.as_manager()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def serializer(self) -> Serializer:
|
def serializer(self) -> Serializer:
|
||||||
from authentik.core.api.groups import GroupSerializer
|
from authentik.core.api.groups import GroupSerializer
|
||||||
@ -136,36 +175,11 @@ class Group(SerializerModel):
|
|||||||
return user.all_groups().filter(group_uuid=self.group_uuid).exists()
|
return user.all_groups().filter(group_uuid=self.group_uuid).exists()
|
||||||
|
|
||||||
def children_recursive(self: Self | QuerySet["Group"]) -> QuerySet["Group"]:
|
def children_recursive(self: Self | QuerySet["Group"]) -> QuerySet["Group"]:
|
||||||
"""Recursively get all groups that have this as parent or are indirectly related"""
|
"""Compatibility layer for Group.objects.with_children_recursive()"""
|
||||||
direct_groups = []
|
qs = self
|
||||||
if isinstance(self, QuerySet):
|
if not isinstance(self, QuerySet):
|
||||||
direct_groups = list(x for x in self.all().values_list("pk", flat=True).iterator())
|
qs = Group.objects.filter(group_uuid=self.group_uuid)
|
||||||
else:
|
return qs.with_children_recursive()
|
||||||
direct_groups = [self.pk]
|
|
||||||
if len(direct_groups) < 1:
|
|
||||||
return Group.objects.none()
|
|
||||||
query = """
|
|
||||||
WITH RECURSIVE parents AS (
|
|
||||||
SELECT authentik_core_group.*, 0 AS relative_depth
|
|
||||||
FROM authentik_core_group
|
|
||||||
WHERE authentik_core_group.group_uuid = ANY(%s)
|
|
||||||
|
|
||||||
UNION ALL
|
|
||||||
|
|
||||||
SELECT authentik_core_group.*, parents.relative_depth + 1
|
|
||||||
FROM authentik_core_group, parents
|
|
||||||
WHERE (
|
|
||||||
authentik_core_group.group_uuid = parents.parent_id and
|
|
||||||
parents.relative_depth < 20
|
|
||||||
)
|
|
||||||
)
|
|
||||||
SELECT group_uuid
|
|
||||||
FROM parents
|
|
||||||
GROUP BY group_uuid, name
|
|
||||||
ORDER BY name;
|
|
||||||
"""
|
|
||||||
group_pks = [group.pk for group in Group.objects.raw(query, [direct_groups]).iterator()]
|
|
||||||
return Group.objects.filter(pk__in=group_pks)
|
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return f"Group {self.name}"
|
return f"Group {self.name}"
|
||||||
@ -232,10 +246,8 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
|
|||||||
return User._meta.get_field("path").default
|
return User._meta.get_field("path").default
|
||||||
|
|
||||||
def all_groups(self) -> QuerySet[Group]:
|
def all_groups(self) -> QuerySet[Group]:
|
||||||
"""Recursively get all groups this user is a member of.
|
"""Recursively get all groups this user is a member of."""
|
||||||
At least one query is done to get the direct groups of the user, with groups
|
return self.ak_groups.all().with_children_recursive()
|
||||||
there are at most 3 queries done"""
|
|
||||||
return Group.children_recursive(self.ak_groups.all())
|
|
||||||
|
|
||||||
def group_attributes(self, request: HttpRequest | None = None) -> dict[str, Any]:
|
def group_attributes(self, request: HttpRequest | None = None) -> dict[str, Any]:
|
||||||
"""Get a dictionary containing the attributes from all groups the user belongs to,
|
"""Get a dictionary containing the attributes from all groups the user belongs to,
|
||||||
@ -377,6 +389,10 @@ class Provider(SerializerModel):
|
|||||||
Can return None for providers that are not URL-based"""
|
Can return None for providers that are not URL-based"""
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def icon_url(self) -> str | None:
|
||||||
|
return None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def component(self) -> str:
|
def component(self) -> str:
|
||||||
"""Return component used to edit this object"""
|
"""Return component used to edit this object"""
|
||||||
@ -768,7 +784,7 @@ class PropertyMapping(SerializerModel, ManagedModel):
|
|||||||
try:
|
try:
|
||||||
return evaluator.evaluate(self.expression)
|
return evaluator.evaluate(self.expression)
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
raise PropertyMappingExpressionException(exc) from exc
|
raise PropertyMappingExpressionException(self, exc) from exc
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return f"Property Mapping {self.name}"
|
return f"Property Mapping {self.name}"
|
||||||
|
|||||||
@ -13,7 +13,7 @@ from django.utils.translation import gettext as _
|
|||||||
from structlog.stdlib import get_logger
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
from authentik.core.models import Source, SourceUserMatchingModes, User, UserSourceConnection
|
from authentik.core.models import Source, SourceUserMatchingModes, User, UserSourceConnection
|
||||||
from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION, PostUserEnrollmentStage
|
from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION, PostSourceStage
|
||||||
from authentik.events.models import Event, EventAction
|
from authentik.events.models import Event, EventAction
|
||||||
from authentik.flows.exceptions import FlowNonApplicableException
|
from authentik.flows.exceptions import FlowNonApplicableException
|
||||||
from authentik.flows.models import Flow, FlowToken, Stage, in_memory_stage
|
from authentik.flows.models import Flow, FlowToken, Stage, in_memory_stage
|
||||||
@ -206,13 +206,9 @@ class SourceFlowManager:
|
|||||||
|
|
||||||
def get_stages_to_append(self, flow: Flow) -> list[Stage]:
|
def get_stages_to_append(self, flow: Flow) -> list[Stage]:
|
||||||
"""Hook to override stages which are appended to the flow"""
|
"""Hook to override stages which are appended to the flow"""
|
||||||
if not self.source.enrollment_flow:
|
return [
|
||||||
return []
|
in_memory_stage(PostSourceStage),
|
||||||
if flow.slug == self.source.enrollment_flow.slug:
|
]
|
||||||
return [
|
|
||||||
in_memory_stage(PostUserEnrollmentStage),
|
|
||||||
]
|
|
||||||
return []
|
|
||||||
|
|
||||||
def _prepare_flow(
|
def _prepare_flow(
|
||||||
self,
|
self,
|
||||||
@ -266,6 +262,9 @@ class SourceFlowManager:
|
|||||||
)
|
)
|
||||||
# We run the Flow planner here so we can pass the Pending user in the context
|
# We run the Flow planner here so we can pass the Pending user in the context
|
||||||
planner = FlowPlanner(flow)
|
planner = FlowPlanner(flow)
|
||||||
|
# We append some stages so the initial flow we get might be empty
|
||||||
|
planner.allow_empty_flows = True
|
||||||
|
planner.use_cache = False
|
||||||
plan = planner.plan(self.request, kwargs)
|
plan = planner.plan(self.request, kwargs)
|
||||||
for stage in self.get_stages_to_append(flow):
|
for stage in self.get_stages_to_append(flow):
|
||||||
plan.append_stage(stage)
|
plan.append_stage(stage)
|
||||||
@ -324,7 +323,7 @@ class SourceFlowManager:
|
|||||||
reverse(
|
reverse(
|
||||||
"authentik_core:if-user",
|
"authentik_core:if-user",
|
||||||
)
|
)
|
||||||
+ f"#/settings;page-{self.source.slug}"
|
+ "#/settings;page-sources"
|
||||||
)
|
)
|
||||||
|
|
||||||
def handle_enroll(
|
def handle_enroll(
|
||||||
|
|||||||
@ -10,7 +10,7 @@ from authentik.flows.stage import StageView
|
|||||||
PLAN_CONTEXT_SOURCES_CONNECTION = "goauthentik.io/sources/connection"
|
PLAN_CONTEXT_SOURCES_CONNECTION = "goauthentik.io/sources/connection"
|
||||||
|
|
||||||
|
|
||||||
class PostUserEnrollmentStage(StageView):
|
class PostSourceStage(StageView):
|
||||||
"""Dynamically injected stage which saves the Connection after
|
"""Dynamically injected stage which saves the Connection after
|
||||||
the user has been enrolled."""
|
the user has been enrolled."""
|
||||||
|
|
||||||
@ -21,10 +21,12 @@ class PostUserEnrollmentStage(StageView):
|
|||||||
]
|
]
|
||||||
user: User = self.executor.plan.context[PLAN_CONTEXT_PENDING_USER]
|
user: User = self.executor.plan.context[PLAN_CONTEXT_PENDING_USER]
|
||||||
connection.user = user
|
connection.user = user
|
||||||
|
linked = connection.pk is None
|
||||||
connection.save()
|
connection.save()
|
||||||
Event.new(
|
if linked:
|
||||||
EventAction.SOURCE_LINKED,
|
Event.new(
|
||||||
message="Linked Source",
|
EventAction.SOURCE_LINKED,
|
||||||
source=connection.source,
|
message="Linked Source",
|
||||||
).from_http(self.request)
|
source=connection.source,
|
||||||
|
).from_http(self.request)
|
||||||
return self.executor.stage_ok()
|
return self.executor.stage_ok()
|
||||||
|
|||||||
@ -23,6 +23,17 @@ class TestGroupsAPI(APITestCase):
|
|||||||
response = self.client.get(reverse("authentik_api:group-list"), {"include_users": "true"})
|
response = self.client.get(reverse("authentik_api:group-list"), {"include_users": "true"})
|
||||||
self.assertEqual(response.status_code, 200)
|
self.assertEqual(response.status_code, 200)
|
||||||
|
|
||||||
|
def test_retrieve_with_users(self):
|
||||||
|
"""Test retrieve with users"""
|
||||||
|
admin = create_test_admin_user()
|
||||||
|
group = Group.objects.create(name=generate_id())
|
||||||
|
self.client.force_login(admin)
|
||||||
|
response = self.client.get(
|
||||||
|
reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
|
||||||
|
{"include_users": "true"},
|
||||||
|
)
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
|
||||||
def test_add_user(self):
|
def test_add_user(self):
|
||||||
"""Test add_user"""
|
"""Test add_user"""
|
||||||
group = Group.objects.create(name=generate_id())
|
group = Group.objects.create(name=generate_id())
|
||||||
|
|||||||
@ -1,14 +1,14 @@
|
|||||||
"""authentik core models tests"""
|
"""authentik core models tests"""
|
||||||
|
|
||||||
from collections.abc import Callable
|
from collections.abc import Callable
|
||||||
from time import sleep
|
from datetime import timedelta
|
||||||
|
|
||||||
from django.test import RequestFactory, TestCase
|
from django.test import RequestFactory, TestCase
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
|
from freezegun import freeze_time
|
||||||
from guardian.shortcuts import get_anonymous_user
|
from guardian.shortcuts import get_anonymous_user
|
||||||
|
|
||||||
from authentik.core.models import Provider, Source, Token
|
from authentik.core.models import Provider, Source, Token
|
||||||
from authentik.flows.models import Stage
|
|
||||||
from authentik.lib.utils.reflection import all_subclasses
|
from authentik.lib.utils.reflection import all_subclasses
|
||||||
|
|
||||||
|
|
||||||
@ -17,18 +17,20 @@ class TestModels(TestCase):
|
|||||||
|
|
||||||
def test_token_expire(self):
|
def test_token_expire(self):
|
||||||
"""Test token expiring"""
|
"""Test token expiring"""
|
||||||
token = Token.objects.create(expires=now(), user=get_anonymous_user())
|
with freeze_time() as freeze:
|
||||||
sleep(0.5)
|
token = Token.objects.create(expires=now(), user=get_anonymous_user())
|
||||||
self.assertTrue(token.is_expired)
|
freeze.tick(timedelta(seconds=1))
|
||||||
|
self.assertTrue(token.is_expired)
|
||||||
|
|
||||||
def test_token_expire_no_expire(self):
|
def test_token_expire_no_expire(self):
|
||||||
"""Test token expiring with "expiring" set"""
|
"""Test token expiring with "expiring" set"""
|
||||||
token = Token.objects.create(expires=now(), user=get_anonymous_user(), expiring=False)
|
with freeze_time() as freeze:
|
||||||
sleep(0.5)
|
token = Token.objects.create(expires=now(), user=get_anonymous_user(), expiring=False)
|
||||||
self.assertFalse(token.is_expired)
|
freeze.tick(timedelta(seconds=1))
|
||||||
|
self.assertFalse(token.is_expired)
|
||||||
|
|
||||||
|
|
||||||
def source_tester_factory(test_model: type[Stage]) -> Callable:
|
def source_tester_factory(test_model: type[Source]) -> Callable:
|
||||||
"""Test source"""
|
"""Test source"""
|
||||||
|
|
||||||
factory = RequestFactory()
|
factory = RequestFactory()
|
||||||
@ -36,19 +38,19 @@ def source_tester_factory(test_model: type[Stage]) -> Callable:
|
|||||||
|
|
||||||
def tester(self: TestModels):
|
def tester(self: TestModels):
|
||||||
model_class = None
|
model_class = None
|
||||||
if test_model._meta.abstract: # pragma: no cover
|
if test_model._meta.abstract:
|
||||||
model_class = test_model.__bases__[0]()
|
model_class = [x for x in test_model.__bases__ if issubclass(x, Source)][0]()
|
||||||
else:
|
else:
|
||||||
model_class = test_model()
|
model_class = test_model()
|
||||||
model_class.slug = "test"
|
model_class.slug = "test"
|
||||||
self.assertIsNotNone(model_class.component)
|
self.assertIsNotNone(model_class.component)
|
||||||
_ = model_class.ui_login_button(request)
|
model_class.ui_login_button(request)
|
||||||
_ = model_class.ui_user_settings()
|
model_class.ui_user_settings()
|
||||||
|
|
||||||
return tester
|
return tester
|
||||||
|
|
||||||
|
|
||||||
def provider_tester_factory(test_model: type[Stage]) -> Callable:
|
def provider_tester_factory(test_model: type[Provider]) -> Callable:
|
||||||
"""Test provider"""
|
"""Test provider"""
|
||||||
|
|
||||||
def tester(self: TestModels):
|
def tester(self: TestModels):
|
||||||
|
|||||||
@ -6,9 +6,10 @@ from django.urls import reverse
|
|||||||
from rest_framework.serializers import ValidationError
|
from rest_framework.serializers import ValidationError
|
||||||
from rest_framework.test import APITestCase
|
from rest_framework.test import APITestCase
|
||||||
|
|
||||||
from authentik.core.api.propertymappings import PropertyMappingSerializer
|
from authentik.core.api.property_mappings import PropertyMappingSerializer
|
||||||
from authentik.core.models import PropertyMapping
|
from authentik.core.models import Group, PropertyMapping
|
||||||
from authentik.core.tests.utils import create_test_admin_user
|
from authentik.core.tests.utils import create_test_admin_user
|
||||||
|
from authentik.lib.generators import generate_id
|
||||||
|
|
||||||
|
|
||||||
class TestPropertyMappingAPI(APITestCase):
|
class TestPropertyMappingAPI(APITestCase):
|
||||||
@ -16,23 +17,40 @@ class TestPropertyMappingAPI(APITestCase):
|
|||||||
|
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
super().setUp()
|
super().setUp()
|
||||||
self.mapping = PropertyMapping.objects.create(
|
|
||||||
name="dummy", expression="""return {'foo': 'bar'}"""
|
|
||||||
)
|
|
||||||
self.user = create_test_admin_user()
|
self.user = create_test_admin_user()
|
||||||
self.client.force_login(self.user)
|
self.client.force_login(self.user)
|
||||||
|
|
||||||
def test_test_call(self):
|
def test_test_call(self):
|
||||||
"""Test PropertMappings's test endpoint"""
|
"""Test PropertyMappings's test endpoint"""
|
||||||
|
mapping = PropertyMapping.objects.create(
|
||||||
|
name="dummy", expression="""return {'foo': 'bar', 'baz': user.username}"""
|
||||||
|
)
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
reverse("authentik_api:propertymapping-test", kwargs={"pk": self.mapping.pk}),
|
reverse("authentik_api:propertymapping-test", kwargs={"pk": mapping.pk}),
|
||||||
data={
|
data={
|
||||||
"user": self.user.pk,
|
"user": self.user.pk,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
self.assertJSONEqual(
|
self.assertJSONEqual(
|
||||||
response.content.decode(),
|
response.content.decode(),
|
||||||
{"result": dumps({"foo": "bar"}), "successful": True},
|
{"result": dumps({"foo": "bar", "baz": self.user.username}), "successful": True},
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_test_call_group(self):
|
||||||
|
"""Test PropertyMappings's test endpoint"""
|
||||||
|
mapping = PropertyMapping.objects.create(
|
||||||
|
name="dummy", expression="""return {'foo': 'bar', 'baz': group.name}"""
|
||||||
|
)
|
||||||
|
group = Group.objects.create(name=generate_id())
|
||||||
|
response = self.client.post(
|
||||||
|
reverse("authentik_api:propertymapping-test", kwargs={"pk": mapping.pk}),
|
||||||
|
data={
|
||||||
|
"group": group.pk,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
self.assertJSONEqual(
|
||||||
|
response.content.decode(),
|
||||||
|
{"result": dumps({"foo": "bar", "baz": group.name}), "successful": True},
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_validate(self):
|
def test_validate(self):
|
||||||
|
|||||||
@ -2,11 +2,15 @@
|
|||||||
|
|
||||||
from django.contrib.auth.models import AnonymousUser
|
from django.contrib.auth.models import AnonymousUser
|
||||||
from django.test import TestCase
|
from django.test import TestCase
|
||||||
|
from django.urls import reverse
|
||||||
from guardian.utils import get_anonymous_user
|
from guardian.utils import get_anonymous_user
|
||||||
|
|
||||||
from authentik.core.models import SourceUserMatchingModes, User
|
from authentik.core.models import SourceUserMatchingModes, User
|
||||||
from authentik.core.sources.flow_manager import Action
|
from authentik.core.sources.flow_manager import Action
|
||||||
|
from authentik.core.sources.stage import PostSourceStage
|
||||||
from authentik.core.tests.utils import create_test_flow
|
from authentik.core.tests.utils import create_test_flow
|
||||||
|
from authentik.flows.planner import FlowPlan
|
||||||
|
from authentik.flows.views.executor import SESSION_KEY_PLAN
|
||||||
from authentik.lib.generators import generate_id
|
from authentik.lib.generators import generate_id
|
||||||
from authentik.lib.tests.utils import get_request
|
from authentik.lib.tests.utils import get_request
|
||||||
from authentik.policies.denied import AccessDeniedResponse
|
from authentik.policies.denied import AccessDeniedResponse
|
||||||
@ -21,41 +25,55 @@ class TestSourceFlowManager(TestCase):
|
|||||||
|
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
super().setUp()
|
super().setUp()
|
||||||
self.source: OAuthSource = OAuthSource.objects.create(name="test")
|
self.authentication_flow = create_test_flow()
|
||||||
|
self.enrollment_flow = create_test_flow()
|
||||||
|
self.source: OAuthSource = OAuthSource.objects.create(
|
||||||
|
name=generate_id(),
|
||||||
|
slug=generate_id(),
|
||||||
|
authentication_flow=self.authentication_flow,
|
||||||
|
enrollment_flow=self.enrollment_flow,
|
||||||
|
)
|
||||||
self.identifier = generate_id()
|
self.identifier = generate_id()
|
||||||
|
|
||||||
def test_unauthenticated_enroll(self):
|
def test_unauthenticated_enroll(self):
|
||||||
"""Test un-authenticated user enrolling"""
|
"""Test un-authenticated user enrolling"""
|
||||||
flow_manager = OAuthSourceFlowManager(
|
request = get_request("/", user=AnonymousUser())
|
||||||
self.source, get_request("/", user=AnonymousUser()), self.identifier, {}
|
flow_manager = OAuthSourceFlowManager(self.source, request, self.identifier, {})
|
||||||
)
|
|
||||||
action, _ = flow_manager.get_action()
|
action, _ = flow_manager.get_action()
|
||||||
self.assertEqual(action, Action.ENROLL)
|
self.assertEqual(action, Action.ENROLL)
|
||||||
flow_manager.get_flow()
|
response = flow_manager.get_flow()
|
||||||
|
self.assertEqual(response.status_code, 302)
|
||||||
|
flow_plan: FlowPlan = request.session[SESSION_KEY_PLAN]
|
||||||
|
self.assertEqual(flow_plan.bindings[0].stage.view, PostSourceStage)
|
||||||
|
|
||||||
def test_unauthenticated_auth(self):
|
def test_unauthenticated_auth(self):
|
||||||
"""Test un-authenticated user authenticating"""
|
"""Test un-authenticated user authenticating"""
|
||||||
UserOAuthSourceConnection.objects.create(
|
UserOAuthSourceConnection.objects.create(
|
||||||
user=get_anonymous_user(), source=self.source, identifier=self.identifier
|
user=get_anonymous_user(), source=self.source, identifier=self.identifier
|
||||||
)
|
)
|
||||||
|
request = get_request("/", user=AnonymousUser())
|
||||||
flow_manager = OAuthSourceFlowManager(
|
flow_manager = OAuthSourceFlowManager(self.source, request, self.identifier, {})
|
||||||
self.source, get_request("/", user=AnonymousUser()), self.identifier, {}
|
|
||||||
)
|
|
||||||
action, _ = flow_manager.get_action()
|
action, _ = flow_manager.get_action()
|
||||||
self.assertEqual(action, Action.AUTH)
|
self.assertEqual(action, Action.AUTH)
|
||||||
flow_manager.get_flow()
|
response = flow_manager.get_flow()
|
||||||
|
self.assertEqual(response.status_code, 302)
|
||||||
|
flow_plan: FlowPlan = request.session[SESSION_KEY_PLAN]
|
||||||
|
self.assertEqual(flow_plan.bindings[0].stage.view, PostSourceStage)
|
||||||
|
|
||||||
def test_authenticated_link(self):
|
def test_authenticated_link(self):
|
||||||
"""Test authenticated user linking"""
|
"""Test authenticated user linking"""
|
||||||
user = User.objects.create(username="foo", email="foo@bar.baz")
|
user = User.objects.create(username="foo", email="foo@bar.baz")
|
||||||
flow_manager = OAuthSourceFlowManager(
|
request = get_request("/", user=user)
|
||||||
self.source, get_request("/", user=user), self.identifier, {}
|
flow_manager = OAuthSourceFlowManager(self.source, request, self.identifier, {})
|
||||||
)
|
|
||||||
action, connection = flow_manager.get_action()
|
action, connection = flow_manager.get_action()
|
||||||
self.assertEqual(action, Action.LINK)
|
self.assertEqual(action, Action.LINK)
|
||||||
self.assertIsNone(connection.pk)
|
self.assertIsNone(connection.pk)
|
||||||
flow_manager.get_flow()
|
response = flow_manager.get_flow()
|
||||||
|
self.assertEqual(response.status_code, 302)
|
||||||
|
self.assertEqual(
|
||||||
|
response.url,
|
||||||
|
reverse("authentik_core:if-user") + "#/settings;page-sources",
|
||||||
|
)
|
||||||
|
|
||||||
def test_unauthenticated_link(self):
|
def test_unauthenticated_link(self):
|
||||||
"""Test un-authenticated user linking"""
|
"""Test un-authenticated user linking"""
|
||||||
|
|||||||
@ -42,8 +42,8 @@ class TestUsersAvatars(APITestCase):
|
|||||||
with Mocker() as mocker:
|
with Mocker() as mocker:
|
||||||
mocker.head(
|
mocker.head(
|
||||||
(
|
(
|
||||||
"https://secure.gravatar.com/avatar/84730f9c1851d1ea03f1a"
|
"https://www.gravatar.com/avatar/76eb3c74c8beb6faa037f1b6e2ecb3e252bdac"
|
||||||
"a9ed85bd1ea?size=158&rating=g&default=404"
|
"6cf71fb567ae36025a9d4ea86b?size=158&rating=g&default=404"
|
||||||
),
|
),
|
||||||
text="foo",
|
text="foo",
|
||||||
)
|
)
|
||||||
|
|||||||
@ -12,7 +12,7 @@ from authentik.core.api.applications import ApplicationViewSet
|
|||||||
from authentik.core.api.authenticated_sessions import AuthenticatedSessionViewSet
|
from authentik.core.api.authenticated_sessions import AuthenticatedSessionViewSet
|
||||||
from authentik.core.api.devices import AdminDeviceViewSet, DeviceViewSet
|
from authentik.core.api.devices import AdminDeviceViewSet, DeviceViewSet
|
||||||
from authentik.core.api.groups import GroupViewSet
|
from authentik.core.api.groups import GroupViewSet
|
||||||
from authentik.core.api.propertymappings import PropertyMappingViewSet
|
from authentik.core.api.property_mappings import PropertyMappingViewSet
|
||||||
from authentik.core.api.providers import ProviderViewSet
|
from authentik.core.api.providers import ProviderViewSet
|
||||||
from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
|
from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
|
||||||
from authentik.core.api.tokens import TokenViewSet
|
from authentik.core.api.tokens import TokenViewSet
|
||||||
|
|||||||
@ -92,7 +92,11 @@ class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
|||||||
@property
|
@property
|
||||||
def kid(self):
|
def kid(self):
|
||||||
"""Get Key ID used for JWKS"""
|
"""Get Key ID used for JWKS"""
|
||||||
return md5(self.key_data.encode("utf-8")).hexdigest() if self.key_data else "" # nosec
|
return (
|
||||||
|
md5(self.key_data.encode("utf-8"), usedforsecurity=False).hexdigest()
|
||||||
|
if self.key_data
|
||||||
|
else ""
|
||||||
|
) # nosec
|
||||||
|
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
return f"Certificate-Key Pair {self.name}"
|
return f"Certificate-Key Pair {self.name}"
|
||||||
|
|||||||
@ -241,7 +241,7 @@ class TestCrypto(APITestCase):
|
|||||||
"model_name": "oauth2provider",
|
"model_name": "oauth2provider",
|
||||||
"pk": str(provider.pk),
|
"pk": str(provider.pk),
|
||||||
"name": str(provider),
|
"name": str(provider),
|
||||||
"action": DeleteAction.SET_NULL.name,
|
"action": DeleteAction.SET_NULL.value,
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|||||||
@ -0,0 +1,45 @@
|
|||||||
|
"""GoogleWorkspaceProviderGroup API Views"""
|
||||||
|
|
||||||
|
from rest_framework import mixins
|
||||||
|
from rest_framework.serializers import ModelSerializer
|
||||||
|
from rest_framework.viewsets import GenericViewSet
|
||||||
|
|
||||||
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
|
from authentik.core.api.users import UserGroupSerializer
|
||||||
|
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProviderGroup
|
||||||
|
|
||||||
|
|
||||||
|
class GoogleWorkspaceProviderGroupSerializer(ModelSerializer):
|
||||||
|
"""GoogleWorkspaceProviderGroup Serializer"""
|
||||||
|
|
||||||
|
group_obj = UserGroupSerializer(source="group", read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
|
||||||
|
model = GoogleWorkspaceProviderGroup
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"google_id",
|
||||||
|
"group",
|
||||||
|
"group_obj",
|
||||||
|
"provider",
|
||||||
|
"attributes",
|
||||||
|
]
|
||||||
|
extra_kwargs = {"attributes": {"read_only": True}}
|
||||||
|
|
||||||
|
|
||||||
|
class GoogleWorkspaceProviderGroupViewSet(
|
||||||
|
mixins.CreateModelMixin,
|
||||||
|
mixins.RetrieveModelMixin,
|
||||||
|
mixins.DestroyModelMixin,
|
||||||
|
UsedByMixin,
|
||||||
|
mixins.ListModelMixin,
|
||||||
|
GenericViewSet,
|
||||||
|
):
|
||||||
|
"""GoogleWorkspaceProviderGroup Viewset"""
|
||||||
|
|
||||||
|
queryset = GoogleWorkspaceProviderGroup.objects.all().select_related("group")
|
||||||
|
serializer_class = GoogleWorkspaceProviderGroupSerializer
|
||||||
|
filterset_fields = ["provider__id", "group__name", "group__group_uuid"]
|
||||||
|
search_fields = ["provider__name", "group__name"]
|
||||||
|
ordering = ["group__name"]
|
||||||
@ -6,21 +6,21 @@ from drf_spectacular.types import OpenApiTypes
|
|||||||
from drf_spectacular.utils import extend_schema_field
|
from drf_spectacular.utils import extend_schema_field
|
||||||
from rest_framework.viewsets import ModelViewSet
|
from rest_framework.viewsets import ModelViewSet
|
||||||
|
|
||||||
from authentik.core.api.propertymappings import PropertyMappingSerializer
|
from authentik.core.api.property_mappings import PropertyMappingSerializer
|
||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProviderMapping
|
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProviderMapping
|
||||||
|
|
||||||
|
|
||||||
class GoogleProviderMappingSerializer(PropertyMappingSerializer):
|
class GoogleWorkspaceProviderMappingSerializer(PropertyMappingSerializer):
|
||||||
"""GoogleProviderMapping Serializer"""
|
"""GoogleWorkspaceProviderMapping Serializer"""
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = GoogleWorkspaceProviderMapping
|
model = GoogleWorkspaceProviderMapping
|
||||||
fields = PropertyMappingSerializer.Meta.fields
|
fields = PropertyMappingSerializer.Meta.fields
|
||||||
|
|
||||||
|
|
||||||
class GoogleProviderMappingFilter(FilterSet):
|
class GoogleWorkspaceProviderMappingFilter(FilterSet):
|
||||||
"""Filter for GoogleProviderMapping"""
|
"""Filter for GoogleWorkspaceProviderMapping"""
|
||||||
|
|
||||||
managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))
|
managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))
|
||||||
|
|
||||||
@ -29,11 +29,11 @@ class GoogleProviderMappingFilter(FilterSet):
|
|||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
class GoogleProviderMappingViewSet(UsedByMixin, ModelViewSet):
|
class GoogleWorkspaceProviderMappingViewSet(UsedByMixin, ModelViewSet):
|
||||||
"""GoogleProviderMapping Viewset"""
|
"""GoogleWorkspaceProviderMapping Viewset"""
|
||||||
|
|
||||||
queryset = GoogleWorkspaceProviderMapping.objects.all()
|
queryset = GoogleWorkspaceProviderMapping.objects.all()
|
||||||
serializer_class = GoogleProviderMappingSerializer
|
serializer_class = GoogleWorkspaceProviderMappingSerializer
|
||||||
filterset_class = GoogleProviderMappingFilter
|
filterset_class = GoogleWorkspaceProviderMappingFilter
|
||||||
search_fields = ["name"]
|
search_fields = ["name"]
|
||||||
ordering = ["name"]
|
ordering = ["name"]
|
||||||
|
|||||||
@ -10,8 +10,8 @@ from authentik.enterprise.providers.google_workspace.tasks import google_workspa
|
|||||||
from authentik.lib.sync.outgoing.api import OutgoingSyncProviderStatusMixin
|
from authentik.lib.sync.outgoing.api import OutgoingSyncProviderStatusMixin
|
||||||
|
|
||||||
|
|
||||||
class GoogleProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
|
class GoogleWorkspaceProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
|
||||||
"""GoogleProvider Serializer"""
|
"""GoogleWorkspaceProvider Serializer"""
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = GoogleWorkspaceProvider
|
model = GoogleWorkspaceProvider
|
||||||
@ -38,11 +38,11 @@ class GoogleProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
|
|||||||
extra_kwargs = {}
|
extra_kwargs = {}
|
||||||
|
|
||||||
|
|
||||||
class GoogleProviderViewSet(OutgoingSyncProviderStatusMixin, UsedByMixin, ModelViewSet):
|
class GoogleWorkspaceProviderViewSet(OutgoingSyncProviderStatusMixin, UsedByMixin, ModelViewSet):
|
||||||
"""GoogleProvider Viewset"""
|
"""GoogleWorkspaceProvider Viewset"""
|
||||||
|
|
||||||
queryset = GoogleWorkspaceProvider.objects.all()
|
queryset = GoogleWorkspaceProvider.objects.all()
|
||||||
serializer_class = GoogleProviderSerializer
|
serializer_class = GoogleWorkspaceProviderSerializer
|
||||||
filterset_fields = [
|
filterset_fields = [
|
||||||
"name",
|
"name",
|
||||||
"exclude_users_service_account",
|
"exclude_users_service_account",
|
||||||
|
|||||||
45
authentik/enterprise/providers/google_workspace/api/users.py
Normal file
45
authentik/enterprise/providers/google_workspace/api/users.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
"""GoogleWorkspaceProviderUser API Views"""
|
||||||
|
|
||||||
|
from rest_framework import mixins
|
||||||
|
from rest_framework.serializers import ModelSerializer
|
||||||
|
from rest_framework.viewsets import GenericViewSet
|
||||||
|
|
||||||
|
from authentik.core.api.groups import GroupMemberSerializer
|
||||||
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
|
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProviderUser
|
||||||
|
|
||||||
|
|
||||||
|
class GoogleWorkspaceProviderUserSerializer(ModelSerializer):
|
||||||
|
"""GoogleWorkspaceProviderUser Serializer"""
|
||||||
|
|
||||||
|
user_obj = GroupMemberSerializer(source="user", read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
|
||||||
|
model = GoogleWorkspaceProviderUser
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"google_id",
|
||||||
|
"user",
|
||||||
|
"user_obj",
|
||||||
|
"provider",
|
||||||
|
"attributes",
|
||||||
|
]
|
||||||
|
extra_kwargs = {"attributes": {"read_only": True}}
|
||||||
|
|
||||||
|
|
||||||
|
class GoogleWorkspaceProviderUserViewSet(
|
||||||
|
mixins.CreateModelMixin,
|
||||||
|
mixins.RetrieveModelMixin,
|
||||||
|
mixins.DestroyModelMixin,
|
||||||
|
UsedByMixin,
|
||||||
|
mixins.ListModelMixin,
|
||||||
|
GenericViewSet,
|
||||||
|
):
|
||||||
|
"""GoogleWorkspaceProviderUser Viewset"""
|
||||||
|
|
||||||
|
queryset = GoogleWorkspaceProviderUser.objects.all().select_related("user")
|
||||||
|
serializer_class = GoogleWorkspaceProviderUserSerializer
|
||||||
|
filterset_fields = ["provider__id", "user__username", "user__id"]
|
||||||
|
search_fields = ["provider__name", "user__username"]
|
||||||
|
ordering = ["user__username"]
|
||||||
@ -1,5 +1,5 @@
|
|||||||
from django.db.models import Model
|
from django.db.models import Model
|
||||||
from django.http import HttpResponseNotFound
|
from django.http import HttpResponseBadRequest, HttpResponseNotFound
|
||||||
from google.auth.exceptions import GoogleAuthError, TransportError
|
from google.auth.exceptions import GoogleAuthError, TransportError
|
||||||
from googleapiclient.discovery import build
|
from googleapiclient.discovery import build
|
||||||
from googleapiclient.errors import Error, HttpError
|
from googleapiclient.errors import Error, HttpError
|
||||||
@ -10,6 +10,7 @@ from authentik.enterprise.providers.google_workspace.models import GoogleWorkspa
|
|||||||
from authentik.lib.sync.outgoing import HTTP_CONFLICT
|
from authentik.lib.sync.outgoing import HTTP_CONFLICT
|
||||||
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
|
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
|
||||||
from authentik.lib.sync.outgoing.exceptions import (
|
from authentik.lib.sync.outgoing.exceptions import (
|
||||||
|
BadRequestSyncException,
|
||||||
NotFoundSyncException,
|
NotFoundSyncException,
|
||||||
ObjectExistsSyncException,
|
ObjectExistsSyncException,
|
||||||
StopSync,
|
StopSync,
|
||||||
@ -50,22 +51,24 @@ class GoogleWorkspaceSyncClient[TModel: Model, TConnection: Model, TSchema: dict
|
|||||||
raise StopSync(exc) from exc
|
raise StopSync(exc) from exc
|
||||||
except HttpLib2Error as exc:
|
except HttpLib2Error as exc:
|
||||||
if isinstance(exc, HttpLib2ErrorWithResponse):
|
if isinstance(exc, HttpLib2ErrorWithResponse):
|
||||||
self._response_handle_status_code(exc.response.status, exc)
|
self._response_handle_status_code(request.body, exc.response.status, exc)
|
||||||
raise TransientSyncException(f"Failed to send request: {str(exc)}") from exc
|
raise TransientSyncException(f"Failed to send request: {str(exc)}") from exc
|
||||||
except HttpError as exc:
|
except HttpError as exc:
|
||||||
self._response_handle_status_code(exc.status_code, exc)
|
self._response_handle_status_code(request.body, exc.status_code, exc)
|
||||||
raise TransientSyncException(f"Failed to send request: {str(exc)}") from exc
|
raise TransientSyncException(f"Failed to send request: {str(exc)}") from exc
|
||||||
except Error as exc:
|
except Error as exc:
|
||||||
raise TransientSyncException(f"Failed to send request: {str(exc)}") from exc
|
raise TransientSyncException(f"Failed to send request: {str(exc)}") from exc
|
||||||
return response
|
return response
|
||||||
|
|
||||||
def _response_handle_status_code(self, status_code: int, root_exc: Exception):
|
def _response_handle_status_code(self, request: dict, status_code: int, root_exc: Exception):
|
||||||
if status_code == HttpResponseNotFound.status_code:
|
if status_code == HttpResponseNotFound.status_code:
|
||||||
raise NotFoundSyncException("Object not found") from root_exc
|
raise NotFoundSyncException("Object not found") from root_exc
|
||||||
if status_code == HTTP_CONFLICT:
|
if status_code == HTTP_CONFLICT:
|
||||||
raise ObjectExistsSyncException("Object exists") from root_exc
|
raise ObjectExistsSyncException("Object exists") from root_exc
|
||||||
|
if status_code == HttpResponseBadRequest.status_code:
|
||||||
|
raise BadRequestSyncException("Bad request", request) from root_exc
|
||||||
|
|
||||||
def check_email_valid(self, *emails: str):
|
def check_email_valid(self, *emails: str):
|
||||||
for email in emails:
|
for email in emails:
|
||||||
if not any(email.endswith(f"@{domain_name}") for domain_name in self.domains):
|
if not any(email.endswith(f"@{domain_name}") for domain_name in self.domains):
|
||||||
raise TransientSyncException(f"Invalid email domain: {email}")
|
raise BadRequestSyncException(f"Invalid email domain: {email}")
|
||||||
|
|||||||
@ -1,28 +1,22 @@
|
|||||||
from deepmerge import always_merger
|
|
||||||
from django.db import transaction
|
from django.db import transaction
|
||||||
from django.utils.text import slugify
|
from django.utils.text import slugify
|
||||||
|
|
||||||
from authentik.core.expression.exceptions import (
|
|
||||||
PropertyMappingExpressionException,
|
|
||||||
SkipObjectException,
|
|
||||||
)
|
|
||||||
from authentik.core.models import Group
|
from authentik.core.models import Group
|
||||||
from authentik.enterprise.providers.google_workspace.clients.base import GoogleWorkspaceSyncClient
|
from authentik.enterprise.providers.google_workspace.clients.base import GoogleWorkspaceSyncClient
|
||||||
from authentik.enterprise.providers.google_workspace.models import (
|
from authentik.enterprise.providers.google_workspace.models import (
|
||||||
GoogleWorkspaceDeleteAction,
|
GoogleWorkspaceProvider,
|
||||||
GoogleWorkspaceProviderGroup,
|
GoogleWorkspaceProviderGroup,
|
||||||
GoogleWorkspaceProviderMapping,
|
GoogleWorkspaceProviderMapping,
|
||||||
GoogleWorkspaceProviderUser,
|
GoogleWorkspaceProviderUser,
|
||||||
)
|
)
|
||||||
from authentik.events.models import Event, EventAction
|
from authentik.lib.sync.mapper import PropertyMappingManager
|
||||||
from authentik.lib.sync.outgoing.base import Direction
|
from authentik.lib.sync.outgoing.base import Direction
|
||||||
from authentik.lib.sync.outgoing.exceptions import (
|
from authentik.lib.sync.outgoing.exceptions import (
|
||||||
NotFoundSyncException,
|
NotFoundSyncException,
|
||||||
ObjectExistsSyncException,
|
ObjectExistsSyncException,
|
||||||
StopSync,
|
|
||||||
TransientSyncException,
|
TransientSyncException,
|
||||||
)
|
)
|
||||||
from authentik.lib.utils.errors import exception_to_string
|
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
|
||||||
|
|
||||||
|
|
||||||
class GoogleWorkspaceGroupClient(
|
class GoogleWorkspaceGroupClient(
|
||||||
@ -34,41 +28,21 @@ class GoogleWorkspaceGroupClient(
|
|||||||
connection_type_query = "group"
|
connection_type_query = "group"
|
||||||
can_discover = True
|
can_discover = True
|
||||||
|
|
||||||
def to_schema(self, obj: Group) -> dict:
|
def __init__(self, provider: GoogleWorkspaceProvider) -> None:
|
||||||
"""Convert authentik group"""
|
super().__init__(provider)
|
||||||
raw_google_group = {
|
self.mapper = PropertyMappingManager(
|
||||||
"email": f"{slugify(obj.name)}@{self.provider.default_group_email_domain}"
|
self.provider.property_mappings_group.all().order_by("name").select_subclasses(),
|
||||||
}
|
GoogleWorkspaceProviderMapping,
|
||||||
for mapping in (
|
["group", "provider", "connection"],
|
||||||
self.provider.property_mappings_group.all().order_by("name").select_subclasses()
|
)
|
||||||
):
|
|
||||||
if not isinstance(mapping, GoogleWorkspaceProviderMapping):
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
mapping: GoogleWorkspaceProviderMapping
|
|
||||||
value = mapping.evaluate(
|
|
||||||
user=None,
|
|
||||||
request=None,
|
|
||||||
group=obj,
|
|
||||||
provider=self.provider,
|
|
||||||
)
|
|
||||||
if value is None:
|
|
||||||
continue
|
|
||||||
always_merger.merge(raw_google_group, value)
|
|
||||||
except SkipObjectException as exc:
|
|
||||||
raise exc from exc
|
|
||||||
except (PropertyMappingExpressionException, ValueError) as exc:
|
|
||||||
# Value error can be raised when assigning invalid data to an attribute
|
|
||||||
Event.new(
|
|
||||||
EventAction.CONFIGURATION_ERROR,
|
|
||||||
message=f"Failed to evaluate property-mapping {exception_to_string(exc)}",
|
|
||||||
mapping=mapping,
|
|
||||||
).save()
|
|
||||||
raise StopSync(exc, obj, mapping) from exc
|
|
||||||
if not raw_google_group:
|
|
||||||
raise StopSync(ValueError("No group mappings configured"), obj)
|
|
||||||
|
|
||||||
return raw_google_group
|
def to_schema(self, obj: Group, connection: GoogleWorkspaceProviderGroup) -> dict:
|
||||||
|
"""Convert authentik group"""
|
||||||
|
return super().to_schema(
|
||||||
|
obj,
|
||||||
|
connection=connection,
|
||||||
|
email=f"{slugify(obj.name)}@{self.provider.default_group_email_domain}",
|
||||||
|
)
|
||||||
|
|
||||||
def delete(self, obj: Group):
|
def delete(self, obj: Group):
|
||||||
"""Delete group"""
|
"""Delete group"""
|
||||||
@ -79,7 +53,7 @@ class GoogleWorkspaceGroupClient(
|
|||||||
self.logger.debug("Group does not exist in Google, skipping")
|
self.logger.debug("Group does not exist in Google, skipping")
|
||||||
return None
|
return None
|
||||||
with transaction.atomic():
|
with transaction.atomic():
|
||||||
if self.provider.group_delete_action == GoogleWorkspaceDeleteAction.DELETE:
|
if self.provider.group_delete_action == OutgoingSyncDeleteAction.DELETE:
|
||||||
self._request(
|
self._request(
|
||||||
self.directory_service.groups().delete(groupKey=google_group.google_id)
|
self.directory_service.groups().delete(groupKey=google_group.google_id)
|
||||||
)
|
)
|
||||||
@ -87,7 +61,7 @@ class GoogleWorkspaceGroupClient(
|
|||||||
|
|
||||||
def create(self, group: Group):
|
def create(self, group: Group):
|
||||||
"""Create group from scratch and create a connection object"""
|
"""Create group from scratch and create a connection object"""
|
||||||
google_group = self.to_schema(group)
|
google_group = self.to_schema(group, None)
|
||||||
self.check_email_valid(google_group["email"])
|
self.check_email_valid(google_group["email"])
|
||||||
with transaction.atomic():
|
with transaction.atomic():
|
||||||
try:
|
try:
|
||||||
@ -99,53 +73,49 @@ class GoogleWorkspaceGroupClient(
|
|||||||
group_data = self._request(
|
group_data = self._request(
|
||||||
self.directory_service.groups().get(groupKey=google_group["email"])
|
self.directory_service.groups().get(groupKey=google_group["email"])
|
||||||
)
|
)
|
||||||
GoogleWorkspaceProviderGroup.objects.create(
|
return GoogleWorkspaceProviderGroup.objects.create(
|
||||||
provider=self.provider, group=group, google_id=group_data["id"]
|
provider=self.provider,
|
||||||
|
group=group,
|
||||||
|
google_id=group_data["id"],
|
||||||
|
attributes=group_data,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
GoogleWorkspaceProviderGroup.objects.create(
|
return GoogleWorkspaceProviderGroup.objects.create(
|
||||||
provider=self.provider, group=group, google_id=response["id"]
|
provider=self.provider,
|
||||||
|
group=group,
|
||||||
|
google_id=response["id"],
|
||||||
|
attributes=response,
|
||||||
)
|
)
|
||||||
|
|
||||||
def update(self, group: Group, connection: GoogleWorkspaceProviderGroup):
|
def update(self, group: Group, connection: GoogleWorkspaceProviderGroup):
|
||||||
"""Update existing group"""
|
"""Update existing group"""
|
||||||
google_group = self.to_schema(group)
|
google_group = self.to_schema(group, connection)
|
||||||
self.check_email_valid(google_group["email"])
|
self.check_email_valid(google_group["email"])
|
||||||
try:
|
try:
|
||||||
return self._request(
|
response = self._request(
|
||||||
self.directory_service.groups().update(
|
self.directory_service.groups().update(
|
||||||
groupKey=connection.google_id,
|
groupKey=connection.google_id,
|
||||||
body=google_group,
|
body=google_group,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
connection.attributes = response
|
||||||
|
connection.save()
|
||||||
except NotFoundSyncException:
|
except NotFoundSyncException:
|
||||||
# Resource missing is handled by self.write, which will re-create the group
|
# Resource missing is handled by self.write, which will re-create the group
|
||||||
raise
|
raise
|
||||||
|
|
||||||
def write(self, obj: Group):
|
def write(self, obj: Group):
|
||||||
google_group, created = super().write(obj)
|
google_group, created = super().write(obj)
|
||||||
if created:
|
self.create_sync_members(obj, google_group)
|
||||||
self.create_sync_members(obj, google_group)
|
return google_group, created
|
||||||
return google_group
|
|
||||||
|
|
||||||
def create_sync_members(self, obj: Group, google_group: dict):
|
def create_sync_members(self, obj: Group, google_group: GoogleWorkspaceProviderGroup):
|
||||||
"""Sync all members after a group was created"""
|
"""Sync all members after a group was created"""
|
||||||
users = list(obj.users.order_by("id").values_list("id", flat=True))
|
users = list(obj.users.order_by("id").values_list("id", flat=True))
|
||||||
connections = GoogleWorkspaceProviderUser.objects.filter(
|
connections = GoogleWorkspaceProviderUser.objects.filter(
|
||||||
provider=self.provider, user__pk__in=users
|
provider=self.provider, user__pk__in=users
|
||||||
)
|
).values_list("google_id", flat=True)
|
||||||
for user in connections:
|
self._patch(google_group.google_id, Direction.add, connections)
|
||||||
try:
|
|
||||||
self._request(
|
|
||||||
self.directory_service.members().insert(
|
|
||||||
groupKey=google_group["id"],
|
|
||||||
body={
|
|
||||||
"email": user.google_id,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
)
|
|
||||||
except TransientSyncException:
|
|
||||||
continue
|
|
||||||
|
|
||||||
def update_group(self, group: Group, action: Direction, users_set: set[int]):
|
def update_group(self, group: Group, action: Direction, users_set: set[int]):
|
||||||
"""Update a groups members"""
|
"""Update a groups members"""
|
||||||
@ -242,4 +212,5 @@ class GoogleWorkspaceGroupClient(
|
|||||||
provider=self.provider,
|
provider=self.provider,
|
||||||
group=matching_authentik_group,
|
group=matching_authentik_group,
|
||||||
google_id=google_id,
|
google_id=google_id,
|
||||||
|
attributes=group,
|
||||||
)
|
)
|
||||||
|
|||||||
@ -1,24 +1,18 @@
|
|||||||
from deepmerge import always_merger
|
|
||||||
from django.db import transaction
|
from django.db import transaction
|
||||||
|
|
||||||
from authentik.core.expression.exceptions import (
|
|
||||||
PropertyMappingExpressionException,
|
|
||||||
SkipObjectException,
|
|
||||||
)
|
|
||||||
from authentik.core.models import User
|
from authentik.core.models import User
|
||||||
from authentik.enterprise.providers.google_workspace.clients.base import GoogleWorkspaceSyncClient
|
from authentik.enterprise.providers.google_workspace.clients.base import GoogleWorkspaceSyncClient
|
||||||
from authentik.enterprise.providers.google_workspace.models import (
|
from authentik.enterprise.providers.google_workspace.models import (
|
||||||
GoogleWorkspaceDeleteAction,
|
GoogleWorkspaceProvider,
|
||||||
GoogleWorkspaceProviderMapping,
|
GoogleWorkspaceProviderMapping,
|
||||||
GoogleWorkspaceProviderUser,
|
GoogleWorkspaceProviderUser,
|
||||||
)
|
)
|
||||||
from authentik.events.models import Event, EventAction
|
from authentik.lib.sync.mapper import PropertyMappingManager
|
||||||
from authentik.lib.sync.outgoing.exceptions import (
|
from authentik.lib.sync.outgoing.exceptions import (
|
||||||
ObjectExistsSyncException,
|
ObjectExistsSyncException,
|
||||||
StopSync,
|
|
||||||
TransientSyncException,
|
TransientSyncException,
|
||||||
)
|
)
|
||||||
from authentik.lib.utils.errors import exception_to_string
|
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
|
||||||
from authentik.policies.utils import delete_none_values
|
from authentik.policies.utils import delete_none_values
|
||||||
|
|
||||||
|
|
||||||
@ -29,37 +23,17 @@ class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceP
|
|||||||
connection_type_query = "user"
|
connection_type_query = "user"
|
||||||
can_discover = True
|
can_discover = True
|
||||||
|
|
||||||
def to_schema(self, obj: User) -> dict:
|
def __init__(self, provider: GoogleWorkspaceProvider) -> None:
|
||||||
|
super().__init__(provider)
|
||||||
|
self.mapper = PropertyMappingManager(
|
||||||
|
self.provider.property_mappings.all().order_by("name").select_subclasses(),
|
||||||
|
GoogleWorkspaceProviderMapping,
|
||||||
|
["provider", "connection"],
|
||||||
|
)
|
||||||
|
|
||||||
|
def to_schema(self, obj: User, connection: GoogleWorkspaceProviderUser) -> dict:
|
||||||
"""Convert authentik user"""
|
"""Convert authentik user"""
|
||||||
raw_google_user = {}
|
return delete_none_values(super().to_schema(obj, connection, primaryEmail=obj.email))
|
||||||
for mapping in self.provider.property_mappings.all().order_by("name").select_subclasses():
|
|
||||||
if not isinstance(mapping, GoogleWorkspaceProviderMapping):
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
mapping: GoogleWorkspaceProviderMapping
|
|
||||||
value = mapping.evaluate(
|
|
||||||
user=obj,
|
|
||||||
request=None,
|
|
||||||
provider=self.provider,
|
|
||||||
)
|
|
||||||
if value is None:
|
|
||||||
continue
|
|
||||||
always_merger.merge(raw_google_user, value)
|
|
||||||
except SkipObjectException as exc:
|
|
||||||
raise exc from exc
|
|
||||||
except (PropertyMappingExpressionException, ValueError) as exc:
|
|
||||||
# Value error can be raised when assigning invalid data to an attribute
|
|
||||||
Event.new(
|
|
||||||
EventAction.CONFIGURATION_ERROR,
|
|
||||||
message=f"Failed to evaluate property-mapping {exception_to_string(exc)}",
|
|
||||||
mapping=mapping,
|
|
||||||
).save()
|
|
||||||
raise StopSync(exc, obj, mapping) from exc
|
|
||||||
if not raw_google_user:
|
|
||||||
raise StopSync(ValueError("No user mappings configured"), obj)
|
|
||||||
if "primaryEmail" not in raw_google_user:
|
|
||||||
raw_google_user["primaryEmail"] = str(obj.email)
|
|
||||||
return delete_none_values(raw_google_user)
|
|
||||||
|
|
||||||
def delete(self, obj: User):
|
def delete(self, obj: User):
|
||||||
"""Delete user"""
|
"""Delete user"""
|
||||||
@ -71,11 +45,11 @@ class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceP
|
|||||||
return None
|
return None
|
||||||
with transaction.atomic():
|
with transaction.atomic():
|
||||||
response = None
|
response = None
|
||||||
if self.provider.user_delete_action == GoogleWorkspaceDeleteAction.DELETE:
|
if self.provider.user_delete_action == OutgoingSyncDeleteAction.DELETE:
|
||||||
response = self._request(
|
response = self._request(
|
||||||
self.directory_service.users().delete(userKey=google_user.google_id)
|
self.directory_service.users().delete(userKey=google_user.google_id)
|
||||||
)
|
)
|
||||||
elif self.provider.user_delete_action == GoogleWorkspaceDeleteAction.SUSPEND:
|
elif self.provider.user_delete_action == OutgoingSyncDeleteAction.SUSPEND:
|
||||||
response = self._request(
|
response = self._request(
|
||||||
self.directory_service.users().update(
|
self.directory_service.users().update(
|
||||||
userKey=google_user.google_id, body={"suspended": True}
|
userKey=google_user.google_id, body={"suspended": True}
|
||||||
@ -86,7 +60,7 @@ class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceP
|
|||||||
|
|
||||||
def create(self, user: User):
|
def create(self, user: User):
|
||||||
"""Create user from scratch and create a connection object"""
|
"""Create user from scratch and create a connection object"""
|
||||||
google_user = self.to_schema(user)
|
google_user = self.to_schema(user, None)
|
||||||
self.check_email_valid(
|
self.check_email_valid(
|
||||||
google_user["primaryEmail"], *[x["address"] for x in google_user.get("emails", [])]
|
google_user["primaryEmail"], *[x["address"] for x in google_user.get("emails", [])]
|
||||||
)
|
)
|
||||||
@ -95,25 +69,30 @@ class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceP
|
|||||||
response = self._request(self.directory_service.users().insert(body=google_user))
|
response = self._request(self.directory_service.users().insert(body=google_user))
|
||||||
except ObjectExistsSyncException:
|
except ObjectExistsSyncException:
|
||||||
# user already exists in google workspace, so we can connect them manually
|
# user already exists in google workspace, so we can connect them manually
|
||||||
GoogleWorkspaceProviderUser.objects.create(
|
return GoogleWorkspaceProviderUser.objects.create(
|
||||||
provider=self.provider, user=user, google_id=user.email
|
provider=self.provider, user=user, google_id=user.email, attributes={}
|
||||||
)
|
)
|
||||||
except TransientSyncException as exc:
|
except TransientSyncException as exc:
|
||||||
raise exc
|
raise exc
|
||||||
else:
|
else:
|
||||||
GoogleWorkspaceProviderUser.objects.create(
|
return GoogleWorkspaceProviderUser.objects.create(
|
||||||
provider=self.provider, user=user, google_id=response["primaryEmail"]
|
provider=self.provider,
|
||||||
|
user=user,
|
||||||
|
google_id=response["primaryEmail"],
|
||||||
|
attributes=response,
|
||||||
)
|
)
|
||||||
|
|
||||||
def update(self, user: User, connection: GoogleWorkspaceProviderUser):
|
def update(self, user: User, connection: GoogleWorkspaceProviderUser):
|
||||||
"""Update existing user"""
|
"""Update existing user"""
|
||||||
google_user = self.to_schema(user)
|
google_user = self.to_schema(user, connection)
|
||||||
self.check_email_valid(
|
self.check_email_valid(
|
||||||
google_user["primaryEmail"], *[x["address"] for x in google_user.get("emails", [])]
|
google_user["primaryEmail"], *[x["address"] for x in google_user.get("emails", [])]
|
||||||
)
|
)
|
||||||
self._request(
|
response = self._request(
|
||||||
self.directory_service.users().update(userKey=connection.google_id, body=google_user)
|
self.directory_service.users().update(userKey=connection.google_id, body=google_user)
|
||||||
)
|
)
|
||||||
|
connection.attributes = response
|
||||||
|
connection.save()
|
||||||
|
|
||||||
def discover(self):
|
def discover(self):
|
||||||
"""Iterate through all users and connect them with authentik users if possible"""
|
"""Iterate through all users and connect them with authentik users if possible"""
|
||||||
@ -138,4 +117,5 @@ class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceP
|
|||||||
provider=self.provider,
|
provider=self.provider,
|
||||||
user=matching_authentik_user,
|
user=matching_authentik_user,
|
||||||
google_id=email,
|
google_id=email,
|
||||||
|
attributes=user,
|
||||||
)
|
)
|
||||||
|
|||||||
@ -0,0 +1,179 @@
|
|||||||
|
# Generated by Django 5.0.6 on 2024-05-09 12:57
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
import uuid
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
replaces = [
|
||||||
|
("authentik_providers_google_workspace", "0001_initial"),
|
||||||
|
(
|
||||||
|
"authentik_providers_google_workspace",
|
||||||
|
"0002_alter_googleworkspaceprovidergroup_options_and_more",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
initial = True
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("authentik_core", "0035_alter_group_options_and_more"),
|
||||||
|
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="GoogleWorkspaceProviderMapping",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"propertymapping_ptr",
|
||||||
|
models.OneToOneField(
|
||||||
|
auto_created=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
parent_link=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
to="authentik_core.propertymapping",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"verbose_name": "Google Workspace Provider Mapping",
|
||||||
|
"verbose_name_plural": "Google Workspace Provider Mappings",
|
||||||
|
},
|
||||||
|
bases=("authentik_core.propertymapping",),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="GoogleWorkspaceProvider",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"provider_ptr",
|
||||||
|
models.OneToOneField(
|
||||||
|
auto_created=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
parent_link=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
to="authentik_core.provider",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("delegated_subject", models.EmailField(max_length=254)),
|
||||||
|
("credentials", models.JSONField()),
|
||||||
|
(
|
||||||
|
"scopes",
|
||||||
|
models.TextField(
|
||||||
|
default="https://www.googleapis.com/auth/admin.directory.user,https://www.googleapis.com/auth/admin.directory.group,https://www.googleapis.com/auth/admin.directory.group.member,https://www.googleapis.com/auth/admin.directory.domain.readonly"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("default_group_email_domain", models.TextField()),
|
||||||
|
("exclude_users_service_account", models.BooleanField(default=False)),
|
||||||
|
(
|
||||||
|
"user_delete_action",
|
||||||
|
models.TextField(
|
||||||
|
choices=[
|
||||||
|
("do_nothing", "Do Nothing"),
|
||||||
|
("delete", "Delete"),
|
||||||
|
("suspend", "Suspend"),
|
||||||
|
],
|
||||||
|
default="delete",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"group_delete_action",
|
||||||
|
models.TextField(
|
||||||
|
choices=[
|
||||||
|
("do_nothing", "Do Nothing"),
|
||||||
|
("delete", "Delete"),
|
||||||
|
("suspend", "Suspend"),
|
||||||
|
],
|
||||||
|
default="delete",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"filter_group",
|
||||||
|
models.ForeignKey(
|
||||||
|
default=None,
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.SET_DEFAULT,
|
||||||
|
to="authentik_core.group",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"property_mappings_group",
|
||||||
|
models.ManyToManyField(
|
||||||
|
blank=True,
|
||||||
|
default=None,
|
||||||
|
help_text="Property mappings used for group creation/updating.",
|
||||||
|
to="authentik_core.propertymapping",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"verbose_name": "Google Workspace Provider",
|
||||||
|
"verbose_name_plural": "Google Workspace Providers",
|
||||||
|
},
|
||||||
|
bases=("authentik_core.provider", models.Model),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="GoogleWorkspaceProviderGroup",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"id",
|
||||||
|
models.UUIDField(
|
||||||
|
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("google_id", models.TextField()),
|
||||||
|
(
|
||||||
|
"group",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE, to="authentik_core.group"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"provider",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
to="authentik_providers_google_workspace.googleworkspaceprovider",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"unique_together": {("google_id", "group", "provider")},
|
||||||
|
"verbose_name": "Google Workspace Provider Group",
|
||||||
|
"verbose_name_plural": "Google Workspace Provider Groups",
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="GoogleWorkspaceProviderUser",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"id",
|
||||||
|
models.UUIDField(
|
||||||
|
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("google_id", models.TextField()),
|
||||||
|
(
|
||||||
|
"provider",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
to="authentik_providers_google_workspace.googleworkspaceprovider",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"user",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"unique_together": {("google_id", "user", "provider")},
|
||||||
|
"verbose_name": "Google Workspace Provider User",
|
||||||
|
"verbose_name_plural": "Google Workspace Provider Users",
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
||||||
@ -0,0 +1,27 @@
|
|||||||
|
# Generated by Django 5.0.6 on 2024-05-08 14:35
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("authentik_providers_google_workspace", "0001_initial"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name="googleworkspaceprovidergroup",
|
||||||
|
options={
|
||||||
|
"verbose_name": "Google Workspace Provider Group",
|
||||||
|
"verbose_name_plural": "Google Workspace Provider Groups",
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name="googleworkspaceprovideruser",
|
||||||
|
options={
|
||||||
|
"verbose_name": "Google Workspace Provider User",
|
||||||
|
"verbose_name_plural": "Google Workspace Provider Users",
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
||||||
@ -0,0 +1,26 @@
|
|||||||
|
# Generated by Django 5.0.6 on 2024-05-23 20:48
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
(
|
||||||
|
"authentik_providers_google_workspace",
|
||||||
|
"0001_squashed_0002_alter_googleworkspaceprovidergroup_options_and_more",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="googleworkspaceprovidergroup",
|
||||||
|
name="attributes",
|
||||||
|
field=models.JSONField(default=dict),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="googleworkspaceprovideruser",
|
||||||
|
name="attributes",
|
||||||
|
field=models.JSONField(default=dict),
|
||||||
|
),
|
||||||
|
]
|
||||||
@ -5,6 +5,7 @@ from uuid import uuid4
|
|||||||
|
|
||||||
from django.db import models
|
from django.db import models
|
||||||
from django.db.models import QuerySet
|
from django.db.models import QuerySet
|
||||||
|
from django.templatetags.static import static
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from google.oauth2.service_account import Credentials
|
from google.oauth2.service_account import Credentials
|
||||||
from rest_framework.serializers import Serializer
|
from rest_framework.serializers import Serializer
|
||||||
@ -16,8 +17,9 @@ from authentik.core.models import (
|
|||||||
User,
|
User,
|
||||||
UserTypes,
|
UserTypes,
|
||||||
)
|
)
|
||||||
|
from authentik.lib.models import SerializerModel
|
||||||
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
|
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
|
||||||
from authentik.lib.sync.outgoing.models import OutgoingSyncProvider
|
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction, OutgoingSyncProvider
|
||||||
|
|
||||||
|
|
||||||
def default_scopes() -> list[str]:
|
def default_scopes() -> list[str]:
|
||||||
@ -29,15 +31,6 @@ def default_scopes() -> list[str]:
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
class GoogleWorkspaceDeleteAction(models.TextChoices):
|
|
||||||
"""Action taken when a user/group is deleted in authentik. Suspend is not available for groups,
|
|
||||||
and will be treated as `do_nothing`"""
|
|
||||||
|
|
||||||
DO_NOTHING = "do_nothing"
|
|
||||||
DELETE = "delete"
|
|
||||||
SUSPEND = "suspend"
|
|
||||||
|
|
||||||
|
|
||||||
class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
|
class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
|
||||||
"""Sync users from authentik into Google Workspace."""
|
"""Sync users from authentik into Google Workspace."""
|
||||||
|
|
||||||
@ -48,10 +41,10 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
|
|||||||
default_group_email_domain = models.TextField()
|
default_group_email_domain = models.TextField()
|
||||||
exclude_users_service_account = models.BooleanField(default=False)
|
exclude_users_service_account = models.BooleanField(default=False)
|
||||||
user_delete_action = models.TextField(
|
user_delete_action = models.TextField(
|
||||||
choices=GoogleWorkspaceDeleteAction.choices, default=GoogleWorkspaceDeleteAction.DELETE
|
choices=OutgoingSyncDeleteAction.choices, default=OutgoingSyncDeleteAction.DELETE
|
||||||
)
|
)
|
||||||
group_delete_action = models.TextField(
|
group_delete_action = models.TextField(
|
||||||
choices=GoogleWorkspaceDeleteAction.choices, default=GoogleWorkspaceDeleteAction.DELETE
|
choices=OutgoingSyncDeleteAction.choices, default=OutgoingSyncDeleteAction.DELETE
|
||||||
)
|
)
|
||||||
|
|
||||||
filter_group = models.ForeignKey(
|
filter_group = models.ForeignKey(
|
||||||
@ -106,6 +99,10 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
|
|||||||
).with_subject(self.delegated_subject),
|
).with_subject(self.delegated_subject),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def icon_url(self) -> str | None:
|
||||||
|
return static("authentik/sources/google.svg")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def component(self) -> str:
|
def component(self) -> str:
|
||||||
return "ak-provider-google-workspace-form"
|
return "ak-provider-google-workspace-form"
|
||||||
@ -113,10 +110,10 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
|
|||||||
@property
|
@property
|
||||||
def serializer(self) -> type[Serializer]:
|
def serializer(self) -> type[Serializer]:
|
||||||
from authentik.enterprise.providers.google_workspace.api.providers import (
|
from authentik.enterprise.providers.google_workspace.api.providers import (
|
||||||
GoogleProviderSerializer,
|
GoogleWorkspaceProviderSerializer,
|
||||||
)
|
)
|
||||||
|
|
||||||
return GoogleProviderSerializer
|
return GoogleWorkspaceProviderSerializer
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return f"Google Workspace Provider {self.name}"
|
return f"Google Workspace Provider {self.name}"
|
||||||
@ -136,10 +133,10 @@ class GoogleWorkspaceProviderMapping(PropertyMapping):
|
|||||||
@property
|
@property
|
||||||
def serializer(self) -> type[Serializer]:
|
def serializer(self) -> type[Serializer]:
|
||||||
from authentik.enterprise.providers.google_workspace.api.property_mappings import (
|
from authentik.enterprise.providers.google_workspace.api.property_mappings import (
|
||||||
GoogleProviderMappingSerializer,
|
GoogleWorkspaceProviderMappingSerializer,
|
||||||
)
|
)
|
||||||
|
|
||||||
return GoogleProviderMappingSerializer
|
return GoogleWorkspaceProviderMappingSerializer
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return f"Google Workspace Provider Mapping {self.name}"
|
return f"Google Workspace Provider Mapping {self.name}"
|
||||||
@ -149,31 +146,53 @@ class GoogleWorkspaceProviderMapping(PropertyMapping):
|
|||||||
verbose_name_plural = _("Google Workspace Provider Mappings")
|
verbose_name_plural = _("Google Workspace Provider Mappings")
|
||||||
|
|
||||||
|
|
||||||
class GoogleWorkspaceProviderUser(models.Model):
|
class GoogleWorkspaceProviderUser(SerializerModel):
|
||||||
"""Mapping of a user and provider to a Google user ID"""
|
"""Mapping of a user and provider to a Google user ID"""
|
||||||
|
|
||||||
id = models.UUIDField(primary_key=True, editable=False, default=uuid4)
|
id = models.UUIDField(primary_key=True, editable=False, default=uuid4)
|
||||||
google_id = models.TextField()
|
google_id = models.TextField()
|
||||||
user = models.ForeignKey(User, on_delete=models.CASCADE)
|
user = models.ForeignKey(User, on_delete=models.CASCADE)
|
||||||
provider = models.ForeignKey(GoogleWorkspaceProvider, on_delete=models.CASCADE)
|
provider = models.ForeignKey(GoogleWorkspaceProvider, on_delete=models.CASCADE)
|
||||||
|
attributes = models.JSONField(default=dict)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def serializer(self) -> type[Serializer]:
|
||||||
|
from authentik.enterprise.providers.google_workspace.api.users import (
|
||||||
|
GoogleWorkspaceProviderUserSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
return GoogleWorkspaceProviderUserSerializer
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
|
verbose_name = _("Google Workspace Provider User")
|
||||||
|
verbose_name_plural = _("Google Workspace Provider Users")
|
||||||
unique_together = (("google_id", "user", "provider"),)
|
unique_together = (("google_id", "user", "provider"),)
|
||||||
|
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
return f"Google Workspace User {self.user_id} to {self.provider_id}"
|
return f"Google Workspace Provider User {self.user_id} to {self.provider_id}"
|
||||||
|
|
||||||
|
|
||||||
class GoogleWorkspaceProviderGroup(models.Model):
|
class GoogleWorkspaceProviderGroup(SerializerModel):
|
||||||
"""Mapping of a group and provider to a Google group ID"""
|
"""Mapping of a group and provider to a Google group ID"""
|
||||||
|
|
||||||
id = models.UUIDField(primary_key=True, editable=False, default=uuid4)
|
id = models.UUIDField(primary_key=True, editable=False, default=uuid4)
|
||||||
google_id = models.TextField()
|
google_id = models.TextField()
|
||||||
group = models.ForeignKey(Group, on_delete=models.CASCADE)
|
group = models.ForeignKey(Group, on_delete=models.CASCADE)
|
||||||
provider = models.ForeignKey(GoogleWorkspaceProvider, on_delete=models.CASCADE)
|
provider = models.ForeignKey(GoogleWorkspaceProvider, on_delete=models.CASCADE)
|
||||||
|
attributes = models.JSONField(default=dict)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def serializer(self) -> type[Serializer]:
|
||||||
|
from authentik.enterprise.providers.google_workspace.api.groups import (
|
||||||
|
GoogleWorkspaceProviderGroupSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
return GoogleWorkspaceProviderGroupSerializer
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
|
verbose_name = _("Google Workspace Provider Group")
|
||||||
|
verbose_name_plural = _("Google Workspace Provider Groups")
|
||||||
unique_together = (("google_id", "group", "provider"),)
|
unique_together = (("google_id", "group", "provider"),)
|
||||||
|
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
return f"Google Workspace Group {self.group_id} to {self.provider_id}"
|
return f"Google Workspace Provider Group {self.group_id} to {self.provider_id}"
|
||||||
|
|||||||
@ -2,18 +2,21 @@
|
|||||||
|
|
||||||
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
|
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
|
||||||
from authentik.events.system_tasks import SystemTask
|
from authentik.events.system_tasks import SystemTask
|
||||||
|
from authentik.lib.sync.outgoing.exceptions import TransientSyncException
|
||||||
from authentik.lib.sync.outgoing.tasks import SyncTasks
|
from authentik.lib.sync.outgoing.tasks import SyncTasks
|
||||||
from authentik.root.celery import CELERY_APP
|
from authentik.root.celery import CELERY_APP
|
||||||
|
|
||||||
sync_tasks = SyncTasks(GoogleWorkspaceProvider)
|
sync_tasks = SyncTasks(GoogleWorkspaceProvider)
|
||||||
|
|
||||||
|
|
||||||
@CELERY_APP.task()
|
@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
|
||||||
def google_workspace_sync_objects(*args, **kwargs):
|
def google_workspace_sync_objects(*args, **kwargs):
|
||||||
return sync_tasks.sync_objects(*args, **kwargs)
|
return sync_tasks.sync_objects(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
@CELERY_APP.task(base=SystemTask, bind=True)
|
@CELERY_APP.task(
|
||||||
|
base=SystemTask, bind=True, autoretry_for=(TransientSyncException,), retry_backoff=True
|
||||||
|
)
|
||||||
def google_workspace_sync(self, provider_pk: int, *args, **kwargs):
|
def google_workspace_sync(self, provider_pk: int, *args, **kwargs):
|
||||||
"""Run full sync for Google Workspace provider"""
|
"""Run full sync for Google Workspace provider"""
|
||||||
return sync_tasks.sync_single(self, provider_pk, google_workspace_sync_objects)
|
return sync_tasks.sync_single(self, provider_pk, google_workspace_sync_objects)
|
||||||
@ -24,11 +27,11 @@ def google_workspace_sync_all():
|
|||||||
return sync_tasks.sync_all(google_workspace_sync)
|
return sync_tasks.sync_all(google_workspace_sync)
|
||||||
|
|
||||||
|
|
||||||
@CELERY_APP.task()
|
@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
|
||||||
def google_workspace_sync_direct(*args, **kwargs):
|
def google_workspace_sync_direct(*args, **kwargs):
|
||||||
return sync_tasks.sync_signal_direct(*args, **kwargs)
|
return sync_tasks.sync_signal_direct(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
@CELERY_APP.task()
|
@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
|
||||||
def google_workspace_sync_m2m(*args, **kwargs):
|
def google_workspace_sync_m2m(*args, **kwargs):
|
||||||
return sync_tasks.sync_signal_m2m(*args, **kwargs)
|
return sync_tasks.sync_signal_m2m(*args, **kwargs)
|
||||||
|
|||||||
@ -9,7 +9,6 @@ from authentik.core.models import Application, Group, User
|
|||||||
from authentik.core.tests.utils import create_test_user
|
from authentik.core.tests.utils import create_test_user
|
||||||
from authentik.enterprise.providers.google_workspace.clients.test_http import MockHTTP
|
from authentik.enterprise.providers.google_workspace.clients.test_http import MockHTTP
|
||||||
from authentik.enterprise.providers.google_workspace.models import (
|
from authentik.enterprise.providers.google_workspace.models import (
|
||||||
GoogleWorkspaceDeleteAction,
|
|
||||||
GoogleWorkspaceProvider,
|
GoogleWorkspaceProvider,
|
||||||
GoogleWorkspaceProviderGroup,
|
GoogleWorkspaceProviderGroup,
|
||||||
GoogleWorkspaceProviderMapping,
|
GoogleWorkspaceProviderMapping,
|
||||||
@ -17,6 +16,7 @@ from authentik.enterprise.providers.google_workspace.models import (
|
|||||||
from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync
|
from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync
|
||||||
from authentik.events.models import Event, EventAction
|
from authentik.events.models import Event, EventAction
|
||||||
from authentik.lib.generators import generate_id
|
from authentik.lib.generators import generate_id
|
||||||
|
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
|
||||||
from authentik.lib.tests.utils import load_fixture
|
from authentik.lib.tests.utils import load_fixture
|
||||||
from authentik.tenants.models import Tenant
|
from authentik.tenants.models import Tenant
|
||||||
|
|
||||||
@ -82,6 +82,27 @@ class GoogleWorkspaceGroupTests(TestCase):
|
|||||||
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
self.assertEqual(len(http.requests()), 2)
|
self.assertEqual(len(http.requests()), 2)
|
||||||
|
|
||||||
|
def test_group_not_created(self):
|
||||||
|
"""Test without group property mappings, no group is created"""
|
||||||
|
self.provider.property_mappings_group.clear()
|
||||||
|
uid = generate_id()
|
||||||
|
http = MockHTTP()
|
||||||
|
http.add_response(
|
||||||
|
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
|
||||||
|
domains_list_v1_mock,
|
||||||
|
)
|
||||||
|
with patch(
|
||||||
|
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
|
||||||
|
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
|
||||||
|
):
|
||||||
|
group = Group.objects.create(name=uid)
|
||||||
|
google_group = GoogleWorkspaceProviderGroup.objects.filter(
|
||||||
|
provider=self.provider, group=group
|
||||||
|
).first()
|
||||||
|
self.assertIsNone(google_group)
|
||||||
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
|
self.assertEqual(len(http.requests()), 1)
|
||||||
|
|
||||||
def test_group_create_update(self):
|
def test_group_create_update(self):
|
||||||
"""Test group updating"""
|
"""Test group updating"""
|
||||||
uid = generate_id()
|
uid = generate_id()
|
||||||
@ -240,7 +261,7 @@ class GoogleWorkspaceGroupTests(TestCase):
|
|||||||
|
|
||||||
def test_group_create_delete_do_nothing(self):
|
def test_group_create_delete_do_nothing(self):
|
||||||
"""Test group deletion (delete action = do nothing)"""
|
"""Test group deletion (delete action = do nothing)"""
|
||||||
self.provider.group_delete_action = GoogleWorkspaceDeleteAction.DO_NOTHING
|
self.provider.group_delete_action = OutgoingSyncDeleteAction.DO_NOTHING
|
||||||
self.provider.save()
|
self.provider.save()
|
||||||
uid = generate_id()
|
uid = generate_id()
|
||||||
http = MockHTTP()
|
http = MockHTTP()
|
||||||
|
|||||||
@ -9,7 +9,6 @@ from authentik.blueprints.tests import apply_blueprint
|
|||||||
from authentik.core.models import Application, Group, User
|
from authentik.core.models import Application, Group, User
|
||||||
from authentik.enterprise.providers.google_workspace.clients.test_http import MockHTTP
|
from authentik.enterprise.providers.google_workspace.clients.test_http import MockHTTP
|
||||||
from authentik.enterprise.providers.google_workspace.models import (
|
from authentik.enterprise.providers.google_workspace.models import (
|
||||||
GoogleWorkspaceDeleteAction,
|
|
||||||
GoogleWorkspaceProvider,
|
GoogleWorkspaceProvider,
|
||||||
GoogleWorkspaceProviderMapping,
|
GoogleWorkspaceProviderMapping,
|
||||||
GoogleWorkspaceProviderUser,
|
GoogleWorkspaceProviderUser,
|
||||||
@ -17,6 +16,7 @@ from authentik.enterprise.providers.google_workspace.models import (
|
|||||||
from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync
|
from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync
|
||||||
from authentik.events.models import Event, EventAction
|
from authentik.events.models import Event, EventAction
|
||||||
from authentik.lib.generators import generate_id
|
from authentik.lib.generators import generate_id
|
||||||
|
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
|
||||||
from authentik.lib.tests.utils import load_fixture
|
from authentik.lib.tests.utils import load_fixture
|
||||||
from authentik.tenants.models import Tenant
|
from authentik.tenants.models import Tenant
|
||||||
|
|
||||||
@ -86,6 +86,31 @@ class GoogleWorkspaceUserTests(TestCase):
|
|||||||
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
self.assertEqual(len(http.requests()), 2)
|
self.assertEqual(len(http.requests()), 2)
|
||||||
|
|
||||||
|
def test_user_not_created(self):
|
||||||
|
"""Test without property mappings, no group is created"""
|
||||||
|
self.provider.property_mappings.clear()
|
||||||
|
uid = generate_id()
|
||||||
|
http = MockHTTP()
|
||||||
|
http.add_response(
|
||||||
|
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
|
||||||
|
domains_list_v1_mock,
|
||||||
|
)
|
||||||
|
with patch(
|
||||||
|
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
|
||||||
|
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
|
||||||
|
):
|
||||||
|
user = User.objects.create(
|
||||||
|
username=uid,
|
||||||
|
name=f"{uid} {uid}",
|
||||||
|
email=f"{uid}@goauthentik.io",
|
||||||
|
)
|
||||||
|
google_user = GoogleWorkspaceProviderUser.objects.filter(
|
||||||
|
provider=self.provider, user=user
|
||||||
|
).first()
|
||||||
|
self.assertIsNone(google_user)
|
||||||
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
|
self.assertEqual(len(http.requests()), 1)
|
||||||
|
|
||||||
def test_user_create_update(self):
|
def test_user_create_update(self):
|
||||||
"""Test user updating"""
|
"""Test user updating"""
|
||||||
uid = generate_id()
|
uid = generate_id()
|
||||||
@ -160,7 +185,7 @@ class GoogleWorkspaceUserTests(TestCase):
|
|||||||
|
|
||||||
def test_user_create_delete_suspend(self):
|
def test_user_create_delete_suspend(self):
|
||||||
"""Test user deletion (delete action = Suspend)"""
|
"""Test user deletion (delete action = Suspend)"""
|
||||||
self.provider.user_delete_action = GoogleWorkspaceDeleteAction.SUSPEND
|
self.provider.user_delete_action = OutgoingSyncDeleteAction.SUSPEND
|
||||||
self.provider.save()
|
self.provider.save()
|
||||||
uid = generate_id()
|
uid = generate_id()
|
||||||
http = MockHTTP()
|
http = MockHTTP()
|
||||||
@ -209,7 +234,7 @@ class GoogleWorkspaceUserTests(TestCase):
|
|||||||
|
|
||||||
def test_user_create_delete_do_nothing(self):
|
def test_user_create_delete_do_nothing(self):
|
||||||
"""Test user deletion (delete action = do nothing)"""
|
"""Test user deletion (delete action = do nothing)"""
|
||||||
self.provider.user_delete_action = GoogleWorkspaceDeleteAction.DO_NOTHING
|
self.provider.user_delete_action = OutgoingSyncDeleteAction.DO_NOTHING
|
||||||
self.provider.save()
|
self.provider.save()
|
||||||
uid = generate_id()
|
uid = generate_id()
|
||||||
http = MockHTTP()
|
http = MockHTTP()
|
||||||
|
|||||||
@ -1,11 +1,21 @@
|
|||||||
"""google provider urls"""
|
"""google provider urls"""
|
||||||
|
|
||||||
from authentik.enterprise.providers.google_workspace.api.property_mappings import (
|
from authentik.enterprise.providers.google_workspace.api.groups import (
|
||||||
GoogleProviderMappingViewSet,
|
GoogleWorkspaceProviderGroupViewSet,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.google_workspace.api.property_mappings import (
|
||||||
|
GoogleWorkspaceProviderMappingViewSet,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.google_workspace.api.providers import (
|
||||||
|
GoogleWorkspaceProviderViewSet,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.google_workspace.api.users import (
|
||||||
|
GoogleWorkspaceProviderUserViewSet,
|
||||||
)
|
)
|
||||||
from authentik.enterprise.providers.google_workspace.api.providers import GoogleProviderViewSet
|
|
||||||
|
|
||||||
api_urlpatterns = [
|
api_urlpatterns = [
|
||||||
("providers/google_workspace", GoogleProviderViewSet),
|
("providers/google_workspace", GoogleWorkspaceProviderViewSet),
|
||||||
("propertymappings/provider/google_workspace", GoogleProviderMappingViewSet),
|
("providers/google_workspace_users", GoogleWorkspaceProviderUserViewSet),
|
||||||
|
("providers/google_workspace_groups", GoogleWorkspaceProviderGroupViewSet),
|
||||||
|
("propertymappings/provider/google_workspace", GoogleWorkspaceProviderMappingViewSet),
|
||||||
]
|
]
|
||||||
|
|||||||
45
authentik/enterprise/providers/microsoft_entra/api/groups.py
Normal file
45
authentik/enterprise/providers/microsoft_entra/api/groups.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
"""MicrosoftEntraProviderGroup API Views"""
|
||||||
|
|
||||||
|
from rest_framework import mixins
|
||||||
|
from rest_framework.serializers import ModelSerializer
|
||||||
|
from rest_framework.viewsets import GenericViewSet
|
||||||
|
|
||||||
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
|
from authentik.core.api.users import UserGroupSerializer
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProviderGroup
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraProviderGroupSerializer(ModelSerializer):
|
||||||
|
"""MicrosoftEntraProviderGroup Serializer"""
|
||||||
|
|
||||||
|
group_obj = UserGroupSerializer(source="group", read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
|
||||||
|
model = MicrosoftEntraProviderGroup
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"microsoft_id",
|
||||||
|
"group",
|
||||||
|
"group_obj",
|
||||||
|
"provider",
|
||||||
|
"attributes",
|
||||||
|
]
|
||||||
|
extra_kwargs = {"attributes": {"read_only": True}}
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraProviderGroupViewSet(
|
||||||
|
mixins.CreateModelMixin,
|
||||||
|
mixins.RetrieveModelMixin,
|
||||||
|
mixins.DestroyModelMixin,
|
||||||
|
UsedByMixin,
|
||||||
|
mixins.ListModelMixin,
|
||||||
|
GenericViewSet,
|
||||||
|
):
|
||||||
|
"""MicrosoftEntraProviderGroup Viewset"""
|
||||||
|
|
||||||
|
queryset = MicrosoftEntraProviderGroup.objects.all().select_related("group")
|
||||||
|
serializer_class = MicrosoftEntraProviderGroupSerializer
|
||||||
|
filterset_fields = ["provider__id", "group__name", "group__group_uuid"]
|
||||||
|
search_fields = ["provider__name", "group__name"]
|
||||||
|
ordering = ["group__name"]
|
||||||
@ -0,0 +1,39 @@
|
|||||||
|
"""microsoft Property mappings API Views"""
|
||||||
|
|
||||||
|
from django_filters.filters import AllValuesMultipleFilter
|
||||||
|
from django_filters.filterset import FilterSet
|
||||||
|
from drf_spectacular.types import OpenApiTypes
|
||||||
|
from drf_spectacular.utils import extend_schema_field
|
||||||
|
from rest_framework.viewsets import ModelViewSet
|
||||||
|
|
||||||
|
from authentik.core.api.property_mappings import PropertyMappingSerializer
|
||||||
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProviderMapping
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraProviderMappingSerializer(PropertyMappingSerializer):
|
||||||
|
"""MicrosoftEntraProviderMapping Serializer"""
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = MicrosoftEntraProviderMapping
|
||||||
|
fields = PropertyMappingSerializer.Meta.fields
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraProviderMappingFilter(FilterSet):
|
||||||
|
"""Filter for MicrosoftEntraProviderMapping"""
|
||||||
|
|
||||||
|
managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = MicrosoftEntraProviderMapping
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraProviderMappingViewSet(UsedByMixin, ModelViewSet):
|
||||||
|
"""MicrosoftEntraProviderMapping Viewset"""
|
||||||
|
|
||||||
|
queryset = MicrosoftEntraProviderMapping.objects.all()
|
||||||
|
serializer_class = MicrosoftEntraProviderMappingSerializer
|
||||||
|
filterset_class = MicrosoftEntraProviderMappingFilter
|
||||||
|
search_fields = ["name"]
|
||||||
|
ordering = ["name"]
|
||||||
@ -0,0 +1,52 @@
|
|||||||
|
"""Microsoft Provider API Views"""
|
||||||
|
|
||||||
|
from rest_framework.viewsets import ModelViewSet
|
||||||
|
|
||||||
|
from authentik.core.api.providers import ProviderSerializer
|
||||||
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
|
from authentik.enterprise.api import EnterpriseRequiredMixin
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.tasks import microsoft_entra_sync
|
||||||
|
from authentik.lib.sync.outgoing.api import OutgoingSyncProviderStatusMixin
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
|
||||||
|
"""MicrosoftEntraProvider Serializer"""
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = MicrosoftEntraProvider
|
||||||
|
fields = [
|
||||||
|
"pk",
|
||||||
|
"name",
|
||||||
|
"property_mappings",
|
||||||
|
"property_mappings_group",
|
||||||
|
"component",
|
||||||
|
"assigned_backchannel_application_slug",
|
||||||
|
"assigned_backchannel_application_name",
|
||||||
|
"verbose_name",
|
||||||
|
"verbose_name_plural",
|
||||||
|
"meta_model_name",
|
||||||
|
"client_id",
|
||||||
|
"client_secret",
|
||||||
|
"tenant_id",
|
||||||
|
"exclude_users_service_account",
|
||||||
|
"filter_group",
|
||||||
|
"user_delete_action",
|
||||||
|
"group_delete_action",
|
||||||
|
]
|
||||||
|
extra_kwargs = {}
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraProviderViewSet(OutgoingSyncProviderStatusMixin, UsedByMixin, ModelViewSet):
|
||||||
|
"""MicrosoftEntraProvider Viewset"""
|
||||||
|
|
||||||
|
queryset = MicrosoftEntraProvider.objects.all()
|
||||||
|
serializer_class = MicrosoftEntraProviderSerializer
|
||||||
|
filterset_fields = [
|
||||||
|
"name",
|
||||||
|
"exclude_users_service_account",
|
||||||
|
"filter_group",
|
||||||
|
]
|
||||||
|
search_fields = ["name"]
|
||||||
|
ordering = ["name"]
|
||||||
|
sync_single_task = microsoft_entra_sync
|
||||||
45
authentik/enterprise/providers/microsoft_entra/api/users.py
Normal file
45
authentik/enterprise/providers/microsoft_entra/api/users.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
"""MicrosoftEntraProviderUser API Views"""
|
||||||
|
|
||||||
|
from rest_framework import mixins
|
||||||
|
from rest_framework.serializers import ModelSerializer
|
||||||
|
from rest_framework.viewsets import GenericViewSet
|
||||||
|
|
||||||
|
from authentik.core.api.groups import GroupMemberSerializer
|
||||||
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProviderUser
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraProviderUserSerializer(ModelSerializer):
|
||||||
|
"""MicrosoftEntraProviderUser Serializer"""
|
||||||
|
|
||||||
|
user_obj = GroupMemberSerializer(source="user", read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
|
||||||
|
model = MicrosoftEntraProviderUser
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"microsoft_id",
|
||||||
|
"user",
|
||||||
|
"user_obj",
|
||||||
|
"provider",
|
||||||
|
"attributes",
|
||||||
|
]
|
||||||
|
extra_kwargs = {"attributes": {"read_only": True}}
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraProviderUserViewSet(
|
||||||
|
mixins.CreateModelMixin,
|
||||||
|
mixins.RetrieveModelMixin,
|
||||||
|
mixins.DestroyModelMixin,
|
||||||
|
UsedByMixin,
|
||||||
|
mixins.ListModelMixin,
|
||||||
|
GenericViewSet,
|
||||||
|
):
|
||||||
|
"""MicrosoftEntraProviderUser Viewset"""
|
||||||
|
|
||||||
|
queryset = MicrosoftEntraProviderUser.objects.all().select_related("user")
|
||||||
|
serializer_class = MicrosoftEntraProviderUserSerializer
|
||||||
|
filterset_fields = ["provider__id", "user__username", "user__id"]
|
||||||
|
search_fields = ["provider__name", "user__username"]
|
||||||
|
ordering = ["user__username"]
|
||||||
9
authentik/enterprise/providers/microsoft_entra/apps.py
Normal file
9
authentik/enterprise/providers/microsoft_entra/apps.py
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
from authentik.enterprise.apps import EnterpriseConfig
|
||||||
|
|
||||||
|
|
||||||
|
class AuthentikEnterpriseProviderMicrosoftEntraConfig(EnterpriseConfig):
|
||||||
|
|
||||||
|
name = "authentik.enterprise.providers.microsoft_entra"
|
||||||
|
label = "authentik_providers_microsoft_entra"
|
||||||
|
verbose_name = "authentik Enterprise.Providers.Microsoft Entra"
|
||||||
|
default = True
|
||||||
110
authentik/enterprise/providers/microsoft_entra/clients/base.py
Normal file
110
authentik/enterprise/providers/microsoft_entra/clients/base.py
Normal file
@ -0,0 +1,110 @@
|
|||||||
|
from asyncio import run
|
||||||
|
from collections.abc import Coroutine
|
||||||
|
from dataclasses import asdict
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from azure.core.exceptions import (
|
||||||
|
ClientAuthenticationError,
|
||||||
|
ServiceRequestError,
|
||||||
|
ServiceResponseError,
|
||||||
|
)
|
||||||
|
from azure.identity.aio import ClientSecretCredential
|
||||||
|
from django.db.models import Model
|
||||||
|
from django.http import HttpResponseBadRequest, HttpResponseNotFound
|
||||||
|
from kiota_abstractions.api_error import APIError
|
||||||
|
from kiota_authentication_azure.azure_identity_authentication_provider import (
|
||||||
|
AzureIdentityAuthenticationProvider,
|
||||||
|
)
|
||||||
|
from kiota_http.kiota_client_factory import KiotaClientFactory
|
||||||
|
from msgraph.generated.models.entity import Entity
|
||||||
|
from msgraph.generated.models.o_data_errors.o_data_error import ODataError
|
||||||
|
from msgraph.graph_request_adapter import GraphRequestAdapter, options
|
||||||
|
from msgraph.graph_service_client import GraphServiceClient
|
||||||
|
from msgraph_core import GraphClientFactory
|
||||||
|
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
|
||||||
|
from authentik.events.utils import sanitize_item
|
||||||
|
from authentik.lib.sync.outgoing import HTTP_CONFLICT
|
||||||
|
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
|
||||||
|
from authentik.lib.sync.outgoing.exceptions import (
|
||||||
|
BadRequestSyncException,
|
||||||
|
NotFoundSyncException,
|
||||||
|
ObjectExistsSyncException,
|
||||||
|
StopSync,
|
||||||
|
TransientSyncException,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_request_adapter(
|
||||||
|
credentials: ClientSecretCredential, scopes: list[str] | None = None
|
||||||
|
) -> GraphRequestAdapter:
|
||||||
|
if scopes:
|
||||||
|
auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials, scopes=scopes)
|
||||||
|
else:
|
||||||
|
auth_provider = AzureIdentityAuthenticationProvider(credentials=credentials)
|
||||||
|
|
||||||
|
return GraphRequestAdapter(
|
||||||
|
auth_provider=auth_provider,
|
||||||
|
client=GraphClientFactory.create_with_default_middleware(
|
||||||
|
options=options, client=KiotaClientFactory.get_default_client()
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraSyncClient[TModel: Model, TConnection: Model, TSchema: dict](
|
||||||
|
BaseOutgoingSyncClient[TModel, TConnection, TSchema, MicrosoftEntraProvider]
|
||||||
|
):
|
||||||
|
"""Base client for syncing to microsoft entra"""
|
||||||
|
|
||||||
|
domains: list
|
||||||
|
|
||||||
|
def __init__(self, provider: MicrosoftEntraProvider) -> None:
|
||||||
|
super().__init__(provider)
|
||||||
|
self.credentials = provider.microsoft_credentials()
|
||||||
|
self.__prefetch_domains()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def client(self):
|
||||||
|
return GraphServiceClient(request_adapter=get_request_adapter(**self.credentials))
|
||||||
|
|
||||||
|
def _request[T](self, request: Coroutine[Any, Any, T]) -> T:
|
||||||
|
try:
|
||||||
|
return run(request)
|
||||||
|
except ClientAuthenticationError as exc:
|
||||||
|
raise StopSync(exc, None, None) from exc
|
||||||
|
except ODataError as exc:
|
||||||
|
raise StopSync(exc, None, None) from exc
|
||||||
|
except (ServiceRequestError, ServiceResponseError) as exc:
|
||||||
|
raise TransientSyncException("Failed to sent request") from exc
|
||||||
|
except APIError as exc:
|
||||||
|
if exc.response_status_code == HttpResponseNotFound.status_code:
|
||||||
|
raise NotFoundSyncException("Object not found") from exc
|
||||||
|
if exc.response_status_code == HttpResponseBadRequest.status_code:
|
||||||
|
raise BadRequestSyncException("Bad request", exc.response_headers) from exc
|
||||||
|
if exc.response_status_code == HTTP_CONFLICT:
|
||||||
|
raise ObjectExistsSyncException("Object exists", exc.response_headers) from exc
|
||||||
|
raise exc
|
||||||
|
|
||||||
|
def __prefetch_domains(self):
|
||||||
|
self.domains = []
|
||||||
|
organizations = self._request(self.client.organization.get())
|
||||||
|
next_link = True
|
||||||
|
while next_link:
|
||||||
|
for org in organizations.value:
|
||||||
|
self.domains.extend([x.name for x in org.verified_domains])
|
||||||
|
next_link = organizations.odata_next_link
|
||||||
|
if not next_link:
|
||||||
|
break
|
||||||
|
organizations = self._request(self.client.organization.with_url(next_link).get())
|
||||||
|
|
||||||
|
def check_email_valid(self, *emails: str):
|
||||||
|
for email in emails:
|
||||||
|
if not any(email.endswith(f"@{domain_name}") for domain_name in self.domains):
|
||||||
|
raise BadRequestSyncException(f"Invalid email domain: {email}")
|
||||||
|
|
||||||
|
def entity_as_dict(self, entity: Entity) -> dict:
|
||||||
|
"""Create a dictionary of a model instance, making sure to remove (known) things
|
||||||
|
we can't JSON serialize"""
|
||||||
|
raw_data = asdict(entity)
|
||||||
|
raw_data.pop("backing_store", None)
|
||||||
|
return sanitize_item(raw_data)
|
||||||
228
authentik/enterprise/providers/microsoft_entra/clients/groups.py
Normal file
228
authentik/enterprise/providers/microsoft_entra/clients/groups.py
Normal file
@ -0,0 +1,228 @@
|
|||||||
|
from deepmerge import always_merger
|
||||||
|
from django.db import transaction
|
||||||
|
from msgraph.generated.groups.groups_request_builder import GroupsRequestBuilder
|
||||||
|
from msgraph.generated.models.group import Group as MSGroup
|
||||||
|
from msgraph.generated.models.reference_create import ReferenceCreate
|
||||||
|
|
||||||
|
from authentik.core.models import Group
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.clients.base import MicrosoftEntraSyncClient
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.models import (
|
||||||
|
MicrosoftEntraProvider,
|
||||||
|
MicrosoftEntraProviderGroup,
|
||||||
|
MicrosoftEntraProviderMapping,
|
||||||
|
MicrosoftEntraProviderUser,
|
||||||
|
)
|
||||||
|
from authentik.lib.sync.mapper import PropertyMappingManager
|
||||||
|
from authentik.lib.sync.outgoing.base import Direction
|
||||||
|
from authentik.lib.sync.outgoing.exceptions import (
|
||||||
|
NotFoundSyncException,
|
||||||
|
ObjectExistsSyncException,
|
||||||
|
StopSync,
|
||||||
|
TransientSyncException,
|
||||||
|
)
|
||||||
|
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraGroupClient(
|
||||||
|
MicrosoftEntraSyncClient[Group, MicrosoftEntraProviderGroup, MSGroup]
|
||||||
|
):
|
||||||
|
"""Microsoft client for groups"""
|
||||||
|
|
||||||
|
connection_type = MicrosoftEntraProviderGroup
|
||||||
|
connection_type_query = "group"
|
||||||
|
can_discover = True
|
||||||
|
|
||||||
|
def __init__(self, provider: MicrosoftEntraProvider) -> None:
|
||||||
|
super().__init__(provider)
|
||||||
|
self.mapper = PropertyMappingManager(
|
||||||
|
self.provider.property_mappings_group.all().order_by("name").select_subclasses(),
|
||||||
|
MicrosoftEntraProviderMapping,
|
||||||
|
["group", "provider", "connection"],
|
||||||
|
)
|
||||||
|
|
||||||
|
def to_schema(self, obj: Group, connection: MicrosoftEntraProviderGroup) -> MSGroup:
|
||||||
|
"""Convert authentik group"""
|
||||||
|
raw_microsoft_group = super().to_schema(obj, connection)
|
||||||
|
try:
|
||||||
|
return MSGroup(**raw_microsoft_group)
|
||||||
|
except TypeError as exc:
|
||||||
|
raise StopSync(exc, obj) from exc
|
||||||
|
|
||||||
|
def delete(self, obj: Group):
|
||||||
|
"""Delete group"""
|
||||||
|
microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
|
||||||
|
provider=self.provider, group=obj
|
||||||
|
).first()
|
||||||
|
if not microsoft_group:
|
||||||
|
self.logger.debug("Group does not exist in Microsoft, skipping")
|
||||||
|
return None
|
||||||
|
with transaction.atomic():
|
||||||
|
if self.provider.group_delete_action == OutgoingSyncDeleteAction.DELETE:
|
||||||
|
self._request(self.client.groups.by_group_id(microsoft_group.microsoft_id).delete())
|
||||||
|
microsoft_group.delete()
|
||||||
|
|
||||||
|
def create(self, group: Group):
|
||||||
|
"""Create group from scratch and create a connection object"""
|
||||||
|
microsoft_group = self.to_schema(group, None)
|
||||||
|
with transaction.atomic():
|
||||||
|
try:
|
||||||
|
response = self._request(self.client.groups.post(microsoft_group))
|
||||||
|
except ObjectExistsSyncException:
|
||||||
|
# group already exists in microsoft entra, so we can connect them manually
|
||||||
|
# for groups we need to fetch the group from microsoft as we connect on
|
||||||
|
# ID and not group email
|
||||||
|
query_params = GroupsRequestBuilder.GroupsRequestBuilderGetQueryParameters(
|
||||||
|
filter=f"displayName eq '{microsoft_group.display_name}'",
|
||||||
|
)
|
||||||
|
request_configuration = (
|
||||||
|
GroupsRequestBuilder.GroupsRequestBuilderGetRequestConfiguration(
|
||||||
|
query_parameters=query_params,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
group_data = self._request(self.client.groups.get(request_configuration))
|
||||||
|
if group_data.odata_count < 1 or len(group_data.value) < 1:
|
||||||
|
self.logger.warning(
|
||||||
|
"Group which could not be created also does not exist", group=group
|
||||||
|
)
|
||||||
|
return
|
||||||
|
ms_group = group_data.value[0]
|
||||||
|
return MicrosoftEntraProviderGroup.objects.create(
|
||||||
|
provider=self.provider,
|
||||||
|
group=group,
|
||||||
|
microsoft_id=ms_group.id,
|
||||||
|
attributes=self.entity_as_dict(ms_group),
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
return MicrosoftEntraProviderGroup.objects.create(
|
||||||
|
provider=self.provider,
|
||||||
|
group=group,
|
||||||
|
microsoft_id=response.id,
|
||||||
|
attributes=self.entity_as_dict(response),
|
||||||
|
)
|
||||||
|
|
||||||
|
def update(self, group: Group, connection: MicrosoftEntraProviderGroup):
|
||||||
|
"""Update existing group"""
|
||||||
|
microsoft_group = self.to_schema(group, connection)
|
||||||
|
microsoft_group.id = connection.microsoft_id
|
||||||
|
try:
|
||||||
|
response = self._request(
|
||||||
|
self.client.groups.by_group_id(connection.microsoft_id).patch(microsoft_group)
|
||||||
|
)
|
||||||
|
if response:
|
||||||
|
always_merger.merge(connection.attributes, self.entity_as_dict(response))
|
||||||
|
connection.save()
|
||||||
|
except NotFoundSyncException:
|
||||||
|
# Resource missing is handled by self.write, which will re-create the group
|
||||||
|
raise
|
||||||
|
|
||||||
|
def write(self, obj: Group):
|
||||||
|
microsoft_group, created = super().write(obj)
|
||||||
|
self.create_sync_members(obj, microsoft_group)
|
||||||
|
return microsoft_group, created
|
||||||
|
|
||||||
|
def create_sync_members(self, obj: Group, microsoft_group: MicrosoftEntraProviderGroup):
|
||||||
|
"""Sync all members after a group was created"""
|
||||||
|
users = list(obj.users.order_by("id").values_list("id", flat=True))
|
||||||
|
connections = MicrosoftEntraProviderUser.objects.filter(
|
||||||
|
provider=self.provider, user__pk__in=users
|
||||||
|
).values_list("microsoft_id", flat=True)
|
||||||
|
self._patch(microsoft_group.microsoft_id, Direction.add, connections)
|
||||||
|
|
||||||
|
def update_group(self, group: Group, action: Direction, users_set: set[int]):
|
||||||
|
"""Update a groups members"""
|
||||||
|
if action == Direction.add:
|
||||||
|
return self._patch_add_users(group, users_set)
|
||||||
|
if action == Direction.remove:
|
||||||
|
return self._patch_remove_users(group, users_set)
|
||||||
|
|
||||||
|
def _patch(self, microsoft_group_id: str, direction: Direction, members: list[str]):
|
||||||
|
for user in members:
|
||||||
|
try:
|
||||||
|
if direction == Direction.add:
|
||||||
|
request_body = ReferenceCreate(
|
||||||
|
odata_id=f"https://graph.microsoft.com/v1.0/directoryObjects/{user}",
|
||||||
|
)
|
||||||
|
self._request(
|
||||||
|
self.client.groups.by_group_id(microsoft_group_id).members.ref.post(
|
||||||
|
request_body
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if direction == Direction.remove:
|
||||||
|
self._request(
|
||||||
|
self.client.groups.by_group_id(microsoft_group_id)
|
||||||
|
.members.by_directory_object_id(user)
|
||||||
|
.ref.delete()
|
||||||
|
)
|
||||||
|
except ObjectExistsSyncException:
|
||||||
|
pass
|
||||||
|
except TransientSyncException:
|
||||||
|
raise
|
||||||
|
|
||||||
|
def _patch_add_users(self, group: Group, users_set: set[int]):
    """Add users in users_set to group"""
    if not users_set:
        return
    connection = MicrosoftEntraProviderGroup.objects.filter(
        provider=self.provider, group=group
    ).first()
    if connection is None:
        self.logger.warning(
            "could not sync group membership, group does not exist", group=group
        )
        return
    # Translate authentik user PKs into Entra object IDs via the connection table
    microsoft_ids = list(
        MicrosoftEntraProviderUser.objects.filter(
            user__pk__in=users_set, provider=self.provider
        ).values_list("microsoft_id", flat=True)
    )
    if not microsoft_ids:
        return
    self._patch(connection.microsoft_id, Direction.add, microsoft_ids)
|
||||||
|
def _patch_remove_users(self, group: Group, users_set: set[int]):
    """Remove users in users_set from group"""
    if not users_set:
        return
    connection = MicrosoftEntraProviderGroup.objects.filter(
        provider=self.provider, group=group
    ).first()
    if connection is None:
        self.logger.warning(
            "could not sync group membership, group does not exist", group=group
        )
        return
    # Translate authentik user PKs into Entra object IDs via the connection table
    microsoft_ids = list(
        MicrosoftEntraProviderUser.objects.filter(
            user__pk__in=users_set, provider=self.provider
        ).values_list("microsoft_id", flat=True)
    )
    if not microsoft_ids:
        return
    self._patch(connection.microsoft_id, Direction.remove, microsoft_ids)
|
||||||
|
def discover(self):
    """Iterate through all groups and connect them with authentik groups if possible"""
    # Follow OData pagination until no next-link is returned
    page = self._request(self.client.groups.get())
    while True:
        for remote_group in page.value:
            self._discover_single_group(remote_group)
        next_url = page.odata_next_link
        if not next_url:
            break
        page = self._request(self.client.groups.with_url(next_url).get())
|
||||||
|
def _discover_single_group(self, group: MSGroup):
    """handle discovery of a single group"""
    # Match by name; groups without an authentik counterpart are ignored
    candidate = (
        self.provider.get_object_qs(Group).filter(name=group.unique_name).first()
    )
    if candidate is None:
        return
    MicrosoftEntraProviderGroup.objects.get_or_create(
        provider=self.provider,
        group=candidate,
        microsoft_id=group.id,
        attributes=self.entity_as_dict(group),
    )
||||||
143
authentik/enterprise/providers/microsoft_entra/clients/users.py
Normal file
143
authentik/enterprise/providers/microsoft_entra/clients/users.py
Normal file
@ -0,0 +1,143 @@
|
|||||||
|
from deepmerge import always_merger
|
||||||
|
from django.db import transaction
|
||||||
|
from msgraph.generated.models.user import User as MSUser
|
||||||
|
from msgraph.generated.users.users_request_builder import UsersRequestBuilder
|
||||||
|
|
||||||
|
from authentik.core.models import User
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.clients.base import MicrosoftEntraSyncClient
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.models import (
|
||||||
|
MicrosoftEntraProvider,
|
||||||
|
MicrosoftEntraProviderMapping,
|
||||||
|
MicrosoftEntraProviderUser,
|
||||||
|
)
|
||||||
|
from authentik.lib.sync.mapper import PropertyMappingManager
|
||||||
|
from authentik.lib.sync.outgoing.exceptions import (
|
||||||
|
ObjectExistsSyncException,
|
||||||
|
StopSync,
|
||||||
|
TransientSyncException,
|
||||||
|
)
|
||||||
|
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
|
||||||
|
from authentik.policies.utils import delete_none_values
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProviderUser, MSUser]):
    """Sync authentik users into microsoft entra"""

    connection_type = MicrosoftEntraProviderUser
    connection_type_query = "user"
    can_discover = True

    def __init__(self, provider: MicrosoftEntraProvider) -> None:
        super().__init__(provider)
        # Mappings are evaluated in a fixed (name) order so output is deterministic
        self.mapper = PropertyMappingManager(
            self.provider.property_mappings.all().order_by("name").select_subclasses(),
            MicrosoftEntraProviderMapping,
            ["provider", "connection"],
        )

    def to_schema(self, obj: User, connection: MicrosoftEntraProviderUser) -> MSUser:
        """Convert authentik user"""
        raw_microsoft_user = super().to_schema(obj, connection)
        try:
            return MSUser(**delete_none_values(raw_microsoft_user))
        except TypeError as exc:
            # Invalid keys produced by a property mapping are a config error:
            # abort the whole sync instead of failing every object
            raise StopSync(exc, obj) from exc

    def delete(self, obj: User):
        """Delete user"""
        microsoft_user = MicrosoftEntraProviderUser.objects.filter(
            provider=self.provider, user=obj
        ).first()
        if not microsoft_user:
            self.logger.debug("User does not exist in Microsoft, skipping")
            return None
        with transaction.atomic():
            response = None
            if self.provider.user_delete_action == OutgoingSyncDeleteAction.DELETE:
                response = self._request(
                    self.client.users.by_user_id(microsoft_user.microsoft_id).delete()
                )
            elif self.provider.user_delete_action == OutgoingSyncDeleteAction.SUSPEND:
                # Suspend = disable the account instead of deleting it
                response = self._request(
                    self.client.users.by_user_id(microsoft_user.microsoft_id).patch(
                        MSUser(account_enabled=False)
                    )
                )
            # Always drop the local connection, regardless of delete action
            microsoft_user.delete()
        return response

    def create(self, user: User):
        """Create user from scratch and create a connection object"""
        microsoft_user = self.to_schema(user, None)
        self.check_email_valid(microsoft_user.user_principal_name)
        with transaction.atomic():
            try:
                response = self._request(self.client.users.post(microsoft_user))
            except ObjectExistsSyncException:
                # user already exists in microsoft entra, so we can connect them manually
                # (fixed: query parameters were previously built as `Klass()(filter=...)`,
                # instantiating the class and then *calling* the instance — a TypeError;
                # the kwargs belong in the constructor)
                query_params = UsersRequestBuilder.UsersRequestBuilderGetQueryParameters(
                    filter=f"mail eq '{microsoft_user.mail}'",
                )
                request_configuration = (
                    UsersRequestBuilder.UsersRequestBuilderGetRequestConfiguration(
                        query_parameters=query_params,
                    )
                )
                user_data = self._request(self.client.users.get(request_configuration))
                if user_data.odata_count < 1 or len(user_data.value) < 1:
                    self.logger.warning(
                        "User which could not be created also does not exist", user=user
                    )
                    return
                ms_user = user_data.value[0]
                return MicrosoftEntraProviderUser.objects.create(
                    provider=self.provider,
                    user=user,
                    microsoft_id=ms_user.id,
                    attributes=self.entity_as_dict(ms_user),
                )
            else:
                # (removed leftover debug `print()` of the created entity)
                return MicrosoftEntraProviderUser.objects.create(
                    provider=self.provider,
                    user=user,
                    microsoft_id=response.id,
                    attributes=self.entity_as_dict(response),
                )

    def update(self, user: User, connection: MicrosoftEntraProviderUser):
        """Update existing user"""
        microsoft_user = self.to_schema(user, connection)
        self.check_email_valid(microsoft_user.user_principal_name)
        response = self._request(
            self.client.users.by_user_id(connection.microsoft_id).patch(microsoft_user)
        )
        if response:
            # Merge the representation returned by Graph into the cached attributes
            always_merger.merge(connection.attributes, self.entity_as_dict(response))
            connection.save()

    def discover(self):
        """Iterate through all users and connect them with authentik users if possible"""
        users = self._request(self.client.users.get())
        next_link = True
        while next_link:
            for user in users.value:
                self._discover_single_user(user)
            next_link = users.odata_next_link
            if not next_link:
                break
            users = self._request(self.client.users.with_url(next_link).get())

    def _discover_single_user(self, user: MSUser):
        """handle discovery of a single user"""
        # Match by email address; unmatched remote users are ignored
        matching_authentik_user = self.provider.get_object_qs(User).filter(email=user.mail).first()
        if not matching_authentik_user:
            return
        MicrosoftEntraProviderUser.objects.get_or_create(
            provider=self.provider,
            user=matching_authentik_user,
            microsoft_id=user.id,
            attributes=self.entity_as_dict(user),
        )
|
||||||
@ -0,0 +1,165 @@
|
|||||||
|
# Generated by Django 5.0.6 on 2024-05-08 14:35
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
import uuid
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    """Initial schema for the Microsoft Entra provider: the provider and
    property-mapping subclasses plus the user/group connection tables."""

    initial = True

    dependencies = [
        ("authentik_core", "0035_alter_group_options_and_more"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Property-mapping subclass (multi-table inheritance from PropertyMapping)
        migrations.CreateModel(
            name="MicrosoftEntraProviderMapping",
            fields=[
                (
                    "propertymapping_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.propertymapping",
                    ),
                ),
            ],
            options={
                "verbose_name": "Microsoft Entra Provider Mapping",
                "verbose_name_plural": "Microsoft Entra Provider Mappings",
            },
            bases=("authentik_core.propertymapping",),
        ),
        # Provider subclass carrying the Entra app credentials and sync options
        migrations.CreateModel(
            name="MicrosoftEntraProvider",
            fields=[
                (
                    "provider_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.provider",
                    ),
                ),
                ("client_id", models.TextField()),
                ("client_secret", models.TextField()),
                ("tenant_id", models.TextField()),
                ("exclude_users_service_account", models.BooleanField(default=False)),
                (
                    "user_delete_action",
                    models.TextField(
                        choices=[
                            ("do_nothing", "Do Nothing"),
                            ("delete", "Delete"),
                            ("suspend", "Suspend"),
                        ],
                        default="delete",
                    ),
                ),
                (
                    "group_delete_action",
                    models.TextField(
                        choices=[
                            ("do_nothing", "Do Nothing"),
                            ("delete", "Delete"),
                            ("suspend", "Suspend"),
                        ],
                        default="delete",
                    ),
                ),
                (
                    "filter_group",
                    models.ForeignKey(
                        default=None,
                        null=True,
                        on_delete=django.db.models.deletion.SET_DEFAULT,
                        to="authentik_core.group",
                    ),
                ),
                (
                    "property_mappings_group",
                    models.ManyToManyField(
                        blank=True,
                        default=None,
                        help_text="Property mappings used for group creation/updating.",
                        to="authentik_core.propertymapping",
                    ),
                ),
            ],
            options={
                "verbose_name": "Microsoft Entra Provider",
                "verbose_name_plural": "Microsoft Entra Providers",
            },
            bases=("authentik_core.provider", models.Model),
        ),
        # Connection table: authentik Group <-> Entra group object ID
        migrations.CreateModel(
            name="MicrosoftEntraProviderGroup",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
                    ),
                ),
                ("microsoft_id", models.TextField()),
                (
                    "group",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="authentik_core.group"
                    ),
                ),
                (
                    "provider",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_providers_microsoft_entra.microsoftentraprovider",
                    ),
                ),
            ],
            options={
                "verbose_name": "Microsoft Entra Provider Group",
                "verbose_name_plural": "Microsoft Entra Provider Groups",
                "unique_together": {("microsoft_id", "group", "provider")},
            },
        ),
        # Connection table: authentik User <-> Entra user object ID
        migrations.CreateModel(
            name="MicrosoftEntraProviderUser",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
                    ),
                ),
                ("microsoft_id", models.TextField()),
                (
                    "provider",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_providers_microsoft_entra.microsoftentraprovider",
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
            options={
                "verbose_name": "Microsoft Entra Provider User",
                "verbose_name_plural": "Microsoft Entra Provider User",
                "unique_together": {("microsoft_id", "user", "provider")},
            },
        ),
    ]
|
||||||
@ -0,0 +1,23 @@
|
|||||||
|
# Generated by Django 5.0.6 on 2024-05-23 20:48
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    """Add the `attributes` JSON cache field to both connection models."""

    dependencies = [
        ("authentik_providers_microsoft_entra", "0001_initial"),
    ]

    operations = [
        migrations.AddField(
            model_name="microsoftentraprovidergroup",
            name="attributes",
            field=models.JSONField(default=dict),
        ),
        migrations.AddField(
            model_name="microsoftentraprovideruser",
            name="attributes",
            field=models.JSONField(default=dict),
        ),
    ]
|
||||||
187
authentik/enterprise/providers/microsoft_entra/models.py
Normal file
187
authentik/enterprise/providers/microsoft_entra/models.py
Normal file
@ -0,0 +1,187 @@
|
|||||||
|
"""Microsoft Entra sync provider"""
|
||||||
|
|
||||||
|
from typing import Any, Self
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
from azure.identity.aio import ClientSecretCredential
|
||||||
|
from django.db import models
|
||||||
|
from django.db.models import QuerySet
|
||||||
|
from django.templatetags.static import static
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from rest_framework.serializers import Serializer
|
||||||
|
|
||||||
|
from authentik.core.models import (
|
||||||
|
BackchannelProvider,
|
||||||
|
Group,
|
||||||
|
PropertyMapping,
|
||||||
|
User,
|
||||||
|
UserTypes,
|
||||||
|
)
|
||||||
|
from authentik.lib.models import SerializerModel
|
||||||
|
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
|
||||||
|
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction, OutgoingSyncProvider
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider):
    """Sync users from authentik into Microsoft Entra."""

    # Entra application (service principal) credentials
    client_id = models.TextField()
    client_secret = models.TextField()
    tenant_id = models.TextField()

    # When set, service-account users are not synced
    exclude_users_service_account = models.BooleanField(default=False)
    user_delete_action = models.TextField(
        choices=OutgoingSyncDeleteAction.choices, default=OutgoingSyncDeleteAction.DELETE
    )
    group_delete_action = models.TextField(
        choices=OutgoingSyncDeleteAction.choices, default=OutgoingSyncDeleteAction.DELETE
    )
    # Optional: restrict the sync to members of this group
    filter_group = models.ForeignKey(
        "authentik_core.group", on_delete=models.SET_DEFAULT, default=None, null=True
    )

    # Separate mapping set used for group objects (user mappings live on
    # the inherited `property_mappings` field)
    property_mappings_group = models.ManyToManyField(
        PropertyMapping,
        default=None,
        blank=True,
        help_text=_("Property mappings used for group creation/updating."),
    )

    def client_for_model(
        self, model: type[User | Group]
    ) -> BaseOutgoingSyncClient[User | Group, Any, Any, Self]:
        """Return the sync client responsible for *model* (User or Group).

        Raises ValueError for any other model. Imports are deferred to
        avoid circular imports between models and clients.
        """
        if issubclass(model, User):
            from authentik.enterprise.providers.microsoft_entra.clients.users import (
                MicrosoftEntraUserClient,
            )

            return MicrosoftEntraUserClient(self)
        if issubclass(model, Group):
            from authentik.enterprise.providers.microsoft_entra.clients.groups import (
                MicrosoftEntraGroupClient,
            )

            return MicrosoftEntraGroupClient(self)
        raise ValueError(f"Invalid model {model}")

    def get_object_qs(self, type: type[User | Group]) -> QuerySet[User | Group]:
        """Return the queryset of objects this provider should sync for *type*."""
        if type == User:
            # Get queryset of all users with consistent ordering
            # according to the provider's settings
            base = User.objects.all().exclude_anonymous()
            if self.exclude_users_service_account:
                base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
                    type=UserTypes.INTERNAL_SERVICE_ACCOUNT
                )
            if self.filter_group:
                base = base.filter(ak_groups__in=[self.filter_group])
            return base.order_by("pk")
        if type == Group:
            # Get queryset of all groups with consistent ordering
            return Group.objects.all().order_by("pk")
        raise ValueError(f"Invalid type {type}")

    def microsoft_credentials(self):
        """Build the kwargs dict used to construct the Graph client credential."""
        return {
            "credentials": ClientSecretCredential(
                self.tenant_id, self.client_id, self.client_secret
            )
        }

    @property
    def icon_url(self) -> str | None:
        return static("authentik/sources/azuread.svg")

    @property
    def component(self) -> str:
        return "ak-provider-microsoft-entra-form"

    @property
    def serializer(self) -> type[Serializer]:
        # Deferred import to avoid a circular dependency with the API module
        from authentik.enterprise.providers.microsoft_entra.api.providers import (
            MicrosoftEntraProviderSerializer,
        )

        return MicrosoftEntraProviderSerializer

    def __str__(self):
        return f"Microsoft Entra Provider {self.name}"

    class Meta:
        verbose_name = _("Microsoft Entra Provider")
        verbose_name_plural = _("Microsoft Entra Providers")
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraProviderMapping(PropertyMapping):
    """Map authentik data to outgoing Microsoft requests"""

    @property
    def component(self) -> str:
        return "ak-property-mapping-microsoft-entra-form"

    @property
    def serializer(self) -> type[Serializer]:
        # Deferred import to avoid a circular dependency with the API module
        from authentik.enterprise.providers.microsoft_entra.api.property_mappings import (
            MicrosoftEntraProviderMappingSerializer,
        )

        return MicrosoftEntraProviderMappingSerializer

    def __str__(self):
        return f"Microsoft Entra Provider Mapping {self.name}"

    class Meta:
        verbose_name = _("Microsoft Entra Provider Mapping")
        verbose_name_plural = _("Microsoft Entra Provider Mappings")
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraProviderUser(SerializerModel):
    """Mapping of a user and provider to a Microsoft user ID"""

    id = models.UUIDField(primary_key=True, editable=False, default=uuid4)
    # Object ID of the user on the Microsoft Entra side
    microsoft_id = models.TextField()
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    provider = models.ForeignKey(MicrosoftEntraProvider, on_delete=models.CASCADE)
    # Cached copy of the remote user's attributes as returned by Graph
    attributes = models.JSONField(default=dict)

    @property
    def serializer(self) -> type[Serializer]:
        # Deferred import to avoid a circular dependency with the API module
        from authentik.enterprise.providers.microsoft_entra.api.users import (
            MicrosoftEntraProviderUserSerializer,
        )

        return MicrosoftEntraProviderUserSerializer

    class Meta:
        verbose_name = _("Microsoft Entra Provider User")
        verbose_name_plural = _("Microsoft Entra Provider User")
        unique_together = (("microsoft_id", "user", "provider"),)

    def __str__(self) -> str:
        return f"Microsoft Entra Provider User {self.user_id} to {self.provider_id}"
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraProviderGroup(SerializerModel):
    """Mapping of a group and provider to a Microsoft group ID"""

    id = models.UUIDField(primary_key=True, editable=False, default=uuid4)
    # Object ID of the group on the Microsoft Entra side
    microsoft_id = models.TextField()
    group = models.ForeignKey(Group, on_delete=models.CASCADE)
    provider = models.ForeignKey(MicrosoftEntraProvider, on_delete=models.CASCADE)
    # Cached copy of the remote group's attributes as returned by Graph
    attributes = models.JSONField(default=dict)

    @property
    def serializer(self) -> type[Serializer]:
        # Deferred import to avoid a circular dependency with the API module
        from authentik.enterprise.providers.microsoft_entra.api.groups import (
            MicrosoftEntraProviderGroupSerializer,
        )

        return MicrosoftEntraProviderGroupSerializer

    class Meta:
        verbose_name = _("Microsoft Entra Provider Group")
        verbose_name_plural = _("Microsoft Entra Provider Groups")
        unique_together = (("microsoft_id", "group", "provider"),)

    def __str__(self) -> str:
        return f"Microsoft Entra Provider Group {self.group_id} to {self.provider_id}"
||||||
13
authentik/enterprise/providers/microsoft_entra/settings.py
Normal file
13
authentik/enterprise/providers/microsoft_entra/settings.py
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
"""Microsoft Entra provider task Settings"""
|
||||||
|
|
||||||
|
from celery.schedules import crontab
|
||||||
|
|
||||||
|
from authentik.lib.utils.time import fqdn_rand
|
||||||
|
|
||||||
|
# Periodic full sync every 4 hours; the minute is derived from the FQDN so
# multiple instances don't all fire at the same time.
CELERY_BEAT_SCHEDULE = {
    "providers_microsoft_entra_sync": {
        "task": "authentik.enterprise.providers.microsoft_entra.tasks.microsoft_entra_sync_all",
        "schedule": crontab(minute=fqdn_rand("microsoft_entra_sync_all"), hour="*/4"),
        "options": {"queue": "authentik_scheduled"},
    },
}
||||||
16
authentik/enterprise/providers/microsoft_entra/signals.py
Normal file
16
authentik/enterprise/providers/microsoft_entra/signals.py
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
"""Microsoft provider signals"""
|
||||||
|
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.tasks import (
|
||||||
|
microsoft_entra_sync,
|
||||||
|
microsoft_entra_sync_direct,
|
||||||
|
microsoft_entra_sync_m2m,
|
||||||
|
)
|
||||||
|
from authentik.lib.sync.outgoing.signals import register_signals
|
||||||
|
|
||||||
|
# Hook model save/delete and m2m-changed signals up to the Entra sync tasks
register_signals(
    MicrosoftEntraProvider,
    task_sync_single=microsoft_entra_sync,
    task_sync_direct=microsoft_entra_sync_direct,
    task_sync_m2m=microsoft_entra_sync_m2m,
)
|
||||||
37
authentik/enterprise/providers/microsoft_entra/tasks.py
Normal file
37
authentik/enterprise/providers/microsoft_entra/tasks.py
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
"""Microsoft Entra Provider tasks"""
|
||||||
|
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
|
||||||
|
from authentik.events.system_tasks import SystemTask
|
||||||
|
from authentik.lib.sync.outgoing.exceptions import TransientSyncException
|
||||||
|
from authentik.lib.sync.outgoing.tasks import SyncTasks
|
||||||
|
from authentik.root.celery import CELERY_APP
|
||||||
|
|
||||||
|
# Shared task implementations, parameterized with the Entra provider model
sync_tasks = SyncTasks(MicrosoftEntraProvider)
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def microsoft_entra_sync_objects(*args, **kwargs):
    """Sync a page of objects for one provider; retried on transient errors."""
    return sync_tasks.sync_objects(*args, **kwargs)
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task(
    base=SystemTask, bind=True, autoretry_for=(TransientSyncException,), retry_backoff=True
)
def microsoft_entra_sync(self, provider_pk: int, *args, **kwargs):
    """Run full sync for Microsoft Entra provider"""
    return sync_tasks.sync_single(self, provider_pk, microsoft_entra_sync_objects)
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task()
def microsoft_entra_sync_all():
    """Dispatch a full sync for every Microsoft Entra provider (beat schedule entry)."""
    return sync_tasks.sync_all(microsoft_entra_sync)
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def microsoft_entra_sync_direct(*args, **kwargs):
    """Sync a single changed object (triggered from model signals)."""
    return sync_tasks.sync_signal_direct(*args, **kwargs)
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def microsoft_entra_sync_m2m(*args, **kwargs):
    """Sync a group-membership (m2m) change (triggered from model signals)."""
    return sync_tasks.sync_signal_m2m(*args, **kwargs)
||||||
@ -0,0 +1,424 @@
|
|||||||
|
"""Microsoft Entra Group tests"""
|
||||||
|
|
||||||
|
from unittest.mock import AsyncMock, MagicMock, patch
|
||||||
|
|
||||||
|
from azure.identity.aio import ClientSecretCredential
|
||||||
|
from django.test import TestCase
|
||||||
|
from msgraph.generated.models.group import Group as MSGroup
|
||||||
|
from msgraph.generated.models.group_collection_response import GroupCollectionResponse
|
||||||
|
from msgraph.generated.models.organization import Organization
|
||||||
|
from msgraph.generated.models.organization_collection_response import OrganizationCollectionResponse
|
||||||
|
from msgraph.generated.models.user import User as MSUser
|
||||||
|
from msgraph.generated.models.user_collection_response import UserCollectionResponse
|
||||||
|
from msgraph.generated.models.verified_domain import VerifiedDomain
|
||||||
|
|
||||||
|
from authentik.blueprints.tests import apply_blueprint
|
||||||
|
from authentik.core.models import Application, Group, User
|
||||||
|
from authentik.core.tests.utils import create_test_user
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.models import (
|
||||||
|
MicrosoftEntraProvider,
|
||||||
|
MicrosoftEntraProviderGroup,
|
||||||
|
MicrosoftEntraProviderMapping,
|
||||||
|
MicrosoftEntraProviderUser,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.tasks import microsoft_entra_sync
|
||||||
|
from authentik.events.models import Event, EventAction
|
||||||
|
from authentik.lib.generators import generate_id
|
||||||
|
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
|
||||||
|
from authentik.tenants.models import Tenant
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraGroupTests(TestCase):
|
||||||
|
"""Microsoft Entra Group tests"""
|
||||||
|
|
||||||
|
@apply_blueprint("system/providers-microsoft-entra.yaml")
def setUp(self) -> None:
    # Delete all users and groups as the mocked HTTP responses only return one ID
    # which will cause errors with multiple groups
    Tenant.objects.update(avatars="none")
    User.objects.all().exclude_anonymous().delete()
    Group.objects.all().delete()
    self.provider: MicrosoftEntraProvider = MicrosoftEntraProvider.objects.create(
        name=generate_id(),
        client_id=generate_id(),
        client_secret=generate_id(),
        tenant_id=generate_id(),
        exclude_users_service_account=True,
    )
    self.app: Application = Application.objects.create(
        name=generate_id(),
        slug=generate_id(),
    )
    self.app.backchannel_providers.add(self.provider)
    # Attach the blueprint-provided default user and group mappings
    self.provider.property_mappings.add(
        MicrosoftEntraProviderMapping.objects.get(
            managed="goauthentik.io/providers/microsoft_entra/user"
        )
    )
    self.provider.property_mappings_group.add(
        MicrosoftEntraProviderMapping.objects.get(
            managed="goauthentik.io/providers/microsoft_entra/group"
        )
    )
    # Dummy credential object handed to the mocked microsoft_credentials()
    self.creds = ClientSecretCredential(generate_id(), generate_id(), generate_id())
||||||
|
|
||||||
|
def test_group_create(self):
    """Test group creation"""
    uid = generate_id()
    with (
        # Stub credentials so no real token is requested
        patch(
            "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
            MagicMock(return_value={"credentials": self.creds}),
        ),
        # Domain check performed by the sync client on startup
        patch(
            "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
            AsyncMock(
                return_value=OrganizationCollectionResponse(
                    value=[
                        Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                    ]
                )
            ),
        ),
        patch(
            "msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
            AsyncMock(return_value=MSGroup(id=generate_id())),
        ) as group_create,
    ):
        # Saving the group triggers the outgoing-sync signal handler
        group = Group.objects.create(name=uid)
        microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
            provider=self.provider, group=group
        ).first()
        self.assertIsNotNone(microsoft_group)
        self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
        group_create.assert_called_once()
||||||
|
|
||||||
|
def test_group_not_created(self):
    """Test without group property mappings, no group is created"""
    # Remove the group mappings attached in setUp
    self.provider.property_mappings_group.clear()
    uid = generate_id()
    with (
        patch(
            "authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
            MagicMock(return_value={"credentials": self.creds}),
        ),
        patch(
            "msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
            AsyncMock(
                return_value=OrganizationCollectionResponse(
                    value=[
                        Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
                    ]
                )
            ),
        ),
        patch(
            "msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
            AsyncMock(return_value=MSGroup(id=generate_id())),
        ) as group_create,
    ):
        group = Group.objects.create(name=uid)
        # Without mappings no connection object should exist and no request was made
        microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
            provider=self.provider, group=group
        ).first()
        self.assertIsNone(microsoft_group)
        self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
        group_create.assert_not_called()
||||||
|
|
||||||
|
def test_group_create_update(self):
|
||||||
|
"""Test group updating"""
|
||||||
|
uid = generate_id()
|
||||||
|
ext_id = generate_id()
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
|
||||||
|
MagicMock(return_value={"credentials": self.creds}),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=OrganizationCollectionResponse(
|
||||||
|
value=[
|
||||||
|
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
|
||||||
|
]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
|
||||||
|
AsyncMock(return_value=MSGroup(id=ext_id)),
|
||||||
|
) as group_create,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.item.group_item_request_builder.GroupItemRequestBuilder.patch",
|
||||||
|
AsyncMock(return_value=MSGroup(id=ext_id)),
|
||||||
|
) as group_patch,
|
||||||
|
):
|
||||||
|
group = Group.objects.create(name=uid)
|
||||||
|
microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
|
||||||
|
provider=self.provider, group=group
|
||||||
|
).first()
|
||||||
|
self.assertIsNotNone(microsoft_group)
|
||||||
|
|
||||||
|
group.name = "new name"
|
||||||
|
group.save()
|
||||||
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
|
group_create.assert_called_once()
|
||||||
|
group_patch.assert_called_once()
|
||||||
|
|
||||||
|
def test_group_create_delete(self):
|
||||||
|
"""Test group deletion"""
|
||||||
|
uid = generate_id()
|
||||||
|
ext_id = generate_id()
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=OrganizationCollectionResponse(
|
||||||
|
value=[
|
||||||
|
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
|
||||||
|
]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
|
||||||
|
MagicMock(return_value={"credentials": self.creds}),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
|
||||||
|
AsyncMock(return_value=MSGroup(id=ext_id)),
|
||||||
|
) as group_create,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.item.group_item_request_builder.GroupItemRequestBuilder.delete",
|
||||||
|
AsyncMock(return_value=MSGroup(id=ext_id)),
|
||||||
|
) as group_delete,
|
||||||
|
):
|
||||||
|
group = Group.objects.create(name=uid)
|
||||||
|
microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
|
||||||
|
provider=self.provider, group=group
|
||||||
|
).first()
|
||||||
|
self.assertIsNotNone(microsoft_group)
|
||||||
|
|
||||||
|
group.delete()
|
||||||
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
|
group_create.assert_called_once()
|
||||||
|
group_delete.assert_called_once()
|
||||||
|
|
||||||
|
def test_group_create_member_add(self):
|
||||||
|
"""Test group creation"""
|
||||||
|
uid = generate_id()
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
|
||||||
|
MagicMock(return_value={"credentials": self.creds}),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=OrganizationCollectionResponse(
|
||||||
|
value=[
|
||||||
|
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
|
||||||
|
]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
) as user_create,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.patch",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
|
||||||
|
AsyncMock(return_value=MSGroup(id=uid)),
|
||||||
|
) as group_create,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.item.members.ref.ref_request_builder.RefRequestBuilder.post",
|
||||||
|
AsyncMock(),
|
||||||
|
) as member_add,
|
||||||
|
):
|
||||||
|
user = create_test_user(uid)
|
||||||
|
group = Group.objects.create(name=uid)
|
||||||
|
group.users.add(user)
|
||||||
|
microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
|
||||||
|
provider=self.provider, group=group
|
||||||
|
).first()
|
||||||
|
self.assertIsNotNone(microsoft_group)
|
||||||
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
|
user_create.assert_called_once()
|
||||||
|
group_create.assert_called_once()
|
||||||
|
member_add.assert_called_once()
|
||||||
|
self.assertEqual(
|
||||||
|
member_add.call_args[0][0].odata_id,
|
||||||
|
f"https://graph.microsoft.com/v1.0/directoryObjects/{MicrosoftEntraProviderUser.objects.filter(
|
||||||
|
provider=self.provider,
|
||||||
|
).first().microsoft_id}",
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_group_create_member_remove(self):
|
||||||
|
"""Test group creation"""
|
||||||
|
uid = generate_id()
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
|
||||||
|
MagicMock(return_value={"credentials": self.creds}),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=OrganizationCollectionResponse(
|
||||||
|
value=[
|
||||||
|
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
|
||||||
|
]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
) as user_create,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.patch",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
|
||||||
|
AsyncMock(return_value=MSGroup(id=uid)),
|
||||||
|
) as group_create,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.item.members.ref.ref_request_builder.RefRequestBuilder.post",
|
||||||
|
AsyncMock(),
|
||||||
|
) as member_add,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.item.members.item.ref.ref_request_builder.RefRequestBuilder.delete",
|
||||||
|
AsyncMock(),
|
||||||
|
) as member_remove,
|
||||||
|
):
|
||||||
|
user = create_test_user(uid)
|
||||||
|
group = Group.objects.create(name=uid)
|
||||||
|
group.users.add(user)
|
||||||
|
microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
|
||||||
|
provider=self.provider, group=group
|
||||||
|
).first()
|
||||||
|
self.assertIsNotNone(microsoft_group)
|
||||||
|
group.users.remove(user)
|
||||||
|
|
||||||
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
|
user_create.assert_called_once()
|
||||||
|
group_create.assert_called_once()
|
||||||
|
member_add.assert_called_once()
|
||||||
|
self.assertEqual(
|
||||||
|
member_add.call_args[0][0].odata_id,
|
||||||
|
f"https://graph.microsoft.com/v1.0/directoryObjects/{MicrosoftEntraProviderUser.objects.filter(
|
||||||
|
provider=self.provider,
|
||||||
|
).first().microsoft_id}",
|
||||||
|
)
|
||||||
|
member_remove.assert_called_once()
|
||||||
|
|
||||||
|
def test_group_create_delete_do_nothing(self):
|
||||||
|
"""Test group deletion (delete action = do nothing)"""
|
||||||
|
self.provider.group_delete_action = OutgoingSyncDeleteAction.DO_NOTHING
|
||||||
|
self.provider.save()
|
||||||
|
uid = generate_id()
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
|
||||||
|
MagicMock(return_value={"credentials": self.creds}),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=OrganizationCollectionResponse(
|
||||||
|
value=[
|
||||||
|
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
|
||||||
|
]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
|
||||||
|
AsyncMock(return_value=MSGroup(id=uid)),
|
||||||
|
) as group_create,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.item.group_item_request_builder.GroupItemRequestBuilder.delete",
|
||||||
|
AsyncMock(return_value=MSGroup(id=uid)),
|
||||||
|
) as group_delete,
|
||||||
|
):
|
||||||
|
group = Group.objects.create(name=uid)
|
||||||
|
microsoft_group = MicrosoftEntraProviderGroup.objects.filter(
|
||||||
|
provider=self.provider, group=group
|
||||||
|
).first()
|
||||||
|
self.assertIsNotNone(microsoft_group)
|
||||||
|
|
||||||
|
group.delete()
|
||||||
|
self.assertFalse(
|
||||||
|
MicrosoftEntraProviderGroup.objects.filter(
|
||||||
|
provider=self.provider, group__name=uid
|
||||||
|
).exists()
|
||||||
|
)
|
||||||
|
group_create.assert_called_once()
|
||||||
|
group_delete.assert_not_called()
|
||||||
|
|
||||||
|
def test_sync_task(self):
|
||||||
|
"""Test group discovery"""
|
||||||
|
uid = generate_id()
|
||||||
|
self.app.backchannel_providers.remove(self.provider)
|
||||||
|
different_group = Group.objects.create(
|
||||||
|
name=uid,
|
||||||
|
)
|
||||||
|
self.app.backchannel_providers.add(self.provider)
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
|
||||||
|
MagicMock(return_value={"credentials": self.creds}),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=OrganizationCollectionResponse(
|
||||||
|
value=[
|
||||||
|
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
|
||||||
|
]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.patch",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.post",
|
||||||
|
AsyncMock(return_value=MSGroup(id=generate_id())),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.item.group_item_request_builder.GroupItemRequestBuilder.patch",
|
||||||
|
AsyncMock(return_value=MSGroup(id=uid)),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.users_request_builder.UsersRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=UserCollectionResponse(
|
||||||
|
value=[MSUser(mail=f"{uid}@goauthentik.io", id=uid)]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
) as user_list,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=GroupCollectionResponse(
|
||||||
|
value=[MSGroup(display_name=uid, unique_name=uid, id=uid)]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
) as group_list,
|
||||||
|
):
|
||||||
|
microsoft_entra_sync.delay(self.provider.pk).get()
|
||||||
|
self.assertTrue(
|
||||||
|
MicrosoftEntraProviderGroup.objects.filter(
|
||||||
|
group=different_group, provider=self.provider
|
||||||
|
).exists()
|
||||||
|
)
|
||||||
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
|
user_list.assert_called_once()
|
||||||
|
group_list.assert_called_once()
|
||||||
@ -0,0 +1,373 @@
|
|||||||
|
"""Microsoft Entra User tests"""
|
||||||
|
|
||||||
|
from unittest.mock import AsyncMock, MagicMock, patch
|
||||||
|
|
||||||
|
from azure.identity.aio import ClientSecretCredential
|
||||||
|
from django.test import TestCase
|
||||||
|
from msgraph.generated.models.group_collection_response import GroupCollectionResponse
|
||||||
|
from msgraph.generated.models.organization import Organization
|
||||||
|
from msgraph.generated.models.organization_collection_response import OrganizationCollectionResponse
|
||||||
|
from msgraph.generated.models.user import User as MSUser
|
||||||
|
from msgraph.generated.models.user_collection_response import UserCollectionResponse
|
||||||
|
from msgraph.generated.models.verified_domain import VerifiedDomain
|
||||||
|
|
||||||
|
from authentik.blueprints.tests import apply_blueprint
|
||||||
|
from authentik.core.models import Application, Group, User
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.models import (
|
||||||
|
MicrosoftEntraProvider,
|
||||||
|
MicrosoftEntraProviderMapping,
|
||||||
|
MicrosoftEntraProviderUser,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.tasks import microsoft_entra_sync
|
||||||
|
from authentik.events.models import Event, EventAction
|
||||||
|
from authentik.lib.generators import generate_id
|
||||||
|
from authentik.lib.sync.outgoing.models import OutgoingSyncDeleteAction
|
||||||
|
from authentik.tenants.models import Tenant
|
||||||
|
|
||||||
|
|
||||||
|
class MicrosoftEntraUserTests(TestCase):
|
||||||
|
"""Microsoft Entra User tests"""
|
||||||
|
|
||||||
|
@apply_blueprint("system/providers-microsoft-entra.yaml")
|
||||||
|
def setUp(self) -> None:
|
||||||
|
# Delete all users and groups as the mocked HTTP responses only return one ID
|
||||||
|
# which will cause errors with multiple users
|
||||||
|
Tenant.objects.update(avatars="none")
|
||||||
|
User.objects.all().exclude_anonymous().delete()
|
||||||
|
Group.objects.all().delete()
|
||||||
|
self.provider: MicrosoftEntraProvider = MicrosoftEntraProvider.objects.create(
|
||||||
|
name=generate_id(),
|
||||||
|
client_id=generate_id(),
|
||||||
|
client_secret=generate_id(),
|
||||||
|
tenant_id=generate_id(),
|
||||||
|
exclude_users_service_account=True,
|
||||||
|
)
|
||||||
|
self.app: Application = Application.objects.create(
|
||||||
|
name=generate_id(),
|
||||||
|
slug=generate_id(),
|
||||||
|
)
|
||||||
|
self.app.backchannel_providers.add(self.provider)
|
||||||
|
self.provider.property_mappings.add(
|
||||||
|
MicrosoftEntraProviderMapping.objects.get(
|
||||||
|
managed="goauthentik.io/providers/microsoft_entra/user"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
self.provider.property_mappings_group.add(
|
||||||
|
MicrosoftEntraProviderMapping.objects.get(
|
||||||
|
managed="goauthentik.io/providers/microsoft_entra/group"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
self.creds = ClientSecretCredential(generate_id(), generate_id(), generate_id())
|
||||||
|
|
||||||
|
def test_user_create(self):
|
||||||
|
"""Test user creation"""
|
||||||
|
uid = generate_id()
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
|
||||||
|
MagicMock(return_value={"credentials": self.creds}),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=OrganizationCollectionResponse(
|
||||||
|
value=[
|
||||||
|
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
|
||||||
|
]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
) as user_create,
|
||||||
|
):
|
||||||
|
user = User.objects.create(
|
||||||
|
username=uid,
|
||||||
|
name=f"{uid} {uid}",
|
||||||
|
email=f"{uid}@goauthentik.io",
|
||||||
|
)
|
||||||
|
microsoft_user = MicrosoftEntraProviderUser.objects.filter(
|
||||||
|
provider=self.provider, user=user
|
||||||
|
).first()
|
||||||
|
self.assertIsNotNone(microsoft_user)
|
||||||
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
|
user_create.assert_called_once()
|
||||||
|
|
||||||
|
def test_user_not_created(self):
|
||||||
|
"""Test without property mappings, no group is created"""
|
||||||
|
self.provider.property_mappings.clear()
|
||||||
|
uid = generate_id()
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
|
||||||
|
MagicMock(return_value={"credentials": self.creds}),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=OrganizationCollectionResponse(
|
||||||
|
value=[
|
||||||
|
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
|
||||||
|
]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
) as user_create,
|
||||||
|
):
|
||||||
|
user = User.objects.create(
|
||||||
|
username=uid,
|
||||||
|
name=f"{uid} {uid}",
|
||||||
|
email=f"{uid}@goauthentik.io",
|
||||||
|
)
|
||||||
|
microsoft_user = MicrosoftEntraProviderUser.objects.filter(
|
||||||
|
provider=self.provider, user=user
|
||||||
|
).first()
|
||||||
|
self.assertIsNone(microsoft_user)
|
||||||
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
|
user_create.assert_not_called()
|
||||||
|
|
||||||
|
def test_user_create_update(self):
|
||||||
|
"""Test user updating"""
|
||||||
|
uid = generate_id()
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
|
||||||
|
MagicMock(return_value={"credentials": self.creds}),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=OrganizationCollectionResponse(
|
||||||
|
value=[
|
||||||
|
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
|
||||||
|
]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
) as user_create,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.patch",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
) as user_patch,
|
||||||
|
):
|
||||||
|
user = User.objects.create(
|
||||||
|
username=uid,
|
||||||
|
name=f"{uid} {uid}",
|
||||||
|
email=f"{uid}@goauthentik.io",
|
||||||
|
)
|
||||||
|
microsoft_user = MicrosoftEntraProviderUser.objects.filter(
|
||||||
|
provider=self.provider, user=user
|
||||||
|
).first()
|
||||||
|
self.assertIsNotNone(microsoft_user)
|
||||||
|
|
||||||
|
user.name = "new name"
|
||||||
|
user.save()
|
||||||
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
|
user_create.assert_called_once()
|
||||||
|
user_patch.assert_called_once()
|
||||||
|
|
||||||
|
def test_user_create_delete(self):
|
||||||
|
"""Test user deletion"""
|
||||||
|
uid = generate_id()
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
|
||||||
|
MagicMock(return_value={"credentials": self.creds}),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=OrganizationCollectionResponse(
|
||||||
|
value=[
|
||||||
|
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
|
||||||
|
]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
) as user_create,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.delete",
|
||||||
|
AsyncMock(),
|
||||||
|
) as user_delete,
|
||||||
|
):
|
||||||
|
user = User.objects.create(
|
||||||
|
username=uid,
|
||||||
|
name=f"{uid} {uid}",
|
||||||
|
email=f"{uid}@goauthentik.io",
|
||||||
|
)
|
||||||
|
microsoft_user = MicrosoftEntraProviderUser.objects.filter(
|
||||||
|
provider=self.provider, user=user
|
||||||
|
).first()
|
||||||
|
self.assertIsNotNone(microsoft_user)
|
||||||
|
|
||||||
|
user.delete()
|
||||||
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
|
user_create.assert_called_once()
|
||||||
|
user_delete.assert_called_once()
|
||||||
|
|
||||||
|
def test_user_create_delete_suspend(self):
|
||||||
|
"""Test user deletion (delete action = Suspend)"""
|
||||||
|
self.provider.user_delete_action = OutgoingSyncDeleteAction.SUSPEND
|
||||||
|
self.provider.save()
|
||||||
|
uid = generate_id()
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
|
||||||
|
MagicMock(return_value={"credentials": self.creds}),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=OrganizationCollectionResponse(
|
||||||
|
value=[
|
||||||
|
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
|
||||||
|
]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
) as user_create,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.patch",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
) as user_patch,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.delete",
|
||||||
|
AsyncMock(),
|
||||||
|
) as user_delete,
|
||||||
|
):
|
||||||
|
user = User.objects.create(
|
||||||
|
username=uid,
|
||||||
|
name=f"{uid} {uid}",
|
||||||
|
email=f"{uid}@goauthentik.io",
|
||||||
|
)
|
||||||
|
microsoft_user = MicrosoftEntraProviderUser.objects.filter(
|
||||||
|
provider=self.provider, user=user
|
||||||
|
).first()
|
||||||
|
self.assertIsNotNone(microsoft_user)
|
||||||
|
|
||||||
|
user.delete()
|
||||||
|
self.assertFalse(
|
||||||
|
MicrosoftEntraProviderUser.objects.filter(
|
||||||
|
provider=self.provider, user__username=uid
|
||||||
|
).exists()
|
||||||
|
)
|
||||||
|
user_create.assert_called_once()
|
||||||
|
user_patch.assert_called_once()
|
||||||
|
self.assertFalse(user_patch.call_args[0][0].account_enabled)
|
||||||
|
user_delete.assert_not_called()
|
||||||
|
|
||||||
|
def test_user_create_delete_do_nothing(self):
|
||||||
|
"""Test user deletion (delete action = do nothing)"""
|
||||||
|
self.provider.user_delete_action = OutgoingSyncDeleteAction.DO_NOTHING
|
||||||
|
self.provider.save()
|
||||||
|
uid = generate_id()
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
|
||||||
|
MagicMock(return_value={"credentials": self.creds}),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=OrganizationCollectionResponse(
|
||||||
|
value=[
|
||||||
|
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
|
||||||
|
]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.users_request_builder.UsersRequestBuilder.post",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
) as user_create,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.patch",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
) as user_patch,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.delete",
|
||||||
|
AsyncMock(),
|
||||||
|
) as user_delete,
|
||||||
|
):
|
||||||
|
user = User.objects.create(
|
||||||
|
username=uid,
|
||||||
|
name=f"{uid} {uid}",
|
||||||
|
email=f"{uid}@goauthentik.io",
|
||||||
|
)
|
||||||
|
microsoft_user = MicrosoftEntraProviderUser.objects.filter(
|
||||||
|
provider=self.provider, user=user
|
||||||
|
).first()
|
||||||
|
self.assertIsNotNone(microsoft_user)
|
||||||
|
|
||||||
|
user.delete()
|
||||||
|
self.assertFalse(
|
||||||
|
MicrosoftEntraProviderUser.objects.filter(
|
||||||
|
provider=self.provider, user__username=uid
|
||||||
|
).exists()
|
||||||
|
)
|
||||||
|
user_create.assert_called_once()
|
||||||
|
user_patch.assert_not_called()
|
||||||
|
user_delete.assert_not_called()
|
||||||
|
|
||||||
|
def test_sync_task(self):
|
||||||
|
"""Test user discovery"""
|
||||||
|
uid = generate_id()
|
||||||
|
self.app.backchannel_providers.remove(self.provider)
|
||||||
|
different_user = User.objects.create(
|
||||||
|
username=uid,
|
||||||
|
email=f"{uid}@goauthentik.io",
|
||||||
|
)
|
||||||
|
self.app.backchannel_providers.add(self.provider)
|
||||||
|
with (
|
||||||
|
patch(
|
||||||
|
"authentik.enterprise.providers.microsoft_entra.models.MicrosoftEntraProvider.microsoft_credentials",
|
||||||
|
MagicMock(return_value={"credentials": self.creds}),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.organization.organization_request_builder.OrganizationRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=OrganizationCollectionResponse(
|
||||||
|
value=[
|
||||||
|
Organization(verified_domains=[VerifiedDomain(name="goauthentik.io")])
|
||||||
|
]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.item.user_item_request_builder.UserItemRequestBuilder.patch",
|
||||||
|
AsyncMock(return_value=MSUser(id=generate_id())),
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.users.users_request_builder.UsersRequestBuilder.get",
|
||||||
|
AsyncMock(
|
||||||
|
return_value=UserCollectionResponse(
|
||||||
|
value=[MSUser(mail=f"{uid}@goauthentik.io", id=uid)]
|
||||||
|
)
|
||||||
|
),
|
||||||
|
) as user_list,
|
||||||
|
patch(
|
||||||
|
"msgraph.generated.groups.groups_request_builder.GroupsRequestBuilder.get",
|
||||||
|
AsyncMock(return_value=GroupCollectionResponse(value=[])),
|
||||||
|
),
|
||||||
|
):
|
||||||
|
microsoft_entra_sync.delay(self.provider.pk).get()
|
||||||
|
self.assertTrue(
|
||||||
|
MicrosoftEntraProviderUser.objects.filter(
|
||||||
|
user=different_user, provider=self.provider
|
||||||
|
).exists()
|
||||||
|
)
|
||||||
|
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||||
|
user_list.assert_called_once()
|
||||||
21
authentik/enterprise/providers/microsoft_entra/urls.py
Normal file
21
authentik/enterprise/providers/microsoft_entra/urls.py
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
"""microsoft provider urls"""
|
||||||
|
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.api.groups import (
|
||||||
|
MicrosoftEntraProviderGroupViewSet,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.api.property_mappings import (
|
||||||
|
MicrosoftEntraProviderMappingViewSet,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.api.providers import (
|
||||||
|
MicrosoftEntraProviderViewSet,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.microsoft_entra.api.users import (
|
||||||
|
MicrosoftEntraProviderUserViewSet,
|
||||||
|
)
|
||||||
|
|
||||||
|
api_urlpatterns = [
|
||||||
|
("providers/microsoft_entra", MicrosoftEntraProviderViewSet),
|
||||||
|
("providers/microsoft_entra_users", MicrosoftEntraProviderUserViewSet),
|
||||||
|
("providers/microsoft_entra_groups", MicrosoftEntraProviderGroupViewSet),
|
||||||
|
("propertymappings/provider/microsoft_entra", MicrosoftEntraProviderMappingViewSet),
|
||||||
|
]
|
||||||
@ -7,7 +7,7 @@ from drf_spectacular.utils import extend_schema_field
|
|||||||
from rest_framework.fields import CharField
|
from rest_framework.fields import CharField
|
||||||
from rest_framework.viewsets import ModelViewSet
|
from rest_framework.viewsets import ModelViewSet
|
||||||
|
|
||||||
from authentik.core.api.propertymappings import PropertyMappingSerializer
|
from authentik.core.api.property_mappings import PropertyMappingSerializer
|
||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
from authentik.core.api.utils import JSONDictField
|
from authentik.core.api.utils import JSONDictField
|
||||||
from authentik.enterprise.providers.rac.models import RACPropertyMapping
|
from authentik.enterprise.providers.rac.models import RACPropertyMapping
|
||||||
|
|||||||
@ -7,6 +7,7 @@ from deepmerge import always_merger
|
|||||||
from django.db import models
|
from django.db import models
|
||||||
from django.db.models import QuerySet
|
from django.db.models import QuerySet
|
||||||
from django.http import HttpRequest
|
from django.http import HttpRequest
|
||||||
|
from django.templatetags.static import static
|
||||||
from django.utils.translation import gettext as _
|
from django.utils.translation import gettext as _
|
||||||
from rest_framework.serializers import Serializer
|
from rest_framework.serializers import Serializer
|
||||||
from structlog.stdlib import get_logger
|
from structlog.stdlib import get_logger
|
||||||
@ -63,6 +64,10 @@ class RACProvider(Provider):
|
|||||||
Can return None for providers that are not URL-based"""
|
Can return None for providers that are not URL-based"""
|
||||||
return "goauthentik.io://providers/rac/launch"
|
return "goauthentik.io://providers/rac/launch"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def icon_url(self) -> str | None:
|
||||||
|
return static("authentik/sources/rac.svg")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def component(self) -> str:
|
def component(self) -> str:
|
||||||
return "ak-provider-rac-form"
|
return "ak-provider-rac-form"
|
||||||
|
|||||||
@ -15,6 +15,7 @@ CELERY_BEAT_SCHEDULE = {
|
|||||||
TENANT_APPS = [
|
TENANT_APPS = [
|
||||||
"authentik.enterprise.audit",
|
"authentik.enterprise.audit",
|
||||||
"authentik.enterprise.providers.google_workspace",
|
"authentik.enterprise.providers.google_workspace",
|
||||||
|
"authentik.enterprise.providers.microsoft_entra",
|
||||||
"authentik.enterprise.providers.rac",
|
"authentik.enterprise.providers.rac",
|
||||||
"authentik.enterprise.stages.source",
|
"authentik.enterprise.stages.source",
|
||||||
]
|
]
|
||||||
|
|||||||
@ -18,9 +18,12 @@ class SourceStageSerializer(EnterpriseRequiredMixin, StageSerializer):
|
|||||||
source = Source.objects.filter(pk=_source.pk).select_subclasses().first()
|
source = Source.objects.filter(pk=_source.pk).select_subclasses().first()
|
||||||
if not source:
|
if not source:
|
||||||
raise ValidationError("Invalid source")
|
raise ValidationError("Invalid source")
|
||||||
login_button = source.ui_login_button(self.context["request"])
|
if "request" in self.context:
|
||||||
if not login_button:
|
login_button = source.ui_login_button(self.context["request"])
|
||||||
raise ValidationError("Invalid source selected, only web-based sources are supported.")
|
if not login_button:
|
||||||
|
raise ValidationError(
|
||||||
|
"Invalid source selected, only web-based sources are supported."
|
||||||
|
)
|
||||||
return source
|
return source
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
|
|||||||
@ -54,7 +54,7 @@ class SourceStageView(ChallengeStageView):
|
|||||||
def create_flow_token(self) -> FlowToken:
|
def create_flow_token(self) -> FlowToken:
|
||||||
"""Save the current flow state in a token that can be used to resume this flow"""
|
"""Save the current flow state in a token that can be used to resume this flow"""
|
||||||
pending_user: User = self.get_pending_user()
|
pending_user: User = self.get_pending_user()
|
||||||
if pending_user.is_anonymous:
|
if pending_user.is_anonymous or not pending_user.pk:
|
||||||
pending_user = get_anonymous_user()
|
pending_user = get_anonymous_user()
|
||||||
current_stage: SourceStage = self.executor.current_stage
|
current_stage: SourceStage = self.executor.current_stage
|
||||||
identifier = slugify(f"ak-source-stage-{current_stage.name}-{str(uuid4())}")
|
identifier = slugify(f"ak-source-stage-{current_stage.name}-{str(uuid4())}")
|
||||||
|
|||||||
@ -19,7 +19,8 @@ from rest_framework.serializers import ModelSerializer
|
|||||||
from rest_framework.viewsets import ModelViewSet
|
from rest_framework.viewsets import ModelViewSet
|
||||||
|
|
||||||
from authentik.admin.api.metrics import CoordinateSerializer
|
from authentik.admin.api.metrics import CoordinateSerializer
|
||||||
from authentik.core.api.utils import PassiveSerializer, TypeCreateSerializer
|
from authentik.core.api.object_types import TypeCreateSerializer
|
||||||
|
from authentik.core.api.utils import PassiveSerializer
|
||||||
from authentik.events.models import Event, EventAction
|
from authentik.events.models import Event, EventAction
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -45,7 +45,7 @@ class GeoIPContextProcessor(MMDBContextProcessor):
|
|||||||
|
|
||||||
def enrich_context(self, request: HttpRequest) -> dict:
|
def enrich_context(self, request: HttpRequest) -> dict:
|
||||||
# Different key `geoip` vs `geo` for legacy reasons
|
# Different key `geoip` vs `geo` for legacy reasons
|
||||||
return {"geoip": self.city(ClientIPMiddleware.get_client_ip(request))}
|
return {"geoip": self.city_dict(ClientIPMiddleware.get_client_ip(request))}
|
||||||
|
|
||||||
def city(self, ip_address: str) -> City | None:
|
def city(self, ip_address: str) -> City | None:
|
||||||
"""Wrapper for Reader.city"""
|
"""Wrapper for Reader.city"""
|
||||||
|
|||||||
@ -10,10 +10,10 @@ from rest_framework.serializers import ModelSerializer, SerializerMethodField
|
|||||||
from rest_framework.viewsets import GenericViewSet
|
from rest_framework.viewsets import GenericViewSet
|
||||||
from structlog.stdlib import get_logger
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
|
from authentik.core.api.object_types import TypesMixin
|
||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
|
from authentik.core.api.utils import MetaNameSerializer
|
||||||
from authentik.core.types import UserSettingSerializer
|
from authentik.core.types import UserSettingSerializer
|
||||||
from authentik.enterprise.apps import EnterpriseConfig
|
|
||||||
from authentik.flows.api.flows import FlowSetSerializer
|
from authentik.flows.api.flows import FlowSetSerializer
|
||||||
from authentik.flows.models import ConfigurableStage, Stage
|
from authentik.flows.models import ConfigurableStage, Stage
|
||||||
from authentik.lib.utils.reflection import all_subclasses
|
from authentik.lib.utils.reflection import all_subclasses
|
||||||
@ -47,6 +47,7 @@ class StageSerializer(ModelSerializer, MetaNameSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class StageViewSet(
|
class StageViewSet(
|
||||||
|
TypesMixin,
|
||||||
mixins.RetrieveModelMixin,
|
mixins.RetrieveModelMixin,
|
||||||
mixins.DestroyModelMixin,
|
mixins.DestroyModelMixin,
|
||||||
UsedByMixin,
|
UsedByMixin,
|
||||||
@ -63,25 +64,6 @@ class StageViewSet(
|
|||||||
def get_queryset(self): # pragma: no cover
|
def get_queryset(self): # pragma: no cover
|
||||||
return Stage.objects.select_subclasses().prefetch_related("flow_set")
|
return Stage.objects.select_subclasses().prefetch_related("flow_set")
|
||||||
|
|
||||||
@extend_schema(responses={200: TypeCreateSerializer(many=True)})
|
|
||||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
|
||||||
def types(self, request: Request) -> Response:
|
|
||||||
"""Get all creatable stage types"""
|
|
||||||
data = []
|
|
||||||
for subclass in all_subclasses(self.queryset.model, False):
|
|
||||||
subclass: Stage
|
|
||||||
data.append(
|
|
||||||
{
|
|
||||||
"name": subclass._meta.verbose_name,
|
|
||||||
"description": subclass.__doc__,
|
|
||||||
"component": subclass().component,
|
|
||||||
"model_name": subclass._meta.model_name,
|
|
||||||
"requires_enterprise": isinstance(subclass._meta.app_config, EnterpriseConfig),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
data = sorted(data, key=lambda x: x["name"])
|
|
||||||
return Response(TypeCreateSerializer(data, many=True).data)
|
|
||||||
|
|
||||||
@extend_schema(responses={200: UserSettingSerializer(many=True)})
|
@extend_schema(responses={200: UserSettingSerializer(many=True)})
|
||||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
@action(detail=False, pagination_class=None, filter_backends=[])
|
||||||
def user_settings(self, request: Request) -> Response:
|
def user_settings(self, request: Request) -> Response:
|
||||||
|
|||||||
@ -203,7 +203,8 @@ class FlowPlanner:
|
|||||||
"f(plan): building plan",
|
"f(plan): building plan",
|
||||||
)
|
)
|
||||||
plan = self._build_plan(user, request, default_context)
|
plan = self._build_plan(user, request, default_context)
|
||||||
cache.set(cache_key(self.flow, user), plan, CACHE_TIMEOUT)
|
if self.use_cache:
|
||||||
|
cache.set(cache_key(self.flow, user), plan, CACHE_TIMEOUT)
|
||||||
if not plan.bindings and not self.allow_empty_flows:
|
if not plan.bindings and not self.allow_empty_flows:
|
||||||
raise EmptyFlowException()
|
raise EmptyFlowException()
|
||||||
return plan
|
return plan
|
||||||
|
|||||||
@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
from base64 import b64encode
|
from base64 import b64encode
|
||||||
from functools import cache as funccache
|
from functools import cache as funccache
|
||||||
from hashlib import md5
|
from hashlib import md5, sha256
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
from urllib.parse import urlencode
|
from urllib.parse import urlencode
|
||||||
|
|
||||||
@ -20,7 +20,7 @@ from authentik.tenants.utils import get_current_tenant
|
|||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from authentik.core.models import User
|
from authentik.core.models import User
|
||||||
|
|
||||||
GRAVATAR_URL = "https://secure.gravatar.com"
|
GRAVATAR_URL = "https://www.gravatar.com"
|
||||||
DEFAULT_AVATAR = static("dist/assets/images/user_default.png")
|
DEFAULT_AVATAR = static("dist/assets/images/user_default.png")
|
||||||
CACHE_KEY_GRAVATAR = "goauthentik.io/lib/avatars/"
|
CACHE_KEY_GRAVATAR = "goauthentik.io/lib/avatars/"
|
||||||
CACHE_KEY_GRAVATAR_AVAILABLE = "goauthentik.io/lib/avatars/gravatar_available"
|
CACHE_KEY_GRAVATAR_AVAILABLE = "goauthentik.io/lib/avatars/gravatar_available"
|
||||||
@ -55,10 +55,9 @@ def avatar_mode_gravatar(user: "User", mode: str) -> str | None:
|
|||||||
if not cache.get(CACHE_KEY_GRAVATAR_AVAILABLE, True):
|
if not cache.get(CACHE_KEY_GRAVATAR_AVAILABLE, True):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# gravatar uses md5 for their URLs, so md5 can't be avoided
|
mail_hash = sha256(user.email.lower().encode("utf-8")).hexdigest() # nosec
|
||||||
mail_hash = md5(user.email.lower().encode("utf-8")).hexdigest() # nosec
|
parameters = {"size": "158", "rating": "g", "default": "404"}
|
||||||
parameters = [("size", "158"), ("rating", "g"), ("default", "404")]
|
gravatar_url = f"{GRAVATAR_URL}/avatar/{mail_hash}?{urlencode(parameters)}"
|
||||||
gravatar_url = f"{GRAVATAR_URL}/avatar/{mail_hash}?{urlencode(parameters, doseq=True)}"
|
|
||||||
|
|
||||||
full_key = CACHE_KEY_GRAVATAR + mail_hash
|
full_key = CACHE_KEY_GRAVATAR + mail_hash
|
||||||
if cache.has_key(full_key):
|
if cache.has_key(full_key):
|
||||||
@ -84,7 +83,9 @@ def avatar_mode_gravatar(user: "User", mode: str) -> str | None:
|
|||||||
|
|
||||||
def generate_colors(text: str) -> tuple[str, str]:
|
def generate_colors(text: str) -> tuple[str, str]:
|
||||||
"""Generate colours based on `text`"""
|
"""Generate colours based on `text`"""
|
||||||
color = int(md5(text.lower().encode("utf-8")).hexdigest(), 16) % 0xFFFFFF # nosec
|
color = (
|
||||||
|
int(md5(text.lower().encode("utf-8"), usedforsecurity=False).hexdigest(), 16) % 0xFFFFFF
|
||||||
|
) # nosec
|
||||||
|
|
||||||
# Get a (somewhat arbitrarily) reduced scope of colors
|
# Get a (somewhat arbitrarily) reduced scope of colors
|
||||||
# to avoid too dark or light backgrounds
|
# to avoid too dark or light backgrounds
|
||||||
@ -179,7 +180,7 @@ def avatar_mode_generated(user: "User", mode: str) -> str | None:
|
|||||||
|
|
||||||
def avatar_mode_url(user: "User", mode: str) -> str | None:
|
def avatar_mode_url(user: "User", mode: str) -> str | None:
|
||||||
"""Format url"""
|
"""Format url"""
|
||||||
mail_hash = md5(user.email.lower().encode("utf-8")).hexdigest() # nosec
|
mail_hash = md5(user.email.lower().encode("utf-8"), usedforsecurity=False).hexdigest() # nosec
|
||||||
return mode % {
|
return mode % {
|
||||||
"username": user.username,
|
"username": user.username,
|
||||||
"mail_hash": mail_hash,
|
"mail_hash": mail_hash,
|
||||||
|
|||||||
@ -304,6 +304,12 @@ class ConfigLoader:
|
|||||||
"""Wrapper for get that converts value into boolean"""
|
"""Wrapper for get that converts value into boolean"""
|
||||||
return str(self.get(path, default)).lower() == "true"
|
return str(self.get(path, default)).lower() == "true"
|
||||||
|
|
||||||
|
def get_keys(self, path: str, sep=".") -> list[str]:
|
||||||
|
"""List attribute keys by using yaml path"""
|
||||||
|
root = self.raw
|
||||||
|
attr: Attr = get_path_from_dict(root, path, sep=sep, default=Attr({}))
|
||||||
|
return attr.keys()
|
||||||
|
|
||||||
def get_dict_from_b64_json(self, path: str, default=None) -> dict:
|
def get_dict_from_b64_json(self, path: str, default=None) -> dict:
|
||||||
"""Wrapper for get that converts value from Base64 encoded string into dictionary"""
|
"""Wrapper for get that converts value from Base64 encoded string into dictionary"""
|
||||||
config_value = self.get(path)
|
config_value = self.get(path)
|
||||||
|
|||||||
@ -10,6 +10,10 @@ postgresql:
|
|||||||
use_pgpool: false
|
use_pgpool: false
|
||||||
test:
|
test:
|
||||||
name: test_authentik
|
name: test_authentik
|
||||||
|
read_replicas: {}
|
||||||
|
# For example
|
||||||
|
# 0:
|
||||||
|
# host: replica1.example.com
|
||||||
|
|
||||||
listen:
|
listen:
|
||||||
listen_http: 0.0.0.0:9000
|
listen_http: 0.0.0.0:9000
|
||||||
|
|||||||
@ -5,6 +5,7 @@ import socket
|
|||||||
from collections.abc import Iterable
|
from collections.abc import Iterable
|
||||||
from ipaddress import ip_address, ip_network
|
from ipaddress import ip_address, ip_network
|
||||||
from textwrap import indent
|
from textwrap import indent
|
||||||
|
from types import CodeType
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from cachetools import TLRUCache, cached
|
from cachetools import TLRUCache, cached
|
||||||
@ -184,7 +185,7 @@ class BaseEvaluator:
|
|||||||
full_expression += f"\nresult = handler({handler_signature})"
|
full_expression += f"\nresult = handler({handler_signature})"
|
||||||
return full_expression
|
return full_expression
|
||||||
|
|
||||||
def compile(self, expression: str) -> Any:
|
def compile(self, expression: str) -> CodeType:
|
||||||
"""Parse expression. Raises SyntaxError or ValueError if the syntax is incorrect."""
|
"""Parse expression. Raises SyntaxError or ValueError if the syntax is incorrect."""
|
||||||
param_keys = self._context.keys()
|
param_keys = self._context.keys()
|
||||||
return compile(self.wrap_expression(expression, param_keys), self._filename, "exec")
|
return compile(self.wrap_expression(expression, param_keys), self._filename, "exec")
|
||||||
|
|||||||
@ -101,6 +101,9 @@ def get_logger_config():
|
|||||||
"uvicorn": "WARNING",
|
"uvicorn": "WARNING",
|
||||||
"gunicorn": "INFO",
|
"gunicorn": "INFO",
|
||||||
"requests_mock": "WARNING",
|
"requests_mock": "WARNING",
|
||||||
|
"hpack": "WARNING",
|
||||||
|
"httpx": "WARNING",
|
||||||
|
"azure": "WARNING",
|
||||||
}
|
}
|
||||||
for handler_name, level in handler_level_map.items():
|
for handler_name, level in handler_level_map.items():
|
||||||
base_config["loggers"][handler_name] = {
|
base_config["loggers"][handler_name] = {
|
||||||
|
|||||||
69
authentik/lib/sync/mapper.py
Normal file
69
authentik/lib/sync/mapper.py
Normal file
@ -0,0 +1,69 @@
|
|||||||
|
from collections.abc import Generator
|
||||||
|
|
||||||
|
from django.db.models import QuerySet
|
||||||
|
from django.http import HttpRequest
|
||||||
|
|
||||||
|
from authentik.core.expression.evaluator import PropertyMappingEvaluator
|
||||||
|
from authentik.core.expression.exceptions import PropertyMappingExpressionException
|
||||||
|
from authentik.core.models import PropertyMapping, User
|
||||||
|
|
||||||
|
|
||||||
|
class PropertyMappingManager:
|
||||||
|
"""Pre-compile and cache property mappings when an identical
|
||||||
|
set is used multiple times"""
|
||||||
|
|
||||||
|
query_set: QuerySet[PropertyMapping]
|
||||||
|
mapping_subclass: type[PropertyMapping]
|
||||||
|
|
||||||
|
_evaluators: list[PropertyMappingEvaluator]
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
qs: QuerySet[PropertyMapping],
|
||||||
|
# Expected subclass of PropertyMappings, any objects in the queryset
|
||||||
|
# that are not an instance of this class will be discarded
|
||||||
|
mapping_subclass: type[PropertyMapping],
|
||||||
|
# As they keys of parameters are part of the compilation,
|
||||||
|
# we need a list of all parameter names that will be used during evaluation
|
||||||
|
context_keys: list[str],
|
||||||
|
) -> None:
|
||||||
|
self.query_set = qs
|
||||||
|
self.mapping_subclass = mapping_subclass
|
||||||
|
self.context_keys = context_keys
|
||||||
|
self.compile()
|
||||||
|
|
||||||
|
def compile(self):
|
||||||
|
self._evaluators = []
|
||||||
|
for mapping in self.query_set:
|
||||||
|
if not isinstance(mapping, self.mapping_subclass):
|
||||||
|
continue
|
||||||
|
evaluator = PropertyMappingEvaluator(
|
||||||
|
mapping, **{key: None for key in self.context_keys}
|
||||||
|
)
|
||||||
|
# Compile and cache expression
|
||||||
|
evaluator.compile()
|
||||||
|
self._evaluators.append(evaluator)
|
||||||
|
|
||||||
|
def iter_eval(
|
||||||
|
self,
|
||||||
|
user: User | None,
|
||||||
|
request: HttpRequest | None,
|
||||||
|
return_mapping: bool = False,
|
||||||
|
**kwargs,
|
||||||
|
) -> Generator[tuple[dict, PropertyMapping], None]:
|
||||||
|
"""Iterate over all mappings that were pre-compiled and
|
||||||
|
execute all of them with the given context"""
|
||||||
|
for mapping in self._evaluators:
|
||||||
|
mapping.set_context(user, request, **kwargs)
|
||||||
|
try:
|
||||||
|
value = mapping.evaluate(mapping.model.expression)
|
||||||
|
except PropertyMappingExpressionException as exc:
|
||||||
|
raise exc from exc
|
||||||
|
except Exception as exc:
|
||||||
|
raise PropertyMappingExpressionException(exc, mapping.model) from exc
|
||||||
|
if value is None:
|
||||||
|
continue
|
||||||
|
if return_mapping:
|
||||||
|
yield value, mapping.model
|
||||||
|
else:
|
||||||
|
yield value
|
||||||
@ -3,3 +3,6 @@
|
|||||||
PAGE_SIZE = 100
|
PAGE_SIZE = 100
|
||||||
PAGE_TIMEOUT = 60 * 60 * 0.5 # Half an hour
|
PAGE_TIMEOUT = 60 * 60 * 0.5 # Half an hour
|
||||||
HTTP_CONFLICT = 409
|
HTTP_CONFLICT = 409
|
||||||
|
HTTP_NO_CONTENT = 204
|
||||||
|
HTTP_SERVICE_UNAVAILABLE = 503
|
||||||
|
HTTP_TOO_MANY_REQUESTS = 429
|
||||||
|
|||||||
@ -47,8 +47,10 @@ class OutgoingSyncProviderStatusMixin:
|
|||||||
uid=slugify(provider.name),
|
uid=slugify(provider.name),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
status = {
|
with provider.sync_lock as lock_acquired:
|
||||||
"tasks": tasks,
|
status = {
|
||||||
"is_running": provider.sync_lock.locked(),
|
"tasks": tasks,
|
||||||
}
|
# If we could not acquire the lock, it means a task is using it, and thus is running
|
||||||
|
"is_running": not lock_acquired,
|
||||||
|
}
|
||||||
return Response(SyncStatusSerializer(status).data)
|
return Response(SyncStatusSerializer(status).data)
|
||||||
|
|||||||
@ -3,10 +3,18 @@
|
|||||||
from enum import StrEnum
|
from enum import StrEnum
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from deepmerge import always_merger
|
||||||
from django.db import DatabaseError
|
from django.db import DatabaseError
|
||||||
from structlog.stdlib import get_logger
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
from authentik.lib.sync.outgoing.exceptions import NotFoundSyncException
|
from authentik.core.expression.exceptions import (
|
||||||
|
PropertyMappingExpressionException,
|
||||||
|
SkipObjectException,
|
||||||
|
)
|
||||||
|
from authentik.events.models import Event, EventAction
|
||||||
|
from authentik.lib.sync.mapper import PropertyMappingManager
|
||||||
|
from authentik.lib.sync.outgoing.exceptions import NotFoundSyncException, StopSync
|
||||||
|
from authentik.lib.utils.errors import exception_to_string
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from django.db.models import Model
|
from django.db.models import Model
|
||||||
@ -28,6 +36,7 @@ class BaseOutgoingSyncClient[
|
|||||||
provider: TProvider
|
provider: TProvider
|
||||||
connection_type: type[TConnection]
|
connection_type: type[TConnection]
|
||||||
connection_type_query: str
|
connection_type_query: str
|
||||||
|
mapper: PropertyMappingManager
|
||||||
|
|
||||||
can_discover = False
|
can_discover = False
|
||||||
|
|
||||||
@ -39,26 +48,25 @@ class BaseOutgoingSyncClient[
|
|||||||
"""Create object in remote destination"""
|
"""Create object in remote destination"""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def update(self, obj: TModel, connection: object):
|
def update(self, obj: TModel, connection: TConnection):
|
||||||
"""Update object in remote destination"""
|
"""Update object in remote destination"""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def write(self, obj: TModel) -> tuple[TConnection, bool]:
|
def write(self, obj: TModel) -> tuple[TConnection, bool]:
|
||||||
"""Write object to destination. Uses self.create and self.update, but
|
"""Write object to destination. Uses self.create and self.update, but
|
||||||
can be overwritten for further logic"""
|
can be overwritten for further logic"""
|
||||||
remote_obj = self.connection_type.objects.filter(
|
connection = self.connection_type.objects.filter(
|
||||||
provider=self.provider, **{self.connection_type_query: obj}
|
provider=self.provider, **{self.connection_type_query: obj}
|
||||||
).first()
|
).first()
|
||||||
connection: TConnection | None = None
|
|
||||||
try:
|
try:
|
||||||
if not remote_obj:
|
if not connection:
|
||||||
connection = self.create(obj)
|
connection = self.create(obj)
|
||||||
return connection, True
|
return connection, True
|
||||||
try:
|
try:
|
||||||
self.update(obj, remote_obj)
|
self.update(obj, connection)
|
||||||
return remote_obj, False
|
return connection, False
|
||||||
except NotFoundSyncException:
|
except NotFoundSyncException:
|
||||||
remote_obj.delete()
|
connection.delete()
|
||||||
connection = self.create(obj)
|
connection = self.create(obj)
|
||||||
return connection, True
|
return connection, True
|
||||||
except DatabaseError as exc:
|
except DatabaseError as exc:
|
||||||
@ -71,9 +79,34 @@ class BaseOutgoingSyncClient[
|
|||||||
"""Delete object from destination"""
|
"""Delete object from destination"""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def to_schema(self, obj: TModel) -> TSchema:
|
def to_schema(self, obj: TModel, connection: TConnection | None, **defaults) -> TSchema:
|
||||||
"""Convert object to destination schema"""
|
"""Convert object to destination schema"""
|
||||||
raise NotImplementedError()
|
raw_final_object = {}
|
||||||
|
try:
|
||||||
|
eval_kwargs = {
|
||||||
|
"request": None,
|
||||||
|
"provider": self.provider,
|
||||||
|
"connection": connection,
|
||||||
|
obj._meta.model_name: obj,
|
||||||
|
}
|
||||||
|
eval_kwargs.setdefault("user", None)
|
||||||
|
for value in self.mapper.iter_eval(**eval_kwargs):
|
||||||
|
always_merger.merge(raw_final_object, value)
|
||||||
|
except SkipObjectException as exc:
|
||||||
|
raise exc from exc
|
||||||
|
except PropertyMappingExpressionException as exc:
|
||||||
|
# Value error can be raised when assigning invalid data to an attribute
|
||||||
|
Event.new(
|
||||||
|
EventAction.CONFIGURATION_ERROR,
|
||||||
|
message=f"Failed to evaluate property-mapping {exception_to_string(exc)}",
|
||||||
|
mapping=exc.mapping,
|
||||||
|
).save()
|
||||||
|
raise StopSync(exc, obj, exc.mapping) from exc
|
||||||
|
if not raw_final_object:
|
||||||
|
raise StopSync(ValueError("No mappings configured"), obj)
|
||||||
|
for key, value in defaults.items():
|
||||||
|
raw_final_object.setdefault(key, value)
|
||||||
|
return raw_final_object
|
||||||
|
|
||||||
def discover(self):
|
def discover(self):
|
||||||
"""Optional method. Can be used to implement a "discovery" where
|
"""Optional method. Can be used to implement a "discovery" where
|
||||||
|
|||||||
@ -17,6 +17,10 @@ class ObjectExistsSyncException(BaseSyncException):
|
|||||||
"""Exception when an object already exists in the remote system"""
|
"""Exception when an object already exists in the remote system"""
|
||||||
|
|
||||||
|
|
||||||
|
class BadRequestSyncException(BaseSyncException):
|
||||||
|
"""Exception when invalid data was sent to the remote system"""
|
||||||
|
|
||||||
|
|
||||||
class StopSync(BaseSyncException):
|
class StopSync(BaseSyncException):
|
||||||
"""Exception raised when a configuration error should stop the sync process"""
|
"""Exception raised when a configuration error should stop the sync process"""
|
||||||
|
|
||||||
|
|||||||
@ -1,14 +1,22 @@
|
|||||||
from typing import Any, Self
|
from typing import Any, Self
|
||||||
|
|
||||||
from django.core.cache import cache
|
import pglock
|
||||||
from django.db.models import Model, QuerySet
|
from django.db import connection
|
||||||
from redis.lock import Lock
|
from django.db.models import Model, QuerySet, TextChoices
|
||||||
|
|
||||||
from authentik.core.models import Group, User
|
from authentik.core.models import Group, User
|
||||||
from authentik.lib.sync.outgoing import PAGE_TIMEOUT
|
|
||||||
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
|
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
|
||||||
|
|
||||||
|
|
||||||
|
class OutgoingSyncDeleteAction(TextChoices):
|
||||||
|
"""Action taken when a user/group is deleted in authentik. Suspend is not available for groups,
|
||||||
|
and will be treated as `do_nothing`"""
|
||||||
|
|
||||||
|
DO_NOTHING = "do_nothing"
|
||||||
|
DELETE = "delete"
|
||||||
|
SUSPEND = "suspend"
|
||||||
|
|
||||||
|
|
||||||
class OutgoingSyncProvider(Model):
|
class OutgoingSyncProvider(Model):
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
@ -23,10 +31,10 @@ class OutgoingSyncProvider(Model):
|
|||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def sync_lock(self) -> Lock:
|
def sync_lock(self) -> pglock.advisory:
|
||||||
"""Redis lock to prevent multiple parallel syncs happening"""
|
"""Postgres lock for syncing SCIM to prevent multiple parallel syncs happening"""
|
||||||
return Lock(
|
return pglock.advisory(
|
||||||
cache.client.get_client(),
|
lock_id=f"goauthentik.io/{connection.schema_name}/providers/outgoing-sync/{str(self.pk)}",
|
||||||
name=f"goauthentik.io/providers/outgoing-sync/{str(self.pk)}",
|
timeout=0,
|
||||||
timeout=(60 * 60 * PAGE_TIMEOUT) * 3,
|
side_effect=pglock.Return,
|
||||||
)
|
)
|
||||||
|
|||||||
@ -47,7 +47,7 @@ def register_signals(
|
|||||||
return
|
return
|
||||||
task_sync_direct.delay(
|
task_sync_direct.delay(
|
||||||
class_to_path(instance.__class__), instance.pk, Direction.remove.value
|
class_to_path(instance.__class__), instance.pk, Direction.remove.value
|
||||||
)
|
).get(propagate=False)
|
||||||
|
|
||||||
pre_delete.connect(model_pre_delete, User, dispatch_uid=uid, weak=False)
|
pre_delete.connect(model_pre_delete, User, dispatch_uid=uid, weak=False)
|
||||||
pre_delete.connect(model_pre_delete, Group, dispatch_uid=uid, weak=False)
|
pre_delete.connect(model_pre_delete, Group, dispatch_uid=uid, weak=False)
|
||||||
|
|||||||
@ -1,5 +1,7 @@
|
|||||||
from collections.abc import Callable
|
from collections.abc import Callable
|
||||||
|
from dataclasses import asdict
|
||||||
|
|
||||||
|
from celery.exceptions import Retry
|
||||||
from celery.result import allow_join_result
|
from celery.result import allow_join_result
|
||||||
from django.core.paginator import Paginator
|
from django.core.paginator import Paginator
|
||||||
from django.db.models import Model, QuerySet
|
from django.db.models import Model, QuerySet
|
||||||
@ -12,9 +14,14 @@ from authentik.core.models import Group, User
|
|||||||
from authentik.events.logs import LogEvent
|
from authentik.events.logs import LogEvent
|
||||||
from authentik.events.models import TaskStatus
|
from authentik.events.models import TaskStatus
|
||||||
from authentik.events.system_tasks import SystemTask
|
from authentik.events.system_tasks import SystemTask
|
||||||
|
from authentik.events.utils import sanitize_item
|
||||||
from authentik.lib.sync.outgoing import PAGE_SIZE, PAGE_TIMEOUT
|
from authentik.lib.sync.outgoing import PAGE_SIZE, PAGE_TIMEOUT
|
||||||
from authentik.lib.sync.outgoing.base import Direction
|
from authentik.lib.sync.outgoing.base import Direction
|
||||||
from authentik.lib.sync.outgoing.exceptions import StopSync, TransientSyncException
|
from authentik.lib.sync.outgoing.exceptions import (
|
||||||
|
BadRequestSyncException,
|
||||||
|
StopSync,
|
||||||
|
TransientSyncException,
|
||||||
|
)
|
||||||
from authentik.lib.sync.outgoing.models import OutgoingSyncProvider
|
from authentik.lib.sync.outgoing.models import OutgoingSyncProvider
|
||||||
from authentik.lib.utils.reflection import class_to_path, path_to_class
|
from authentik.lib.utils.reflection import class_to_path, path_to_class
|
||||||
|
|
||||||
@ -59,17 +66,16 @@ class SyncTasks:
|
|||||||
).first()
|
).first()
|
||||||
if not provider:
|
if not provider:
|
||||||
return
|
return
|
||||||
lock = provider.sync_lock
|
|
||||||
if lock.locked():
|
|
||||||
self.logger.debug("Sync locked, skipping task", source=provider.name)
|
|
||||||
return
|
|
||||||
task.set_uid(slugify(provider.name))
|
task.set_uid(slugify(provider.name))
|
||||||
messages = []
|
messages = []
|
||||||
messages.append(_("Starting full provider sync"))
|
messages.append(_("Starting full provider sync"))
|
||||||
self.logger.debug("Starting provider sync")
|
self.logger.debug("Starting provider sync")
|
||||||
users_paginator = Paginator(provider.get_object_qs(User), PAGE_SIZE)
|
users_paginator = Paginator(provider.get_object_qs(User), PAGE_SIZE)
|
||||||
groups_paginator = Paginator(provider.get_object_qs(Group), PAGE_SIZE)
|
groups_paginator = Paginator(provider.get_object_qs(Group), PAGE_SIZE)
|
||||||
with allow_join_result(), lock:
|
with allow_join_result(), provider.sync_lock as lock_acquired:
|
||||||
|
if not lock_acquired:
|
||||||
|
self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name)
|
||||||
|
return
|
||||||
try:
|
try:
|
||||||
for page in users_paginator.page_range:
|
for page in users_paginator.page_range:
|
||||||
messages.append(_("Syncing page %(page)d of users" % {"page": page}))
|
messages.append(_("Syncing page %(page)d of users" % {"page": page}))
|
||||||
@ -78,7 +84,7 @@ class SyncTasks:
|
|||||||
time_limit=PAGE_TIMEOUT,
|
time_limit=PAGE_TIMEOUT,
|
||||||
soft_time_limit=PAGE_TIMEOUT,
|
soft_time_limit=PAGE_TIMEOUT,
|
||||||
).get():
|
).get():
|
||||||
messages.append(msg)
|
messages.append(LogEvent(**msg))
|
||||||
for page in groups_paginator.page_range:
|
for page in groups_paginator.page_range:
|
||||||
messages.append(_("Syncing page %(page)d of groups" % {"page": page}))
|
messages.append(_("Syncing page %(page)d of groups" % {"page": page}))
|
||||||
for msg in sync_objects.apply_async(
|
for msg in sync_objects.apply_async(
|
||||||
@ -86,7 +92,7 @@ class SyncTasks:
|
|||||||
time_limit=PAGE_TIMEOUT,
|
time_limit=PAGE_TIMEOUT,
|
||||||
soft_time_limit=PAGE_TIMEOUT,
|
soft_time_limit=PAGE_TIMEOUT,
|
||||||
).get():
|
).get():
|
||||||
messages.append(msg)
|
messages.append(LogEvent(**msg))
|
||||||
except TransientSyncException as exc:
|
except TransientSyncException as exc:
|
||||||
self.logger.warning("transient sync exception", exc=exc)
|
self.logger.warning("transient sync exception", exc=exc)
|
||||||
raise task.retry(exc=exc) from exc
|
raise task.retry(exc=exc) from exc
|
||||||
@ -120,40 +126,70 @@ class SyncTasks:
|
|||||||
try:
|
try:
|
||||||
client.write(obj)
|
client.write(obj)
|
||||||
except SkipObjectException:
|
except SkipObjectException:
|
||||||
|
self.logger.debug("skipping object due to SkipObject", obj=obj)
|
||||||
continue
|
continue
|
||||||
|
except BadRequestSyncException as exc:
|
||||||
|
self.logger.warning("failed to sync object", exc=exc, obj=obj)
|
||||||
|
messages.append(
|
||||||
|
asdict(
|
||||||
|
LogEvent(
|
||||||
|
_(
|
||||||
|
(
|
||||||
|
"Failed to sync {object_type} {object_name} "
|
||||||
|
"due to error: {error}"
|
||||||
|
).format_map(
|
||||||
|
{
|
||||||
|
"object_type": obj._meta.verbose_name,
|
||||||
|
"object_name": str(obj),
|
||||||
|
"error": str(exc),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
),
|
||||||
|
log_level="warning",
|
||||||
|
logger=f"{provider._meta.verbose_name}@{object_type}",
|
||||||
|
attributes={"arguments": exc.args[1:], "obj": sanitize_item(obj)},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
except TransientSyncException as exc:
|
except TransientSyncException as exc:
|
||||||
self.logger.warning("failed to sync object", exc=exc, user=obj)
|
self.logger.warning("failed to sync object", exc=exc, user=obj)
|
||||||
messages.append(
|
messages.append(
|
||||||
LogEvent(
|
asdict(
|
||||||
_(
|
LogEvent(
|
||||||
(
|
_(
|
||||||
"Failed to sync {object_type} {object_name} "
|
(
|
||||||
"due to transient error: {error}"
|
"Failed to sync {object_type} {object_name} "
|
||||||
).format_map(
|
"due to transient error: {error}"
|
||||||
{
|
).format_map(
|
||||||
"object_type": obj._meta.verbose_name,
|
{
|
||||||
"object_name": str(obj),
|
"object_type": obj._meta.verbose_name,
|
||||||
"error": str(exc),
|
"object_name": str(obj),
|
||||||
}
|
"error": str(exc),
|
||||||
)
|
}
|
||||||
),
|
)
|
||||||
log_level="warning",
|
),
|
||||||
logger="",
|
log_level="warning",
|
||||||
|
logger=f"{provider._meta.verbose_name}@{object_type}",
|
||||||
|
attributes={"obj": sanitize_item(obj)},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
except StopSync as exc:
|
except StopSync as exc:
|
||||||
self.logger.warning("Stopping sync", exc=exc)
|
self.logger.warning("Stopping sync", exc=exc)
|
||||||
messages.append(
|
messages.append(
|
||||||
LogEvent(
|
asdict(
|
||||||
_(
|
LogEvent(
|
||||||
"Stopping sync due to error: {error}".format_map(
|
_(
|
||||||
{
|
"Stopping sync due to error: {error}".format_map(
|
||||||
"error": exc.detail(),
|
{
|
||||||
}
|
"error": exc.detail(),
|
||||||
)
|
}
|
||||||
),
|
)
|
||||||
log_level="warning",
|
),
|
||||||
logger="",
|
log_level="warning",
|
||||||
|
logger=f"{provider._meta.verbose_name}@{object_type}",
|
||||||
|
attributes={"obj": sanitize_item(obj)},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
break
|
break
|
||||||
@ -185,7 +221,9 @@ class SyncTasks:
|
|||||||
client.write(instance)
|
client.write(instance)
|
||||||
if operation == Direction.remove:
|
if operation == Direction.remove:
|
||||||
client.delete(instance)
|
client.delete(instance)
|
||||||
except (StopSync, TransientSyncException) as exc:
|
except TransientSyncException as exc:
|
||||||
|
raise Retry() from exc
|
||||||
|
except StopSync as exc:
|
||||||
self.logger.warning(exc, provider_pk=provider.pk)
|
self.logger.warning(exc, provider_pk=provider.pk)
|
||||||
|
|
||||||
def sync_signal_m2m(self, group_pk: str, action: str, pk_set: list[int]):
|
def sync_signal_m2m(self, group_pk: str, action: str, pk_set: list[int]):
|
||||||
@ -211,5 +249,7 @@ class SyncTasks:
|
|||||||
if action == "post_remove":
|
if action == "post_remove":
|
||||||
operation = Direction.remove
|
operation = Direction.remove
|
||||||
client.update_group(group, operation, pk_set)
|
client.update_group(group, operation, pk_set)
|
||||||
except (StopSync, TransientSyncException) as exc:
|
except TransientSyncException as exc:
|
||||||
|
raise Retry() from exc
|
||||||
|
except StopSync as exc:
|
||||||
self.logger.warning(exc, provider_pk=provider.pk)
|
self.logger.warning(exc, provider_pk=provider.pk)
|
||||||
|
|||||||
@ -169,3 +169,9 @@ class TestConfig(TestCase):
|
|||||||
self.assertEqual(config.get("cache.timeout_flows"), "32m")
|
self.assertEqual(config.get("cache.timeout_flows"), "32m")
|
||||||
self.assertEqual(config.get("cache.timeout_policies"), "3920ns")
|
self.assertEqual(config.get("cache.timeout_policies"), "3920ns")
|
||||||
self.assertEqual(config.get("cache.timeout_reputation"), "298382us")
|
self.assertEqual(config.get("cache.timeout_reputation"), "298382us")
|
||||||
|
|
||||||
|
def test_get_keys(self):
|
||||||
|
"""Test get_keys"""
|
||||||
|
config = ConfigLoader()
|
||||||
|
config.set("foo.bar", "baz")
|
||||||
|
self.assertEqual(list(config.get_keys("foo")), ["bar"])
|
||||||
|
|||||||
@ -12,7 +12,7 @@ from authentik.lib.config import CONFIG
|
|||||||
SERVICE_HOST_ENV_NAME = "KUBERNETES_SERVICE_HOST"
|
SERVICE_HOST_ENV_NAME = "KUBERNETES_SERVICE_HOST"
|
||||||
|
|
||||||
|
|
||||||
def all_subclasses(cls, sort=True):
|
def all_subclasses[T](cls: T, sort=True) -> list[T] | set[T]:
|
||||||
"""Recursively return all subclassess of cls"""
|
"""Recursively return all subclassess of cls"""
|
||||||
classes = set(cls.__subclasses__()).union(
|
classes = set(cls.__subclasses__()).union(
|
||||||
[s for c in cls.__subclasses__() for s in all_subclasses(c, sort=sort)]
|
[s for c in cls.__subclasses__() for s in all_subclasses(c, sort=sort)]
|
||||||
|
|||||||
@ -117,8 +117,12 @@ class OutpostHealthSerializer(PassiveSerializer):
|
|||||||
uid = CharField(read_only=True)
|
uid = CharField(read_only=True)
|
||||||
last_seen = DateTimeField(read_only=True)
|
last_seen = DateTimeField(read_only=True)
|
||||||
version = CharField(read_only=True)
|
version = CharField(read_only=True)
|
||||||
version_should = CharField(read_only=True)
|
golang_version = CharField(read_only=True)
|
||||||
|
openssl_enabled = BooleanField(read_only=True)
|
||||||
|
openssl_version = CharField(read_only=True)
|
||||||
|
fips_enabled = BooleanField(read_only=True)
|
||||||
|
|
||||||
|
version_should = CharField(read_only=True)
|
||||||
version_outdated = BooleanField(read_only=True)
|
version_outdated = BooleanField(read_only=True)
|
||||||
|
|
||||||
build_hash = CharField(read_only=True, required=False)
|
build_hash = CharField(read_only=True, required=False)
|
||||||
@ -173,6 +177,10 @@ class OutpostViewSet(UsedByMixin, ModelViewSet):
|
|||||||
"version_should": state.version_should,
|
"version_should": state.version_should,
|
||||||
"version_outdated": state.version_outdated,
|
"version_outdated": state.version_outdated,
|
||||||
"build_hash": state.build_hash,
|
"build_hash": state.build_hash,
|
||||||
|
"golang_version": state.golang_version,
|
||||||
|
"openssl_enabled": state.openssl_enabled,
|
||||||
|
"openssl_version": state.openssl_version,
|
||||||
|
"fips_enabled": state.fips_enabled,
|
||||||
"hostname": state.hostname,
|
"hostname": state.hostname,
|
||||||
"build_hash_should": get_build_hash(),
|
"build_hash_should": get_build_hash(),
|
||||||
}
|
}
|
||||||
|
|||||||
@ -15,9 +15,12 @@ from rest_framework.response import Response
|
|||||||
from rest_framework.serializers import ModelSerializer
|
from rest_framework.serializers import ModelSerializer
|
||||||
from rest_framework.viewsets import GenericViewSet, ModelViewSet
|
from rest_framework.viewsets import GenericViewSet, ModelViewSet
|
||||||
|
|
||||||
|
from authentik.core.api.object_types import TypesMixin
|
||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
from authentik.core.api.utils import MetaNameSerializer, PassiveSerializer, TypeCreateSerializer
|
from authentik.core.api.utils import (
|
||||||
from authentik.lib.utils.reflection import all_subclasses
|
MetaNameSerializer,
|
||||||
|
PassiveSerializer,
|
||||||
|
)
|
||||||
from authentik.outposts.models import (
|
from authentik.outposts.models import (
|
||||||
DockerServiceConnection,
|
DockerServiceConnection,
|
||||||
KubernetesServiceConnection,
|
KubernetesServiceConnection,
|
||||||
@ -57,6 +60,7 @@ class ServiceConnectionStateSerializer(PassiveSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class ServiceConnectionViewSet(
|
class ServiceConnectionViewSet(
|
||||||
|
TypesMixin,
|
||||||
mixins.RetrieveModelMixin,
|
mixins.RetrieveModelMixin,
|
||||||
mixins.DestroyModelMixin,
|
mixins.DestroyModelMixin,
|
||||||
UsedByMixin,
|
UsedByMixin,
|
||||||
@ -70,23 +74,6 @@ class ServiceConnectionViewSet(
|
|||||||
search_fields = ["name"]
|
search_fields = ["name"]
|
||||||
filterset_fields = ["name"]
|
filterset_fields = ["name"]
|
||||||
|
|
||||||
@extend_schema(responses={200: TypeCreateSerializer(many=True)})
|
|
||||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
|
||||||
def types(self, request: Request) -> Response:
|
|
||||||
"""Get all creatable service connection types"""
|
|
||||||
data = []
|
|
||||||
for subclass in all_subclasses(self.queryset.model):
|
|
||||||
subclass: OutpostServiceConnection
|
|
||||||
data.append(
|
|
||||||
{
|
|
||||||
"name": subclass._meta.verbose_name,
|
|
||||||
"description": subclass.__doc__,
|
|
||||||
"component": subclass().component,
|
|
||||||
"model_name": subclass._meta.model_name,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
return Response(TypeCreateSerializer(data, many=True).data)
|
|
||||||
|
|
||||||
@extend_schema(responses={200: ServiceConnectionStateSerializer(many=False)})
|
@extend_schema(responses={200: ServiceConnectionStateSerializer(many=False)})
|
||||||
@action(detail=True, pagination_class=None, filter_backends=[])
|
@action(detail=True, pagination_class=None, filter_backends=[])
|
||||||
def state(self, request: Request, pk: str) -> Response:
|
def state(self, request: Request, pk: str) -> Response:
|
||||||
|
|||||||
@ -121,6 +121,10 @@ class OutpostConsumer(JsonWebsocketConsumer):
|
|||||||
if msg.instruction == WebsocketMessageInstruction.HELLO:
|
if msg.instruction == WebsocketMessageInstruction.HELLO:
|
||||||
state.version = msg.args.pop("version", None)
|
state.version = msg.args.pop("version", None)
|
||||||
state.build_hash = msg.args.pop("buildHash", "")
|
state.build_hash = msg.args.pop("buildHash", "")
|
||||||
|
state.golang_version = msg.args.pop("golangVersion", "")
|
||||||
|
state.openssl_enabled = msg.args.pop("opensslEnabled", False)
|
||||||
|
state.openssl_version = msg.args.pop("opensslVersion", "")
|
||||||
|
state.fips_enabled = msg.args.pop("fipsEnabled", False)
|
||||||
state.args.update(msg.args)
|
state.args.update(msg.args)
|
||||||
elif msg.instruction == WebsocketMessageInstruction.ACK:
|
elif msg.instruction == WebsocketMessageInstruction.ACK:
|
||||||
return
|
return
|
||||||
|
|||||||
@ -124,7 +124,6 @@ class KubernetesObjectReconciler(Generic[T]):
|
|||||||
self.update(current, reference)
|
self.update(current, reference)
|
||||||
self.logger.debug("Updating")
|
self.logger.debug("Updating")
|
||||||
except (OpenApiException, HTTPError) as exc:
|
except (OpenApiException, HTTPError) as exc:
|
||||||
|
|
||||||
if isinstance(exc, ApiException) and exc.status == 422: # noqa: PLR2004
|
if isinstance(exc, ApiException) and exc.status == 422: # noqa: PLR2004
|
||||||
self.logger.debug("Failed to update current, triggering re-create")
|
self.logger.debug("Failed to update current, triggering re-create")
|
||||||
self._recreate(current=current, reference=reference)
|
self._recreate(current=current, reference=reference)
|
||||||
|
|||||||
@ -131,7 +131,7 @@ class OutpostServiceConnection(models.Model):
|
|||||||
verbose_name = _("Outpost Service-Connection")
|
verbose_name = _("Outpost Service-Connection")
|
||||||
verbose_name_plural = _("Outpost Service-Connections")
|
verbose_name_plural = _("Outpost Service-Connections")
|
||||||
|
|
||||||
def __str__(self) -> __version__:
|
def __str__(self) -> str:
|
||||||
return f"Outpost service connection {self.name}"
|
return f"Outpost service connection {self.name}"
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@ -434,6 +434,10 @@ class OutpostState:
|
|||||||
version: str | None = field(default=None)
|
version: str | None = field(default=None)
|
||||||
version_should: Version = field(default=OUR_VERSION)
|
version_should: Version = field(default=OUR_VERSION)
|
||||||
build_hash: str = field(default="")
|
build_hash: str = field(default="")
|
||||||
|
golang_version: str = field(default="")
|
||||||
|
openssl_enabled: bool = field(default=False)
|
||||||
|
openssl_version: str = field(default="")
|
||||||
|
fips_enabled: bool = field(default=False)
|
||||||
hostname: str = field(default="")
|
hostname: str = field(default="")
|
||||||
args: dict = field(default_factory=dict)
|
args: dict = field(default_factory=dict)
|
||||||
|
|
||||||
|
|||||||
@ -13,10 +13,13 @@ from rest_framework.viewsets import GenericViewSet
|
|||||||
from structlog.stdlib import get_logger
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
from authentik.core.api.applications import user_app_cache_key
|
from authentik.core.api.applications import user_app_cache_key
|
||||||
|
from authentik.core.api.object_types import TypesMixin
|
||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
from authentik.core.api.utils import CacheSerializer, MetaNameSerializer, TypeCreateSerializer
|
from authentik.core.api.utils import (
|
||||||
|
CacheSerializer,
|
||||||
|
MetaNameSerializer,
|
||||||
|
)
|
||||||
from authentik.events.logs import LogEventSerializer, capture_logs
|
from authentik.events.logs import LogEventSerializer, capture_logs
|
||||||
from authentik.lib.utils.reflection import all_subclasses
|
|
||||||
from authentik.policies.api.exec import PolicyTestResultSerializer, PolicyTestSerializer
|
from authentik.policies.api.exec import PolicyTestResultSerializer, PolicyTestSerializer
|
||||||
from authentik.policies.models import Policy, PolicyBinding
|
from authentik.policies.models import Policy, PolicyBinding
|
||||||
from authentik.policies.process import PolicyProcess
|
from authentik.policies.process import PolicyProcess
|
||||||
@ -69,6 +72,7 @@ class PolicySerializer(ModelSerializer, MetaNameSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class PolicyViewSet(
|
class PolicyViewSet(
|
||||||
|
TypesMixin,
|
||||||
mixins.RetrieveModelMixin,
|
mixins.RetrieveModelMixin,
|
||||||
mixins.DestroyModelMixin,
|
mixins.DestroyModelMixin,
|
||||||
UsedByMixin,
|
UsedByMixin,
|
||||||
@ -89,23 +93,6 @@ class PolicyViewSet(
|
|||||||
def get_queryset(self): # pragma: no cover
|
def get_queryset(self): # pragma: no cover
|
||||||
return Policy.objects.select_subclasses().prefetch_related("bindings", "promptstage_set")
|
return Policy.objects.select_subclasses().prefetch_related("bindings", "promptstage_set")
|
||||||
|
|
||||||
@extend_schema(responses={200: TypeCreateSerializer(many=True)})
|
|
||||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
|
||||||
def types(self, request: Request) -> Response:
|
|
||||||
"""Get all creatable policy types"""
|
|
||||||
data = []
|
|
||||||
for subclass in all_subclasses(self.queryset.model):
|
|
||||||
subclass: Policy
|
|
||||||
data.append(
|
|
||||||
{
|
|
||||||
"name": subclass._meta.verbose_name,
|
|
||||||
"description": subclass.__doc__,
|
|
||||||
"component": subclass().component,
|
|
||||||
"model_name": subclass._meta.model_name,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
return Response(TypeCreateSerializer(data, many=True).data)
|
|
||||||
|
|
||||||
@permission_required(None, ["authentik_policies.view_policy_cache"])
|
@permission_required(None, ["authentik_policies.view_policy_cache"])
|
||||||
@extend_schema(responses={200: CacheSerializer(many=False)})
|
@extend_schema(responses={200: CacheSerializer(many=False)})
|
||||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
@action(detail=False, pagination_class=None, filter_backends=[])
|
||||||
|
|||||||
@ -102,7 +102,7 @@ class EventMatcherPolicy(Policy):
|
|||||||
result = checker(request, event)
|
result = checker(request, event)
|
||||||
if result is None:
|
if result is None:
|
||||||
continue
|
continue
|
||||||
LOGGER.info(
|
LOGGER.debug(
|
||||||
"Event matcher check result",
|
"Event matcher check result",
|
||||||
checker=checker.__name__,
|
checker=checker.__name__,
|
||||||
result=result,
|
result=result,
|
||||||
|
|||||||
@ -96,16 +96,42 @@ class TestEvaluator(TestCase):
|
|||||||
execution_logging=True,
|
execution_logging=True,
|
||||||
expression="ak_message(request.http_request.path)\nreturn True",
|
expression="ak_message(request.http_request.path)\nreturn True",
|
||||||
)
|
)
|
||||||
tmpl = f"""
|
expr2 = ExpressionPolicy.objects.create(
|
||||||
ak_message(request.http_request.path)
|
name=generate_id(),
|
||||||
res = ak_call_policy('{expr.name}')
|
execution_logging=True,
|
||||||
ak_message(request.http_request.path)
|
expression=f"""
|
||||||
for msg in res.messages:
|
ak_message(request.http_request.path)
|
||||||
ak_message(msg)
|
res = ak_call_policy('{expr.name}')
|
||||||
"""
|
ak_message(request.http_request.path)
|
||||||
evaluator = PolicyEvaluator("test")
|
for msg in res.messages:
|
||||||
evaluator.set_policy_request(self.request)
|
ak_message(msg)
|
||||||
res = evaluator.evaluate(tmpl)
|
""",
|
||||||
|
)
|
||||||
|
proc = PolicyProcess(PolicyBinding(policy=expr2), request=self.request, connection=None)
|
||||||
|
res = proc.profiling_wrapper()
|
||||||
|
self.assertEqual(res.messages, ("/", "/", "/"))
|
||||||
|
|
||||||
|
def test_call_policy_test_like(self):
|
||||||
|
"""test ak_call_policy without `obj` set, as if it was when testing policies"""
|
||||||
|
expr = ExpressionPolicy.objects.create(
|
||||||
|
name=generate_id(),
|
||||||
|
execution_logging=True,
|
||||||
|
expression="ak_message(request.http_request.path)\nreturn True",
|
||||||
|
)
|
||||||
|
expr2 = ExpressionPolicy.objects.create(
|
||||||
|
name=generate_id(),
|
||||||
|
execution_logging=True,
|
||||||
|
expression=f"""
|
||||||
|
ak_message(request.http_request.path)
|
||||||
|
res = ak_call_policy('{expr.name}')
|
||||||
|
ak_message(request.http_request.path)
|
||||||
|
for msg in res.messages:
|
||||||
|
ak_message(msg)
|
||||||
|
""",
|
||||||
|
)
|
||||||
|
self.request.obj = None
|
||||||
|
proc = PolicyProcess(PolicyBinding(policy=expr2), request=self.request, connection=None)
|
||||||
|
res = proc.profiling_wrapper()
|
||||||
self.assertEqual(res.messages, ("/", "/", "/"))
|
self.assertEqual(res.messages, ("/", "/", "/"))
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user