Compare commits


2 Commits

Author SHA1 Message Date
27e9e892e7 web: update the type names for routes.
While writing up the commit message for the previous commit, I realized that I didn't really like
the type names as they're laid out in `routeUtils`. This is much more explicit, describing the
four types of routes we have: ListRoute, ViewRoute, InternalRedirect, ExternalRedirect.
2024-03-14 14:40:19 -07:00
70bf745c0c web: remove a lot of duplication from the Routes
This commit extracts the highly repetitive `new Route(new RegExp(...))` syntax into a mappable
function, leaving only the regular expression and the asynchronous loader behind.  In the process,
it creates a typed description of what kinds of routes we have, and uses type-level pattern matching
to generate the route handlers and redirects.

For example, it establishes that for the purposes of discriminating between redirects and loaders,
the loader type is irrelevant to routing correctly. The loader type *is* still well-typed to prevent
anyone from putting an incompatible loader into a route, but the two different loader types -- ones
that take arguments, and ones that do not -- do not matter to the route builder.

Likewise, the two different kinds of redirects *do* matter, but only because JavaScript makes a
distinction between methods of one argument that can be `call`'ed and methods of more than one
argument that must be `apply`'d.

I suppose I could make this irrelevant by enforcing that the second argument to RawRedirect always
be an Array, but that's not ergonomic, and when the `match()` function has such a lovely and simple
way of distinguishing between the two forms, ergonomics always wins.

This might seem like a curious bit of cleanup, but by exposing the regular expressions as strings,
independent of the Routes that they ultimately represent, we get the power to associate paths to
routable objects with paths to navigable objects in the Sidebar, *and* ultimately with paths to
navigable objects in a command palette. It might also lead to replacing our Routes infrastructure
with an off-the-shelf router such as [Vaadin.router](https://github.com/vaadin/router) or Justin
Fagnani's preferred [page.js](https://visionmedia.github.io/page.js/).
2024-03-14 14:19:41 -07:00
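
The refactor described in the two commits above might look roughly like the following TypeScript sketch. Only the names ListRoute, ViewRoute, InternalRedirect, ExternalRedirect, the `new Route(new RegExp(...))` idiom, and `match()` come from the commit messages; the `RouteDescription` union, the `toRoute()` helper, the loader signatures, and the example paths are assumptions made for illustration, not authentik's actual `routeUtils` API.

```typescript
// A sketch only: the union members are named after the commit messages,
// everything else here is hypothetical.

type Loader = () => Promise<unknown>;
type ArgLoader = (...captured: string[]) => Promise<unknown>;

type RouteDescription =
    // For telling loaders apart from redirects, the loader's exact shape is
    // irrelevant; it only has to be some kind of loader.
    | { kind: "ListRoute"; pattern: string; loader: Loader }
    | { kind: "ViewRoute"; pattern: string; loader: ArgLoader }
    // The two redirect kinds differ: one is a fixed target, the other is
    // computed from the regular expression's capture groups.
    | { kind: "InternalRedirect"; pattern: string; to: string }
    | { kind: "ExternalRedirect"; pattern: string; to: (...captured: string[]) => string };

// Hypothetical stand-in for the Route class the commits refer to.
class Route {
    constructor(
        private pattern: RegExp,
        private handler: (...captured: string[]) => Promise<unknown> | string,
    ) {}

    // match() hides the one-argument vs. many-arguments distinction: whatever
    // the handler's arity, the capture groups are spread into it.
    match(path: string): Promise<unknown> | string | undefined {
        const result = this.pattern.exec(path);
        return result ? this.handler(...result.slice(1)) : undefined;
    }
}

// The "mappable function": each description keeps only its pattern and its
// loader or redirect target; `new Route(new RegExp(...))` is written once.
function toRoute(description: RouteDescription): Route {
    switch (description.kind) {
        case "ListRoute":
        case "ViewRoute":
            return new Route(new RegExp(description.pattern), description.loader);
        case "InternalRedirect": {
            const target = description.to;
            return new Route(new RegExp(description.pattern), () => target);
        }
        case "ExternalRedirect":
            // Multi-argument redirects get the capture groups spread into them
            // by match(), which is what makes them "apply"-like.
            return new Route(new RegExp(description.pattern), description.to);
    }
}

// Because the patterns stay plain strings, the same table could also feed a
// sidebar or a command palette. The paths below are invented for the example.
const descriptions: RouteDescription[] = [
    { kind: "ListRoute", pattern: "^/library$", loader: async () => ({ page: "library" }) },
    { kind: "ViewRoute", pattern: "^/users/(\\d+)$", loader: async (id) => ({ page: "user", id }) },
    { kind: "InternalRedirect", pattern: "^/$", to: "/library" },
    { kind: "ExternalRedirect", pattern: "^/docs/(.+)$", to: (page) => `https://docs.example.invalid/${page}` },
];

const routes: Route[] = descriptions.map(toRoute);

// Minimal usage: resolve a path against the generated routes.
const hit = routes.map((route) => route.match("/users/42")).find((result) => result !== undefined);
console.log(hit);
```

Under a shape like this, adding a fifth kind of route means extending the union and adding one branch to `toRoute()`, and the pattern strings remain available to a sidebar or command palette without going through the router.
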
671 changed files with 11970 additions and 48457 deletions

View File

@ -1,5 +1,5 @@
[bumpversion]
current_version = 2024.4.2
current_version = 2024.2.2
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
@ -21,8 +21,6 @@ optional_value = final
[bumpversion:file:schema.yml]
[bumpversion:file:blueprints/schema.json]
[bumpversion:file:authentik/__init__.py]
[bumpversion:file:internal/constants/constants.go]

.github/FUNDING.yml vendored
View File

@ -1 +1 @@
custom: https://goauthentik.io/pricing/
github: [BeryJu]

View File

@ -12,7 +12,7 @@ should_build = str(os.environ.get("DOCKER_USERNAME", None) is not None).lower()
branch_name = os.environ["GITHUB_REF"]
if os.environ.get("GITHUB_HEAD_REF", "") != "":
branch_name = os.environ["GITHUB_HEAD_REF"]
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-").replace("'", "-")
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")
image_names = os.getenv("IMAGE_NAME").split(",")
image_arch = os.getenv("IMAGE_ARCH") or None
@ -54,9 +54,9 @@ image_main_tag = image_tags[0]
image_tags_rendered = ",".join(image_tags)
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print(f"shouldBuild={should_build}", file=_output)
print(f"sha={sha}", file=_output)
print(f"version={version}", file=_output)
print(f"prerelease={prerelease}", file=_output)
print(f"imageTags={image_tags_rendered}", file=_output)
print(f"imageMainTag={image_main_tag}", file=_output)
print("shouldBuild=%s" % should_build, file=_output)
print("sha=%s" % sha, file=_output)
print("version=%s" % version, file=_output)
print("prerelease=%s" % prerelease, file=_output)
print("imageTags=%s" % image_tags_rendered, file=_output)
print("imageMainTag=%s" % image_main_tag, file=_output)

View File

@ -16,25 +16,25 @@ runs:
sudo apt-get update
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry
uses: actions/setup-python@v5
uses: actions/setup-python@v4
with:
python-version-file: "pyproject.toml"
cache: "poetry"
- name: Setup node
uses: actions/setup-node@v4
uses: actions/setup-node@v3
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Setup go
uses: actions/setup-go@v5
uses: actions/setup-go@v4
with:
go-version-file: "go.mod"
- name: Setup dependencies
shell: bash
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/docker-compose.yml up -d
docker-compose -f .github/actions/setup/docker-compose.yml up -d
poetry install
cd web && npm ci
- name: Generate config

View File

@ -1,65 +0,0 @@
name: authentik-api-py-publish
on:
push:
branches: [main]
paths:
- "schema.yml"
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
permissions:
id-token: write
steps:
- id: generate_token
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v4
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Install poetry & deps
shell: bash
run: |
pipx install poetry || true
sudo apt-get update
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry
uses: actions/setup-python@v5
with:
python-version-file: "pyproject.toml"
cache: "poetry"
- name: Generate API Client
run: make gen-client-py
- name: Publish package
working-directory: gen-py-api/
run: |
poetry build
- name: Publish package to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
packages-dir: gen-py-api/dist/
# We can't easily upgrade the API client being used due to poetry being poetry
# so we'll have to rely on dependabot
# - name: Upgrade /
# run: |
# export VERSION=$(cd gen-py-api && poetry version -s)
# poetry add "authentik_client=$VERSION" --allow-prereleases --lock
# - uses: peter-evans/create-pull-request@v6
# id: cpr
# with:
# token: ${{ steps.generate_token.outputs.token }}
# branch: update-root-api-client
# commit-message: "root: bump API Client version"
# title: "root: bump API Client version"
# body: "root: bump API Client version"
# delete-branch: true
# signoff: true
# # ID from https://api.github.com/users/authentik-automation[bot]
# author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
# - uses: peter-evans/enable-pull-request-automerge@v3
# with:
# token: ${{ steps.generate_token.outputs.token }}
# pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
# merge-method: squash

View File

@ -7,6 +7,8 @@ on:
- main
- next
- version-*
paths-ignore:
- website/**
pull_request:
branches:
- main
@ -130,7 +132,7 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Create k8s Kind Cluster
uses: helm/kind-action@v1.10.0
uses: helm/kind-action@v1.9.0
- name: run integration
run: |
poetry run coverage run manage.py test tests/integration
@ -160,8 +162,6 @@ jobs:
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
- name: radius
glob: tests/e2e/test_provider_radius*
- name: scim
glob: tests/e2e/test_source_scim*
- name: flows
glob: tests/e2e/test_flows*
steps:
@ -170,7 +170,7 @@ jobs:
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
run: |
docker compose -f tests/e2e/docker-compose.yml up -d
docker-compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v4
with:

View File

@ -29,7 +29,7 @@ jobs:
- name: Generate API
run: make gen-client-go
- name: golangci-lint
uses: golangci/golangci-lint-action@v6
uses: golangci/golangci-lint-action@v4
with:
version: v1.54.2
args: --timeout 5000s --verbose

View File

@ -34,13 +34,6 @@ jobs:
- name: Eslint
working-directory: ${{ matrix.project }}/
run: npm run lint
lint-lockfile:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- working-directory: web/
run: |
[ -z "$(jq -r '.packages | to_entries[] | select((.key | startswith("node_modules")) and (.value | has("resolved") | not)) | .key' < package-lock.json)" ]
lint-build:
runs-on: ubuntu-latest
steps:
@ -102,7 +95,6 @@ jobs:
run: npm run lit-analyse
ci-web-mark:
needs:
- lint-lockfile
- lint-eslint
- lint-prettier
- lint-lit-analyse

View File

@ -12,13 +12,6 @@ on:
- version-*
jobs:
lint-lockfile:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- working-directory: website/
run: |
[ -z "$(jq -r '.packages | to_entries[] | select((.key | startswith("node_modules")) and (.value | has("resolved") | not)) | .key' < package-lock.json)" ]
lint-prettier:
runs-on: ubuntu-latest
steps:
@ -69,7 +62,6 @@ jobs:
run: npm run ${{ matrix.job }}
ci-website-mark:
needs:
- lint-lockfile
- lint-prettier
- test
- build

View File

@ -1,43 +0,0 @@
name: authentik-gen-update-webauthn-mds
on:
workflow_dispatch:
schedule:
- cron: '30 1 1,15 * *'
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
build:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v4
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Setup authentik env
uses: ./.github/actions/setup
- run: poetry run ak update_webauthn_mds
- uses: peter-evans/create-pull-request@v6
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
branch: update-fido-mds-client
commit-message: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
title: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
body: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
delete-branch: true
signoff: true
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash

View File

@ -155,12 +155,12 @@ jobs:
- uses: actions/checkout@v4
- name: Run test suite in final docker images
run: |
echo "PG_PASS=$(openssl rand 32 | base64)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64)" >> .env
docker compose pull -q
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker-compose pull -q
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test-all
sentry-release:
needs:
- build-server

View File

@ -14,16 +14,16 @@ jobs:
- uses: actions/checkout@v4
- name: Pre-release test
run: |
echo "PG_PASS=$(openssl rand 32 | base64)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64)" >> .env
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker buildx install
mkdir -p ./gen-ts-api
docker build -t testing:latest .
echo "AUTHENTIK_IMAGE=testing" >> .env
echo "AUTHENTIK_TAG=latest" >> .env
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test-all
- id: generate_token
uses: tibdex/github-app-token@v2
with:

View File

@ -23,7 +23,7 @@ jobs:
repo-token: ${{ steps.generate_token.outputs.token }}
days-before-stale: 60
days-before-close: 7
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question
stale-issue-label: wontfix
stale-issue-message: >
This issue has been automatically marked as stale because it has not had

View File

@ -1,4 +1,4 @@
name: authentik-api-ts-publish
name: authentik-web-api-publish
on:
push:
branches: [main]

View File

@ -1,7 +1,7 @@
# syntax=docker/dockerfile:1
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/node:22 as website-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:21 as website-builder
ENV NODE_ENV=production
@ -20,7 +20,7 @@ COPY ./SECURITY.md /work/
RUN npm run build-bundled
# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/node:22 as web-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:21 as web-builder
ENV NODE_ENV=production
@ -38,7 +38,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
RUN npm run build
# Stage 3: Build go proxy
FROM --platform=${BUILDPLATFORM} docker.io/golang:1.22.3-bookworm AS go-builder
FROM --platform=${BUILDPLATFORM} docker.io/golang:1.22.1-bookworm AS go-builder
ARG TARGETOS
ARG TARGETARCH
@ -70,10 +70,10 @@ RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server
# Stage 4: MaxMind GeoIP
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 as geoip
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v6.1 as geoip
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
ENV GEOIPUPDATE_VERBOSE="1"
ENV GEOIPUPDATE_VERBOSE="true"
ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"
@ -84,7 +84,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
# Stage 5: Python dependencies
FROM docker.io/python:3.12.3-slim-bookworm AS python-deps
FROM docker.io/python:3.12.2-slim-bookworm AS python-deps
WORKDIR /ak-root/poetry
@ -110,7 +110,7 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
poetry install --only=main --no-ansi --no-interaction --no-root"
# Stage 6: Run
FROM docker.io/python:3.12.3-slim-bookworm AS final-image
FROM docker.io/python:3.12.2-slim-bookworm AS final-image
ARG GIT_BUILD_HASH
ARG VERSION

View File

@ -9,7 +9,6 @@ PY_SOURCES = authentik tests scripts lifecycle .github
DOCKER_IMAGE ?= "authentik:test"
GEN_API_TS = "gen-ts-api"
GEN_API_PY = "gen-py-api"
GEN_API_GO = "gen-go-api"
pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
@ -19,7 +18,6 @@ pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
-I .github/codespell-words.txt \
-S 'web/src/locales/**' \
-S 'website/developer-docs/api/reference/**' \
authentik \
internal \
cmd \
@ -47,12 +45,12 @@ test-go:
go test -timeout 0 -v -race -cover ./...
test-docker: ## Run all tests in a docker-compose
echo "PG_PASS=$(shell openssl rand 32 | base64)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64)" >> .env
docker compose pull -q
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker-compose pull -q
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test-all
rm -f .env
test: ## Run the server tests and produce a coverage report (locally)
@ -66,7 +64,7 @@ lint-fix: ## Lint and automatically fix errors in the python source code. Repor
codespell -w $(CODESPELL_ARGS)
lint: ## Lint the python and golang sources
bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules
bandit -r $(PY_SOURCES) -x node_modules
golangci-lint run -v
core-install:
@ -139,10 +137,7 @@ gen-clean-ts: ## Remove generated API client for Typescript
gen-clean-go: ## Remove generated API client for Go
rm -rf ./${GEN_API_GO}/
gen-clean-py: ## Remove generated API client for Python
rm -rf ./${GEN_API_PY}/
gen-clean: gen-clean-ts gen-clean-go gen-clean-py ## Remove generated API clients
gen-clean: gen-clean-ts gen-clean-go ## Remove generated API clients
gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescript into the authentik UI Application
docker run \
@ -160,20 +155,6 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
cd ./${GEN_API_TS} && npm i
\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api
gen-client-py: gen-clean-py ## Build and install the authentik API for Python
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \
-i /local/schema.yml \
-g python \
-o /local/${GEN_API_PY} \
-c /local/scripts/api-py-config.yaml \
--additional-properties=packageVersion=${NPM_VERSION} \
--git-repo-id authentik \
--git-user-id goauthentik
pip install ./${GEN_API_PY}
gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates
wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml

View File

@ -25,10 +25,10 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h
## Screenshots
| Light | Dark |
| ----------------------------------------------------------- | ---------------------------------------------------------- |
| ![](https://docs.goauthentik.io/img/screen_apps_light.jpg) | ![](https://docs.goauthentik.io/img/screen_apps_dark.jpg) |
| ![](https://docs.goauthentik.io/img/screen_admin_light.jpg) | ![](https://docs.goauthentik.io/img/screen_admin_dark.jpg) |
| Light | Dark |
| ------------------------------------------------------ | ----------------------------------------------------- |
| ![](https://goauthentik.io/img/screen_apps_light.jpg) | ![](https://goauthentik.io/img/screen_apps_dark.jpg) |
| ![](https://goauthentik.io/img/screen_admin_light.jpg) | ![](https://goauthentik.io/img/screen_admin_dark.jpg) |
## Development

View File

@ -18,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
(.x being the latest patch release for each version)
| Version | Supported |
| --------- | --------- |
| 2023.10.x | ✅ |
| 2024.2.x | ✅ |
| Version | Supported |
| --- | --- |
| 2023.6.x | ✅ |
| 2023.8.x | ✅ |
## Reporting a Vulnerability
@ -31,12 +31,12 @@ To report a vulnerability, send an email to [security@goauthentik.io](mailto:se
authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories:
| Score | Severity |
| ---------- | -------- |
| 0.0 | None |
| 0.1 – 3.9 | Low |
| 4.0 – 6.9 | Medium |
| 7.0 – 8.9 | High |
| Score | Severity |
| --- | --- |
| 0.0 | None |
| 0.1 – 3.9 | Low |
| 4.0 – 6.9 | Medium |
| 7.0 – 8.9 | High |
| 9.0 – 10.0 | Critical |
## Disclosure process

View File

@ -2,7 +2,7 @@
from os import environ
__version__ = "2024.4.2"
__version__ = "2024.2.2"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@ -10,7 +10,7 @@ from rest_framework.response import Response
from rest_framework.views import APIView
from authentik import __version__, get_build_hash
from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version
from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version
from authentik.core.api.utils import PassiveSerializer
@ -19,7 +19,6 @@ class VersionSerializer(PassiveSerializer):
version_current = SerializerMethodField()
version_latest = SerializerMethodField()
version_latest_valid = SerializerMethodField()
build_hash = SerializerMethodField()
outdated = SerializerMethodField()
@ -39,10 +38,6 @@ class VersionSerializer(PassiveSerializer):
return __version__
return version_in_cache
def get_version_latest_valid(self, _) -> bool:
"""Check if latest version is valid"""
return cache.get(VERSION_CACHE_KEY) != VERSION_NULL
def get_outdated(self, instance) -> bool:
"""Check if we're running the latest version"""
return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance))

View File

@ -18,7 +18,6 @@ from authentik.lib.utils.http import get_http_session
from authentik.root.celery import CELERY_APP
LOGGER = get_logger()
VERSION_NULL = "0.0.0"
VERSION_CACHE_KEY = "authentik_latest_version"
VERSION_CACHE_TIMEOUT = 8 * 60 * 60 # 8 hours
# Chop of the first ^ because we want to search the entire string
@ -56,7 +55,7 @@ def clear_update_notifications():
def update_latest_version(self: SystemTask):
"""Update latest version info"""
if CONFIG.get_bool("disable_update_check"):
cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
self.set_status(TaskStatus.WARNING, "Version check disabled.")
return
try:
@ -83,7 +82,7 @@ def update_latest_version(self: SystemTask):
event_dict["message"] = f"Changelog: {match.group()}"
Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
except (RequestException, IndexError) as exc:
cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
self.set_error(exc)

View File

@ -10,3 +10,26 @@ class AuthentikAPIConfig(AppConfig):
label = "authentik_api"
mountpoint = "api/"
verbose_name = "authentik API"
def ready(self) -> None:
from drf_spectacular.extensions import OpenApiAuthenticationExtension
from authentik.api.authentication import TokenAuthentication
# Class is defined here as it needs to be created early enough that drf-spectacular will
# find it, but also won't cause any import issues
class TokenSchema(OpenApiAuthenticationExtension):
"""Auth schema"""
target_class = TokenAuthentication
name = "authentik"
def get_security_definition(self, auto_schema):
"""Auth schema"""
return {
"type": "apiKey",
"in": "header",
"name": "Authorization",
"scheme": "bearer",
}

View File

@ -4,7 +4,6 @@ from hmac import compare_digest
from typing import Any
from django.conf import settings
from drf_spectacular.extensions import OpenApiAuthenticationExtension
from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.exceptions import AuthenticationFailed
from rest_framework.request import Request
@ -103,14 +102,3 @@ class TokenAuthentication(BaseAuthentication):
return None
return (user, None) # pragma: no cover
class TokenSchema(OpenApiAuthenticationExtension):
"""Auth schema"""
target_class = TokenAuthentication
name = "authentik"
def get_security_definition(self, auto_schema):
"""Auth schema"""
return {"type": "http", "scheme": "bearer"}

View File

@ -12,7 +12,6 @@ from drf_spectacular.settings import spectacular_settings
from drf_spectacular.types import OpenApiTypes
from rest_framework.settings import api_settings
from authentik.api.apps import AuthentikAPIConfig
from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA
@ -102,12 +101,3 @@ def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
comp = result["components"]["schemas"][component]
comp["additionalProperties"] = {}
return result
def preprocess_schema_exclude_non_api(endpoints, **kwargs):
"""Filter out all API Views which are not mounted under /api"""
return [
(path, path_regex, method, callback)
for path, path_regex, method, callback in endpoints
if path.startswith("/" + AuthentikAPIConfig.mountpoint)
]

View File

@ -8,8 +8,6 @@ from django.apps import AppConfig
from django.db import DatabaseError, InternalError, ProgrammingError
from structlog.stdlib import BoundLogger, get_logger
from authentik.root.signals import startup
class ManagedAppConfig(AppConfig):
"""Basic reconciliation logic for apps"""
@ -25,12 +23,9 @@ class ManagedAppConfig(AppConfig):
def ready(self) -> None:
self.import_related()
startup.connect(self._on_startup_callback, dispatch_uid=self.label)
return super().ready()
def _on_startup_callback(self, sender, **_):
self._reconcile_global()
self._reconcile_tenant()
return super().ready()
def import_related(self):
"""Automatically import related modules which rely on just being imported

View File

@ -4,14 +4,12 @@ from json import dumps
from typing import Any
from django.core.management.base import BaseCommand, no_translations
from django.db.models import Model, fields
from drf_jsonschema_serializer.convert import converter, field_to_converter
from django.db.models import Model
from drf_jsonschema_serializer.convert import field_to_converter
from rest_framework.fields import Field, JSONField, UUIDField
from rest_framework.relations import PrimaryKeyRelatedField
from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger
from authentik import __version__
from authentik.blueprints.v1.common import BlueprintEntryDesiredState
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
@ -20,23 +18,6 @@ from authentik.lib.models import SerializerModel
LOGGER = get_logger()
@converter
class PrimaryKeyRelatedFieldConverter:
"""Custom primary key field converter which is aware of non-integer based PKs
This is not an exhaustive fix for other non-int PKs, however in authentik we either
use UUIDs or ints"""
field_class = PrimaryKeyRelatedField
def convert(self, field: PrimaryKeyRelatedField):
model: Model = field.queryset.model
pk_field = model._meta.pk
if isinstance(pk_field, fields.UUIDField):
return {"type": "string", "format": "uuid"}
return {"type": "integer"}
class Command(BaseCommand):
"""Generate JSON Schema for blueprints"""
@ -48,7 +29,7 @@ class Command(BaseCommand):
"$schema": "http://json-schema.org/draft-07/schema",
"$id": "https://goauthentik.io/blueprints/schema.json",
"type": "object",
"title": f"authentik {__version__} Blueprint schema",
"title": "authentik Blueprint schema",
"required": ["version", "entries"],
"properties": {
"version": {

View File

@ -39,7 +39,7 @@ def reconcile_app(app_name: str):
def wrapper(*args, **kwargs):
config = apps.get_app_config(app_name)
if isinstance(config, ManagedAppConfig):
config._on_startup_callback(None)
config.ready()
return func(*args, **kwargs)
return wrapper

View File

@ -556,11 +556,7 @@ class BlueprintDumper(SafeDumper):
def factory(items):
final_dict = dict(items)
# Remove internal state variables
final_dict.pop("_state", None)
# Future-proof to only remove the ID if we don't set a value
if "id" in final_dict and final_dict.get("id") is None:
final_dict.pop("id")
return final_dict
data = asdict(data, dict_factory=factory)

View File

@ -19,6 +19,8 @@ from guardian.models import UserObjectPermission
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import BaseSerializer, Serializer
from structlog.stdlib import BoundLogger, get_logger
from structlog.testing import capture_logs
from structlog.types import EventDict
from yaml import load
from authentik.blueprints.v1.common import (
@ -40,7 +42,6 @@ from authentik.core.models import (
from authentik.enterprise.license import LicenseKey
from authentik.enterprise.models import LicenseUsage
from authentik.enterprise.providers.rac.models import ConnectionToken
from authentik.events.logs import LogEvent, capture_logs
from authentik.events.models import SystemTask
from authentik.events.utils import cleanse_dict
from authentik.flows.models import FlowToken, Stage
@ -51,8 +52,6 @@ from authentik.policies.models import Policy, PolicyBindingModel
from authentik.policies.reputation.models import Reputation
from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
from authentik.providers.scim.models import SCIMGroup, SCIMUser
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
from authentik.tenants.models import Tenant
# Context set when the serializer is created in a blueprint context
@ -97,9 +96,6 @@ def excluded_models() -> list[type[Model]]:
AccessToken,
RefreshToken,
Reputation,
WebAuthnDeviceType,
SCIMSourceUser,
SCIMSourceGroup,
)
@ -165,7 +161,7 @@ class Importer:
def updater(value) -> Any:
if value in self.__pk_map:
self.logger.debug("Updating reference in entry", value=value)
self.logger.debug("updating reference in entry", value=value)
return self.__pk_map[value]
return value
@ -254,7 +250,7 @@ class Importer:
model_instance = existing_models.first()
if not isinstance(model(), BaseMetaModel) and model_instance:
self.logger.debug(
"Initialise serializer with instance",
"initialise serializer with instance",
model=model,
instance=model_instance,
pk=model_instance.pk,
@ -264,14 +260,14 @@ class Importer:
elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED:
raise EntryInvalidError.from_entry(
(
f"State is set to {BlueprintEntryDesiredState.MUST_CREATED} "
f"state is set to {BlueprintEntryDesiredState.MUST_CREATED} "
"and object exists already",
),
entry,
)
else:
self.logger.debug(
"Initialised new serializer instance",
"initialised new serializer instance",
model=model,
**cleanse_dict(updated_identifiers),
)
@ -328,7 +324,7 @@ class Importer:
model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
except LookupError:
self.logger.warning(
"App or Model does not exist", app=model_app_label, model=model_name
"app or model does not exist", app=model_app_label, model=model_name
)
return False
# Validate each single entry
@ -340,7 +336,7 @@ class Importer:
if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT:
serializer = exc.serializer
else:
self.logger.warning(f"Entry invalid: {exc}", entry=entry, error=exc)
self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
if raise_errors:
raise exc
return False
@ -360,14 +356,14 @@ class Importer:
and state == BlueprintEntryDesiredState.CREATED
):
self.logger.debug(
"Instance exists, skipping",
"instance exists, skipping",
model=model,
instance=instance,
pk=instance.pk,
)
else:
instance = serializer.save()
self.logger.debug("Updated model", model=instance)
self.logger.debug("updated model", model=instance)
if "pk" in entry.identifiers:
self.__pk_map[entry.identifiers["pk"]] = instance.pk
entry._state = BlueprintEntryState(instance)
@ -375,12 +371,12 @@ class Importer:
instance: Model | None = serializer.instance
if instance.pk:
instance.delete()
self.logger.debug("Deleted model", mode=instance)
self.logger.debug("deleted model", mode=instance)
continue
self.logger.debug("Entry to delete with no instance, skipping")
self.logger.debug("entry to delete with no instance, skipping")
return True
def validate(self, raise_validation_errors=False) -> tuple[bool, list[LogEvent]]:
def validate(self, raise_validation_errors=False) -> tuple[bool, list[EventDict]]:
"""Validate loaded blueprint export, ensure all models are allowed
and serializers have no errors"""
self.logger.debug("Starting blueprint import validation")
@ -394,7 +390,9 @@ class Importer:
):
successful = self._apply_models(raise_errors=raise_validation_errors)
if not successful:
self.logger.warning("Blueprint validation failed")
self.logger.debug("Blueprint validation failed")
for log in logs:
getattr(self.logger, log.get("log_level"))(**log)
self.logger.debug("Finished blueprint import validation")
self._import = orig_import
return successful, logs

View File

@ -30,7 +30,6 @@ from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, E
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.events.logs import capture_logs
from authentik.events.models import TaskStatus
from authentik.events.system_tasks import SystemTask, prefill_task
from authentik.events.utils import sanitize_dict
@ -212,15 +211,14 @@ def apply_blueprint(self: SystemTask, instance_pk: str):
if not valid:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskStatus.ERROR, *logs)
self.set_status(TaskStatus.ERROR, *[x["event"] for x in logs])
return
applied = importer.apply()
if not applied:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskStatus.ERROR, "Failed to apply")
return
with capture_logs() as logs:
applied = importer.apply()
if not applied:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskStatus.ERROR, *logs)
return
instance.status = BlueprintInstanceStatus.SUCCESSFUL
instance.last_applied_hash = file_hash
instance.last_applied = now()

View File

@ -1,21 +0,0 @@
# Generated by Django 5.0.4 on 2024-04-18 18:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_brands", "0005_tenantuuid_to_branduuid"),
]
operations = [
migrations.AddIndex(
model_name="brand",
index=models.Index(fields=["domain"], name="authentik_b_domain_b9b24a_idx"),
),
migrations.AddIndex(
model_name="brand",
index=models.Index(fields=["default"], name="authentik_b_default_3ccf12_idx"),
),
]

View File

@ -84,7 +84,3 @@ class Brand(SerializerModel):
class Meta:
verbose_name = _("Brand")
verbose_name_plural = _("Brands")
indexes = [
models.Index(fields=["domain"]),
models.Index(fields=["default"]),
]

View File

@ -20,15 +20,15 @@ from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
from structlog.testing import capture_logs
from authentik.admin.api.metrics import CoordinateSerializer
from authentik.api.pagination import Pagination
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.models import Application, User
from authentik.events.logs import LogEventSerializer, capture_logs
from authentik.events.models import EventAction
from authentik.events.utils import sanitize_dict
from authentik.lib.utils.file import (
FilePathSerializer,
FileUploadSerializer,
@ -37,19 +37,16 @@ from authentik.lib.utils.file import (
)
from authentik.policies.api.exec import PolicyTestResultSerializer
from authentik.policies.engine import PolicyEngine
from authentik.policies.types import CACHE_PREFIX, PolicyResult
from authentik.policies.types import PolicyResult
from authentik.rbac.decorators import permission_required
from authentik.rbac.filters import ObjectFilter
LOGGER = get_logger()
def user_app_cache_key(user_pk: str, page_number: int | None = None) -> str:
def user_app_cache_key(user_pk: str) -> str:
"""Cache key where application list for user is saved"""
key = f"{CACHE_PREFIX}/app_access/{user_pk}"
if page_number:
key += f"/{page_number}"
return key
return f"goauthentik.io/core/app_access/{user_pk}"
class ApplicationSerializer(ModelSerializer):
@ -185,9 +182,9 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
if request.user.is_superuser:
log_messages = []
for log in logs:
if log.attributes.get("process", "") == "PolicyProcess":
if log.get("process", "") == "PolicyProcess":
continue
log_messages.append(LogEventSerializer(log).data)
log_messages.append(sanitize_dict(log))
result.log_messages = log_messages
response = PolicyTestResultSerializer(result)
return Response(response.data)
@ -217,8 +214,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
return super().list(request)
queryset = self._filter_queryset_for_list(self.get_queryset())
paginator: Pagination = self.paginator
paginated_apps = paginator.paginate_queryset(queryset, request)
pagined_apps = self.paginate_queryset(queryset)
if "for_user" in request.query_params:
try:
@ -232,22 +228,20 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
raise ValidationError({"for_user": "User not found"})
except ValueError as exc:
raise ValidationError from exc
allowed_applications = self._get_allowed_applications(paginated_apps, user=for_user)
allowed_applications = self._get_allowed_applications(pagined_apps, user=for_user)
serializer = self.get_serializer(allowed_applications, many=True)
return self.get_paginated_response(serializer.data)
allowed_applications = []
if not should_cache:
allowed_applications = self._get_allowed_applications(paginated_apps)
allowed_applications = self._get_allowed_applications(pagined_apps)
if should_cache:
allowed_applications = cache.get(
user_app_cache_key(self.request.user.pk, paginator.page.number)
)
allowed_applications = cache.get(user_app_cache_key(self.request.user.pk))
if not allowed_applications:
LOGGER.debug("Caching allowed application list", page=paginator.page.number)
allowed_applications = self._get_allowed_applications(paginated_apps)
LOGGER.debug("Caching allowed application list")
allowed_applications = self._get_allowed_applications(pagined_apps)
cache.set(
user_app_cache_key(self.request.user.pk, paginator.page.number),
user_app_cache_key(self.request.user.pk),
allowed_applications,
timeout=86400,
)

View File

@ -5,19 +5,13 @@ from json import loads
from django.http import Http404
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
from django_filters.filterset import FilterSet
from drf_spectacular.utils import (
OpenApiParameter,
OpenApiResponse,
extend_schema,
extend_schema_field,
)
from drf_spectacular.utils import OpenApiResponse, extend_schema
from guardian.shortcuts import get_objects_for_user
from rest_framework.decorators import action
from rest_framework.fields import CharField, IntegerField, SerializerMethodField
from rest_framework.fields import CharField, IntegerField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.used_by import UsedByMixin
@ -51,7 +45,9 @@ class GroupSerializer(ModelSerializer):
"""Group Serializer"""
attributes = JSONDictField(required=False)
users_obj = SerializerMethodField(allow_null=True)
users_obj = ListSerializer(
child=GroupMemberSerializer(), read_only=True, source="users", required=False
)
roles_obj = ListSerializer(
child=RoleSerializer(),
read_only=True,
@ -62,19 +58,6 @@ class GroupSerializer(ModelSerializer):
num_pk = IntegerField(read_only=True)
@property
def _should_include_users(self) -> bool:
request: Request = self.context.get("request", None)
if not request:
return True
return str(request.query_params.get("include_users", "true")).lower() == "true"
@extend_schema_field(GroupMemberSerializer(many=True))
def get_users_obj(self, instance: Group) -> list[GroupMemberSerializer] | None:
if not self._should_include_users:
return None
return GroupMemberSerializer(instance.users, many=True).data
def validate_parent(self, parent: Group | None):
"""Validate group parent (if set), ensuring the parent isn't itself"""
if not self.instance or not parent:
@ -101,10 +84,7 @@ class GroupSerializer(ModelSerializer):
extra_kwargs = {
"users": {
"default": list,
},
# TODO: This field isn't unique on the database which is hard to backport
# hence we just validate the uniqueness here
"name": {"validators": [UniqueValidator(Group.objects.all())]},
}
}
@ -150,35 +130,22 @@ class GroupFilter(FilterSet):
fields = ["name", "is_superuser", "members_by_pk", "attributes", "members_by_username"]
class UserAccountSerializer(PassiveSerializer):
"""Account adding/removing operations"""
pk = IntegerField(required=True)
class GroupViewSet(UsedByMixin, ModelViewSet):
"""Group Viewset"""
class UserAccountSerializer(PassiveSerializer):
"""Account adding/removing operations"""
pk = IntegerField(required=True)
queryset = Group.objects.none()
queryset = Group.objects.all().select_related("parent").prefetch_related("users")
serializer_class = GroupSerializer
search_fields = ["name", "is_superuser"]
filterset_class = GroupFilter
ordering = ["name"]
def get_queryset(self):
base_qs = Group.objects.all().select_related("parent").prefetch_related("roles")
if self.serializer_class(context={"request": self.request})._should_include_users:
base_qs = base_qs.prefetch_related("users")
return base_qs
@extend_schema(
parameters=[
OpenApiParameter("include_users", bool, default=True),
]
)
def list(self, request, *args, **kwargs):
return super().list(request, *args, **kwargs)
@permission_required("authentik_core.add_user_to_group")
@permission_required(None, ["authentik_core.add_user"])
@extend_schema(
request=UserAccountSerializer,
responses={
@ -186,13 +153,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
404: OpenApiResponse(description="User not found"),
},
)
@action(
detail=True,
methods=["POST"],
pagination_class=None,
filter_backends=[],
permission_classes=[],
)
@action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
def add_user(self, request: Request, pk: str) -> Response:
"""Add user to group"""
group: Group = self.get_object()
@ -208,7 +169,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
group.users.add(user)
return Response(status=204)
@permission_required("authentik_core.remove_user_from_group")
@permission_required(None, ["authentik_core.add_user"])
@extend_schema(
request=UserAccountSerializer,
responses={
@ -216,13 +177,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
404: OpenApiResponse(description="User not found"),
},
)
@action(
detail=True,
methods=["POST"],
pagination_class=None,
filter_backends=[],
permission_classes=[],
)
@action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
def remove_user(self, request: Request, pk: str) -> Response:
"""Add user to group"""
group: Group = self.get_object()

View File

@ -2,7 +2,6 @@
from typing import Any
from django.utils.timezone import now
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import OpenApiResponse, extend_schema, inline_serializer
from guardian.shortcuts import assign_perm, get_anonymous_user
@ -21,17 +20,9 @@ from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.users import UserSerializer
from authentik.core.api.utils import PassiveSerializer
from authentik.core.models import (
USER_ATTRIBUTE_TOKEN_EXPIRING,
USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME,
Token,
TokenIntents,
User,
default_token_duration,
)
from authentik.core.models import USER_ATTRIBUTE_TOKEN_EXPIRING, Token, TokenIntents
from authentik.events.models import Event, EventAction
from authentik.events.utils import model_to_dict
from authentik.lib.utils.time import timedelta_from_string
from authentik.rbac.decorators import permission_required
@ -58,32 +49,6 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):
attrs.setdefault("intent", TokenIntents.INTENT_API)
if attrs.get("intent") not in [TokenIntents.INTENT_API, TokenIntents.INTENT_APP_PASSWORD]:
raise ValidationError({"intent": f"Invalid intent {attrs.get('intent')}"})
if attrs.get("intent") == TokenIntents.INTENT_APP_PASSWORD:
# user IS in attrs
user: User = attrs.get("user")
max_token_lifetime = user.group_attributes(request).get(
USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME,
)
max_token_lifetime_dt = default_token_duration()
if max_token_lifetime is not None:
try:
max_token_lifetime_dt = now() + timedelta_from_string(max_token_lifetime)
except ValueError:
pass
if "expires" in attrs and attrs.get("expires") > max_token_lifetime_dt:
raise ValidationError(
{
"expires": (
f"Token expires exceeds maximum lifetime ({max_token_lifetime_dt} UTC)."
)
}
)
elif attrs.get("intent") == TokenIntents.INTENT_API:
# For API tokens, expires cannot be overridden
attrs["expires"] = default_token_duration()
return attrs
class Meta:

View File

@ -85,7 +85,7 @@ class UserGroupSerializer(ModelSerializer):
"""Simplified Group Serializer for user's groups"""
attributes = JSONDictField(required=False)
parent_name = CharField(source="parent.name", read_only=True, allow_null=True)
parent_name = CharField(source="parent.name", read_only=True)
class Meta:
model = Group
@ -113,26 +113,13 @@ class UserSerializer(ModelSerializer):
queryset=Group.objects.all().order_by("name"),
default=list,
)
groups_obj = SerializerMethodField(allow_null=True)
groups_obj = ListSerializer(child=UserGroupSerializer(), read_only=True, source="ak_groups")
uid = CharField(read_only=True)
username = CharField(
max_length=150,
validators=[UniqueValidator(queryset=User.objects.all().order_by("username"))],
)
@property
def _should_include_groups(self) -> bool:
request: Request = self.context.get("request", None)
if not request:
return True
return str(request.query_params.get("include_groups", "true")).lower() == "true"
@extend_schema_field(UserGroupSerializer(many=True))
def get_groups_obj(self, instance: User) -> list[UserGroupSerializer] | None:
if not self._should_include_groups:
return None
return UserGroupSerializer(instance.ak_groups, many=True).data
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
@ -407,19 +394,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
search_fields = ["username", "name", "is_active", "email", "uuid"]
filterset_class = UsersFilter
def get_queryset(self):
base_qs = User.objects.all().exclude_anonymous()
if self.serializer_class(context={"request": self.request})._should_include_groups:
base_qs = base_qs.prefetch_related("ak_groups")
return base_qs
@extend_schema(
parameters=[
OpenApiParameter("include_groups", bool, default=True),
]
)
def list(self, request, *args, **kwargs):
return super().list(request, *args, **kwargs)
def get_queryset(self): # pragma: no cover
return User.objects.all().exclude_anonymous().prefetch_related("ak_groups")
def _create_recovery_link(self) -> tuple[str, Token]:
"""Create a recovery link (when the current brand has a recovery flow set),

View File

@ -0,0 +1,7 @@
"""authentik core exceptions"""
from authentik.lib.sentry import SentryIgnoredException
class PropertyMappingExpressionException(SentryIgnoredException):
"""Error when a PropertyMapping Exception expression could not be parsed or evaluated."""

View File

@ -6,7 +6,6 @@ from django.db.models import Model
from django.http import HttpRequest
from prometheus_client import Histogram
from authentik.core.expression.exceptions import SkipObjectException
from authentik.core.models import User
from authentik.events.models import Event, EventAction
from authentik.lib.expression.evaluator import BaseEvaluator
@ -48,7 +47,6 @@ class PropertyMappingEvaluator(BaseEvaluator):
self._context["request"] = req
req.context.update(**kwargs)
self._context.update(**kwargs)
self._globals["SkipObject"] = SkipObjectException
self.dry_run = dry_run
def handle_error(self, exc: Exception, expression_source: str):

View File

@ -1,13 +0,0 @@
"""authentik core exceptions"""
from authentik.lib.sentry import SentryIgnoredException
class PropertyMappingExpressionException(SentryIgnoredException):
"""Error when a PropertyMapping Exception expression could not be parsed or evaluated."""
class SkipObjectException(PropertyMappingExpressionException):
"""Exception which can be raised in a property mapping to skip syncing an object.
Only applies to Property mappings which sync objects, and not on mappings which transitively
apply to a single user"""

View File

@ -1,34 +1,10 @@
"""custom runserver command"""
from typing import TextIO
from daphne.management.commands.runserver import Command as RunServer
from daphne.server import Server
from authentik.root.signals import post_startup, pre_startup, startup
class SignalServer(Server):
"""Server which signals back to authentik when it finished starting up"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def ready_callable():
pre_startup.send(sender=self)
startup.send(sender=self)
post_startup.send(sender=self)
self.ready_callable = ready_callable
class Command(RunServer):
"""custom runserver command, which doesn't show the misleading django startup message"""
server_cls = SignalServer
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Redirect standard stdout banner from Daphne into the void
# as there are a couple more steps that happen before startup is fully done
self.stdout = TextIO()
def on_bind(self, server_port):
pass

View File

@ -5,7 +5,6 @@ from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
import authentik.core.models
from authentik.lib.generators import generate_id
def set_default_token_key(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
@ -17,10 +16,6 @@ def set_default_token_key(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
token.save()
def default_token_key():
return generate_id(60)
class Migration(migrations.Migration):
replaces = [
("authentik_core", "0012_auto_20201003_1737"),
@ -67,7 +62,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name="token",
name="key",
field=models.TextField(default=default_token_key),
field=models.TextField(default=authentik.core.models.default_token_key),
),
migrations.AlterUniqueTogether(
name="token",

View File

@ -7,10 +7,9 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def backport_is_backchannel(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
from authentik.providers.ldap.models import LDAPProvider
from authentik.providers.scim.models import SCIMProvider
from authentik.core.models import BackchannelProvider
for model in [LDAPProvider, SCIMProvider]:
for model in BackchannelProvider.__subclasses__():
try:
for obj in model.objects.only("is_backchannel"):
obj.is_backchannel = True

View File

@ -1,31 +0,0 @@
# Generated by Django 5.0.2 on 2024-02-29 10:15
from django.db import migrations, models
import authentik.core.models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0033_alter_user_options"),
("authentik_tenants", "0002_tenant_default_token_duration_and_more"),
]
operations = [
migrations.AlterField(
model_name="authenticatedsession",
name="expires",
field=models.DateTimeField(default=None, null=True),
),
migrations.AlterField(
model_name="token",
name="expires",
field=models.DateTimeField(default=None, null=True),
),
migrations.AlterField(
model_name="token",
name="key",
field=models.TextField(default=authentik.core.models.default_token_key),
),
]

View File

@ -1,52 +0,0 @@
# Generated by Django 5.0.4 on 2024-04-15 11:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("auth", "0012_alter_user_first_name_max_length"),
("authentik_core", "0034_alter_authenticatedsession_expires_and_more"),
("authentik_rbac", "0003_alter_systempermission_options"),
]
operations = [
migrations.AlterModelOptions(
name="group",
options={
"permissions": [
("add_user_to_group", "Add user to group"),
("remove_user_from_group", "Remove user from group"),
],
"verbose_name": "Group",
"verbose_name_plural": "Groups",
},
),
migrations.AddIndex(
model_name="group",
index=models.Index(fields=["name"], name="authentik_c_name_9ba8e4_idx"),
),
migrations.AddIndex(
model_name="user",
index=models.Index(fields=["last_login"], name="authentik_c_last_lo_f0179a_idx"),
),
migrations.AddIndex(
model_name="user",
index=models.Index(
fields=["password_change_date"], name="authentik_c_passwor_eec915_idx"
),
),
migrations.AddIndex(
model_name="user",
index=models.Index(fields=["uuid"], name="authentik_c_uuid_3dae2f_idx"),
),
migrations.AddIndex(
model_name="user",
index=models.Index(fields=["path"], name="authentik_c_path_b1f502_idx"),
),
migrations.AddIndex(
model_name="user",
index=models.Index(fields=["type"], name="authentik_c_type_ecf60d_idx"),
),
]

View File

@ -1,6 +1,6 @@
"""authentik core models"""
from datetime import datetime
from datetime import timedelta
from hashlib import sha256
from typing import Any, Optional, Self
from uuid import uuid4
@ -22,19 +22,18 @@ from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger
from authentik.blueprints.models import ManagedModel
from authentik.core.expression.exceptions import PropertyMappingExpressionException
from authentik.core.exceptions import PropertyMappingExpressionException
from authentik.core.types import UILoginButton, UserSettingSerializer
from authentik.lib.avatars import get_avatar
from authentik.lib.config import CONFIG
from authentik.lib.generators import generate_id
from authentik.lib.models import (
CreatedUpdatedModel,
DomainlessFormattedURLValidator,
SerializerModel,
)
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.models import PolicyBindingModel
from authentik.tenants.models import DEFAULT_TOKEN_DURATION, DEFAULT_TOKEN_LENGTH
from authentik.tenants.utils import get_current_tenant, get_unique_identifier
from authentik.tenants.utils import get_unique_identifier
LOGGER = get_logger()
USER_ATTRIBUTE_DEBUG = "goauthentik.io/user/debug"
@ -43,42 +42,33 @@ USER_ATTRIBUTE_EXPIRES = "goauthentik.io/user/expires"
USER_ATTRIBUTE_DELETE_ON_LOGOUT = "goauthentik.io/user/delete-on-logout"
USER_ATTRIBUTE_SOURCES = "goauthentik.io/user/sources"
USER_ATTRIBUTE_TOKEN_EXPIRING = "goauthentik.io/user/token-expires" # nosec
USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME = "goauthentik.io/user/token-maximum-lifetime" # nosec
USER_ATTRIBUTE_CHANGE_USERNAME = "goauthentik.io/user/can-change-username"
USER_ATTRIBUTE_CHANGE_NAME = "goauthentik.io/user/can-change-name"
USER_ATTRIBUTE_CHANGE_EMAIL = "goauthentik.io/user/can-change-email"
USER_PATH_SYSTEM_PREFIX = "goauthentik.io"
USER_PATH_SERVICE_ACCOUNT = USER_PATH_SYSTEM_PREFIX + "/service-accounts"
options.DEFAULT_NAMES = options.DEFAULT_NAMES + (
# used_by API that allows models to specify if they shadow an object
# for example the proxy provider which is built on top of an oauth provider
"authentik_used_by_shadows",
# List fields for which changes are not logged (due to them having dedicated objects)
# for example user's password and last_login
"authentik_signals_ignored_fields",
)
def default_token_duration() -> datetime:
def default_token_duration():
"""Default duration a Token is valid"""
current_tenant = get_current_tenant()
token_duration = (
current_tenant.default_token_duration
if hasattr(current_tenant, "default_token_duration")
else DEFAULT_TOKEN_DURATION
)
return now() + timedelta_from_string(token_duration)
return now() + timedelta(minutes=30)
def default_token_key() -> str:
def default_token_key():
"""Default token key"""
current_tenant = get_current_tenant()
token_length = (
current_tenant.default_token_length
if hasattr(current_tenant, "default_token_length")
else DEFAULT_TOKEN_LENGTH
)
# We use generate_id since the chars in the key should be easy
# to use in Emails (for verification) and URLs (for recovery)
return generate_id(token_length)
return generate_id(CONFIG.get_int("default_token_length"))
class UserTypes(models.TextChoices):
@ -177,13 +167,8 @@ class Group(SerializerModel):
"parent",
),
)
indexes = [models.Index(fields=["name"])]
verbose_name = _("Group")
verbose_name_plural = _("Groups")
permissions = [
("add_user_to_group", _("Add user to group")),
("remove_user_from_group", _("Remove user from group")),
]
class UserQuerySet(models.QuerySet):
@ -320,12 +305,13 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
("preview_user", _("Can preview user data sent to providers")),
("view_user_applications", _("View applications the user has access to")),
]
indexes = [
models.Index(fields=["last_login"]),
models.Index(fields=["password_change_date"]),
models.Index(fields=["uuid"]),
models.Index(fields=["path"]),
models.Index(fields=["type"]),
authentik_signals_ignored_fields = [
# Logged by the events `password_set`
# the `password_set` action/signal doesn't currently convey which user
# initiated the password change, so for now we'll log two actions
# ("password", "password_change_date"),
# Logged by `login`
("last_login",),
]
@ -631,9 +617,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel):
"""Get serializer for this model"""
raise NotImplementedError
def __str__(self) -> str:
return f"User-source connection (user={self.user_id}, source={self.source_id})"
class Meta:
unique_together = (("user", "source"),)
@ -641,7 +624,7 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel):
class ExpiringModel(models.Model):
"""Base Model which can expire, and is automatically cleaned up."""
expires = models.DateTimeField(default=None, null=True)
expires = models.DateTimeField(default=default_token_duration)
expiring = models.BooleanField(default=True)
class Meta:
@ -655,7 +638,7 @@ class ExpiringModel(models.Model):
return self.delete(*args, **kwargs)
@classmethod
def filter_not_expired(cls, **kwargs) -> QuerySet["Token"]:
def filter_not_expired(cls, **kwargs) -> QuerySet:
"""Filer for tokens which are not expired yet or are not expiring,
and match filters in `kwargs`"""
for obj in cls.objects.filter(**kwargs).filter(Q(expires__lt=now(), expiring=True)):

View File

@ -10,14 +10,7 @@ from django.dispatch import receiver
from django.http.request import HttpRequest
from structlog.stdlib import get_logger
from authentik.core.models import (
Application,
AuthenticatedSession,
BackchannelProvider,
ExpiringModel,
User,
default_token_duration,
)
from authentik.core.models import Application, AuthenticatedSession, BackchannelProvider, User
# Arguments: user: User, password: str
password_changed = Signal()
@ -68,12 +61,3 @@ def backchannel_provider_pre_save(sender: type[Model], instance: Model, **_):
if not isinstance(instance, BackchannelProvider):
return
instance.is_backchannel = True
@receiver(pre_save)
def expiring_model_pre_save(sender: type[Model], instance: Model, **_):
"""Ensure expires is set on ExpiringModels that are set to expire"""
if not issubclass(sender, ExpiringModel):
return
if instance.expiring and instance.expires is None:
instance.expires = default_token_duration()

View File

@ -16,9 +16,8 @@ from authentik.core.models import Source, SourceUserMatchingModes, User, UserSou
from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION, PostUserEnrollmentStage
from authentik.events.models import Event, EventAction
from authentik.flows.exceptions import FlowNonApplicableException
from authentik.flows.models import Flow, FlowToken, Stage, in_memory_stage
from authentik.flows.models import Flow, Stage, in_memory_stage
from authentik.flows.planner import (
PLAN_CONTEXT_IS_RESTORED,
PLAN_CONTEXT_PENDING_USER,
PLAN_CONTEXT_REDIRECT,
PLAN_CONTEXT_SOURCE,
@ -36,8 +35,6 @@ from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
from authentik.stages.user_write.stage import PLAN_CONTEXT_USER_PATH
SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token" # nosec
class Action(Enum):
"""Actions that can be decided based on the request
@ -100,6 +97,8 @@ class SourceFlowManager:
if self.request.user.is_authenticated:
new_connection.user = self.request.user
new_connection = self.update_connection(new_connection, **kwargs)
new_connection.save()
return Action.LINK, new_connection
existing_connections = self.connection_type.objects.filter(
@ -146,6 +145,7 @@ class SourceFlowManager:
]:
new_connection.user = user
new_connection = self.update_connection(new_connection, **kwargs)
new_connection.save()
return Action.LINK, new_connection
if self.source.user_matching_mode in [
SourceUserMatchingModes.EMAIL_DENY,
@ -222,43 +222,22 @@ class SourceFlowManager:
**kwargs,
) -> HttpResponse:
"""Prepare Authentication Plan, redirect user FlowExecutor"""
# Ensure redirect is carried through when user was trying to
# authorize application
final_redirect = self.request.session.get(SESSION_KEY_GET, {}).get(
NEXT_ARG_NAME, "authentik_core:if-user"
)
kwargs.update(
{
# Since we authenticate the user by their token, they have no backend set
PLAN_CONTEXT_AUTHENTICATION_BACKEND: BACKEND_INBUILT,
PLAN_CONTEXT_SSO: True,
PLAN_CONTEXT_SOURCE: self.source,
PLAN_CONTEXT_REDIRECT: final_redirect,
PLAN_CONTEXT_SOURCES_CONNECTION: connection,
}
)
kwargs.update(self.policy_context)
if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug)
plan = token.plan
plan.context[PLAN_CONTEXT_IS_RESTORED] = token
plan.context.update(kwargs)
for stage in self.get_stages_to_append(flow):
plan.append_stage(stage)
if stages:
for stage in stages:
plan.append_stage(stage)
self.request.session[SESSION_KEY_PLAN] = plan
flow_slug = token.flow.slug
token.delete()
return redirect_with_qs(
"authentik_core:if-flow",
self.request.GET,
flow_slug=flow_slug,
)
# Ensure redirect is carried through when user was trying to
# authorize application
final_redirect = self.request.session.get(SESSION_KEY_GET, {}).get(
NEXT_ARG_NAME, "authentik_core:if-user"
)
if PLAN_CONTEXT_REDIRECT not in kwargs:
kwargs[PLAN_CONTEXT_REDIRECT] = final_redirect
if not flow:
return bad_request_message(
self.request,

View File

@ -2,9 +2,7 @@
from datetime import datetime, timedelta
from django.conf import ImproperlyConfigured
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.contrib.sessions.backends.db import SessionStore as DBSessionStore
from django.core.cache import cache
from django.utils.timezone import now
from structlog.stdlib import get_logger
@ -17,7 +15,6 @@ from authentik.core.models import (
User,
)
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
from authentik.lib.config import CONFIG
from authentik.root.celery import CELERY_APP
LOGGER = get_logger()
@ -42,31 +39,16 @@ def clean_expired_models(self: SystemTask):
amount = 0
for session in AuthenticatedSession.objects.all():
match CONFIG.get("session_storage", "cache"):
case "cache":
cache_key = f"{KEY_PREFIX}{session.session_key}"
value = None
try:
value = cache.get(cache_key)
cache_key = f"{KEY_PREFIX}{session.session_key}"
value = None
try:
value = cache.get(cache_key)
except Exception as exc:
LOGGER.debug("Failed to get session from cache", exc=exc)
if not value:
session.delete()
amount += 1
case "db":
if not (
DBSessionStore.get_model_class()
.objects.filter(session_key=session.session_key, expire_date__gt=now())
.exists()
):
session.delete()
amount += 1
case _:
# Should never happen, as we check for other values in authentik/root/settings.py
raise ImproperlyConfigured(
"Invalid session_storage setting, allowed values are db and cache"
)
except Exception as exc:
LOGGER.debug("Failed to get session from cache", exc=exc)
if not value:
session.delete()
amount += 1
LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount)
messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}")

View File

@ -1,11 +1,10 @@
"""Test Groups API"""
from django.urls.base import reverse
from guardian.shortcuts import assign_perm
from rest_framework.test import APITestCase
from authentik.core.models import Group, User
from authentik.core.tests.utils import create_test_admin_user, create_test_user
from authentik.core.tests.utils import create_test_admin_user
from authentik.lib.generators import generate_id
@ -13,22 +12,13 @@ class TestGroupsAPI(APITestCase):
"""Test Groups API"""
def setUp(self) -> None:
self.login_user = create_test_user()
self.admin = create_test_admin_user()
self.user = User.objects.create(username="test-user")
def test_list_with_users(self):
"""Test listing with users"""
admin = create_test_admin_user()
self.client.force_login(admin)
response = self.client.get(reverse("authentik_api:group-list"), {"include_users": "true"})
self.assertEqual(response.status_code, 200)
def test_add_user(self):
"""Test add_user"""
group = Group.objects.create(name=generate_id())
assign_perm("authentik_core.add_user_to_group", self.login_user, group)
assign_perm("authentik_core.view_user", self.login_user)
self.client.force_login(self.login_user)
self.client.force_login(self.admin)
res = self.client.post(
reverse("authentik_api:group-add-user", kwargs={"pk": group.pk}),
data={
@ -42,9 +32,7 @@ class TestGroupsAPI(APITestCase):
def test_add_user_404(self):
"""Test add_user"""
group = Group.objects.create(name=generate_id())
assign_perm("authentik_core.add_user_to_group", self.login_user, group)
assign_perm("authentik_core.view_user", self.login_user)
self.client.force_login(self.login_user)
self.client.force_login(self.admin)
res = self.client.post(
reverse("authentik_api:group-add-user", kwargs={"pk": group.pk}),
data={
@ -56,10 +44,8 @@ class TestGroupsAPI(APITestCase):
def test_remove_user(self):
"""Test remove_user"""
group = Group.objects.create(name=generate_id())
assign_perm("authentik_core.remove_user_from_group", self.login_user, group)
assign_perm("authentik_core.view_user", self.login_user)
group.users.add(self.user)
self.client.force_login(self.login_user)
self.client.force_login(self.admin)
res = self.client.post(
reverse("authentik_api:group-remove-user", kwargs={"pk": group.pk}),
data={
@ -73,10 +59,8 @@ class TestGroupsAPI(APITestCase):
def test_remove_user_404(self):
"""Test remove_user"""
group = Group.objects.create(name=generate_id())
assign_perm("authentik_core.remove_user_from_group", self.login_user, group)
assign_perm("authentik_core.view_user", self.login_user)
group.users.add(self.user)
self.client.force_login(self.login_user)
self.client.force_login(self.admin)
res = self.client.post(
reverse("authentik_api:group-remove-user", kwargs={"pk": group.pk}),
data={
@ -88,12 +72,11 @@ class TestGroupsAPI(APITestCase):
def test_parent_self(self):
"""Test parent"""
group = Group.objects.create(name=generate_id())
assign_perm("view_group", self.login_user, group)
assign_perm("change_group", self.login_user, group)
self.client.force_login(self.login_user)
self.client.force_login(self.admin)
res = self.client.patch(
reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
data={
"pk": self.user.pk + 3,
"parent": group.pk,
},
)

View File

@ -3,7 +3,7 @@
from django.test import RequestFactory, TestCase
from guardian.shortcuts import get_anonymous_user
from authentik.core.expression.exceptions import PropertyMappingExpressionException
from authentik.core.exceptions import PropertyMappingExpressionException
from authentik.core.models import PropertyMapping
from authentik.core.tests.utils import create_test_admin_user
from authentik.events.models import Event, EventAction
@ -66,11 +66,14 @@ class TestPropertyMappings(TestCase):
expression="return request.http_request.path",
)
http_request = self.factory.get("/")
tmpl = f"""
res = ak_call_policy('{expr.name}')
tmpl = (
"""
res = ak_call_policy('%s')
result = [request.http_request.path, res.raw_result]
return result
"""
% expr.name
)
evaluator = PropertyMapping(expression=tmpl, name=generate_id())
res = evaluator.evaluate(self.user, http_request)
self.assertEqual(res, ["/", "/"])

View File

@ -48,21 +48,15 @@ class TestSourceFlowManager(TestCase):
def test_authenticated_link(self):
"""Test authenticated user linking"""
UserOAuthSourceConnection.objects.create(
user=get_anonymous_user(), source=self.source, identifier=self.identifier
)
user = User.objects.create(username="foo", email="foo@bar.baz")
flow_manager = OAuthSourceFlowManager(
self.source, get_request("/", user=user), self.identifier, {}
)
action, connection = flow_manager.get_action()
action, _ = flow_manager.get_action()
self.assertEqual(action, Action.LINK)
self.assertIsNone(connection.pk)
flow_manager.get_flow()
def test_unauthenticated_link(self):
"""Test un-authenticated user linking"""
flow_manager = OAuthSourceFlowManager(self.source, get_request("/"), self.identifier, {})
action, connection = flow_manager.get_action()
self.assertEqual(action, Action.LINK)
self.assertIsNone(connection.pk)
flow_manager.get_flow()
def test_unauthenticated_enroll_email(self):

View File

@ -1,6 +1,5 @@
"""Test token API"""
from datetime import datetime, timedelta
from json import loads
from django.urls.base import reverse
@ -8,13 +7,7 @@ from guardian.shortcuts import get_anonymous_user
from rest_framework.test import APITestCase
from authentik.core.api.tokens import TokenSerializer
from authentik.core.models import (
USER_ATTRIBUTE_TOKEN_EXPIRING,
USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME,
Token,
TokenIntents,
User,
)
from authentik.core.models import USER_ATTRIBUTE_TOKEN_EXPIRING, Token, TokenIntents, User
from authentik.core.tests.utils import create_test_admin_user
from authentik.lib.generators import generate_id
@ -83,77 +76,6 @@ class TestTokenAPI(APITestCase):
self.assertEqual(token.intent, TokenIntents.INTENT_API)
self.assertEqual(token.expiring, False)
def test_token_create_expiring(self):
"""Test token creation endpoint"""
self.user.attributes[USER_ATTRIBUTE_TOKEN_EXPIRING] = True
self.user.save()
response = self.client.post(
reverse("authentik_api:token-list"), {"identifier": "test-token"}
)
self.assertEqual(response.status_code, 201)
token = Token.objects.get(identifier="test-token")
self.assertEqual(token.user, self.user)
self.assertEqual(token.intent, TokenIntents.INTENT_API)
self.assertEqual(token.expiring, True)
def test_token_create_expiring_custom_ok(self):
"""Test token creation endpoint"""
self.user.attributes[USER_ATTRIBUTE_TOKEN_EXPIRING] = True
self.user.attributes[USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME] = "hours=2"
self.user.save()
expires = datetime.now() + timedelta(hours=1)
response = self.client.post(
reverse("authentik_api:token-list"),
{
"identifier": "test-token",
"expires": expires,
"intent": TokenIntents.INTENT_APP_PASSWORD,
},
)
self.assertEqual(response.status_code, 201)
token = Token.objects.get(identifier="test-token")
self.assertEqual(token.user, self.user)
self.assertEqual(token.intent, TokenIntents.INTENT_APP_PASSWORD)
self.assertEqual(token.expiring, True)
self.assertEqual(token.expires.timestamp(), expires.timestamp())
def test_token_create_expiring_custom_nok(self):
"""Test token creation endpoint"""
self.user.attributes[USER_ATTRIBUTE_TOKEN_EXPIRING] = True
self.user.attributes[USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME] = "hours=2"
self.user.save()
expires = datetime.now() + timedelta(hours=3)
response = self.client.post(
reverse("authentik_api:token-list"),
{
"identifier": "test-token",
"expires": expires,
"intent": TokenIntents.INTENT_APP_PASSWORD,
},
)
self.assertEqual(response.status_code, 400)
def test_token_create_expiring_custom_api(self):
"""Test token creation endpoint"""
self.user.attributes[USER_ATTRIBUTE_TOKEN_EXPIRING] = True
self.user.attributes[USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME] = "hours=2"
self.user.save()
expires = datetime.now() + timedelta(seconds=3)
response = self.client.post(
reverse("authentik_api:token-list"),
{
"identifier": "test-token",
"expires": expires,
"intent": TokenIntents.INTENT_API,
},
)
self.assertEqual(response.status_code, 201)
token = Token.objects.get(identifier="test-token")
self.assertEqual(token.user, self.user)
self.assertEqual(token.intent, TokenIntents.INTENT_API)
self.assertEqual(token.expiring, True)
self.assertNotEqual(token.expires.timestamp(), expires.timestamp())
def test_list(self):
"""Test Token List (Test normal authentication)"""
Token.objects.all().delete()

View File

@ -41,12 +41,6 @@ class TestUsersAPI(APITestCase):
)
self.assertEqual(response.status_code, 200)
def test_list_with_groups(self):
"""Test listing with groups"""
self.client.force_login(self.admin)
response = self.client.get(reverse("authentik_api:user-list"), {"include_groups": "true"})
self.assertEqual(response.status_code, 200)
def test_metrics(self):
"""Test user's metrics"""
self.client.force_login(self.admin)

View File

@ -8,6 +8,7 @@ from rest_framework.test import APITestCase
from authentik.core.models import User
from authentik.core.tests.utils import create_test_admin_user
from authentik.lib.config import CONFIG
from authentik.tenants.utils import get_current_tenant
@ -24,6 +25,7 @@ class TestUsersAvatars(APITestCase):
tenant.avatars = mode
tenant.save()
@CONFIG.patch("avatars", "none")
def test_avatars_none(self):
"""Test avatars none"""
self.set_avatar_mode("none")

View File

@ -4,7 +4,7 @@ from django.utils.text import slugify
from authentik.brands.models import Brand
from authentik.core.models import Group, User
from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg
from authentik.crypto.builder import CertificateBuilder
from authentik.crypto.models import CertificateKeyPair
from authentik.flows.models import Flow, FlowDesignation
from authentik.lib.generators import generate_id
@ -50,10 +50,12 @@ def create_test_brand(**kwargs) -> Brand:
return Brand.objects.create(domain=uid, default=True, **kwargs)
def create_test_cert(alg=PrivateKeyAlg.RSA) -> CertificateKeyPair:
def create_test_cert(use_ec_private_key=False) -> CertificateKeyPair:
"""Generate a certificate for testing"""
builder = CertificateBuilder(f"{generate_id()}.self-signed.goauthentik.io")
builder.alg = alg
builder = CertificateBuilder(
name=f"{generate_id()}.self-signed.goauthentik.io",
use_ec_private_key=use_ec_private_key,
)
builder.build(
subject_alt_names=[f"{generate_id()}.self-signed.goauthentik.io"],
validity_days=360,

View File

@ -14,13 +14,7 @@ from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import (
CharField,
ChoiceField,
DateTimeField,
IntegerField,
SerializerMethodField,
)
from rest_framework.fields import CharField, DateTimeField, IntegerField, SerializerMethodField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.request import Request
from rest_framework.response import Response
@ -32,7 +26,7 @@ from authentik.api.authorization import SecretKeyFilter
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer
from authentik.crypto.apps import MANAGED_KEY
from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg
from authentik.crypto.builder import CertificateBuilder
from authentik.crypto.models import CertificateKeyPair
from authentik.events.models import Event, EventAction
from authentik.rbac.decorators import permission_required
@ -184,7 +178,6 @@ class CertificateGenerationSerializer(PassiveSerializer):
common_name = CharField()
subject_alt_name = CharField(required=False, allow_blank=True, label=_("Subject-alt name"))
validity_days = IntegerField(initial=365)
alg = ChoiceField(default=PrivateKeyAlg.RSA, choices=PrivateKeyAlg.choices)
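The serializer above feeds the builder call in the viewset hunk further down; a hedged end-to-end sketch of that flow using only calls visible in this compare, with an illustrative common name:

```python
from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg

builder = CertificateBuilder("example.self-signed.goauthentik.io")
builder.alg = PrivateKeyAlg.ECDSA  # RSA is the default when the field is left untouched
builder.build(
    subject_alt_names=["example.self-signed.goauthentik.io"],
    validity_days=365,
)
# the builder now holds the generated private key and self-signed certificate
```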
class CertificateKeyPairFilter(FilterSet):
@ -247,7 +240,6 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
raw_san = data.validated_data.get("subject_alt_name", "")
sans = raw_san.split(",") if raw_san != "" else []
builder = CertificateBuilder(data.validated_data["common_name"])
builder.alg = data.validated_data["alg"]
builder.build(
subject_alt_names=sans,
validity_days=int(data.validated_data["validity_days"]),

View File

@ -9,28 +9,20 @@ from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import ec, rsa
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
from cryptography.x509.oid import NameOID
from django.db import models
from django.utils.translation import gettext_lazy as _
from authentik import __version__
from authentik.crypto.models import CertificateKeyPair
class PrivateKeyAlg(models.TextChoices):
"""Algorithm to create private key with"""
RSA = "rsa", _("rsa")
ECDSA = "ecdsa", _("ecdsa")
class CertificateBuilder:
"""Build self-signed certificates"""
common_name: str
alg: PrivateKeyAlg
def __init__(self, name: str):
self.alg = PrivateKeyAlg.RSA
_use_ec_private_key: bool
def __init__(self, name: str, use_ec_private_key=False):
self._use_ec_private_key = use_ec_private_key
self.__public_key = None
self.__private_key = None
self.__builder = None
@ -50,13 +42,11 @@ class CertificateBuilder:
def generate_private_key(self) -> PrivateKeyTypes:
"""Generate private key"""
if self.alg == PrivateKeyAlg.ECDSA:
if self._use_ec_private_key:
return ec.generate_private_key(curve=ec.SECP256R1())
if self.alg == PrivateKeyAlg.RSA:
return rsa.generate_private_key(
public_exponent=65537, key_size=4096, backend=default_backend()
)
raise ValueError(f"Invalid alg: {self.alg}")
return rsa.generate_private_key(
public_exponent=65537, key_size=4096, backend=default_backend()
)
def build(
self,

View File

@ -13,9 +13,9 @@ class AuthentikEnterpriseAuditConfig(EnterpriseConfig):
verbose_name = "authentik Enterprise.Audit"
default = True
def ready(self):
@EnterpriseConfig.reconcile_global
def install_middleware(self):
"""Install enterprise audit middleware"""
orig_import = "authentik.events.middleware.AuditMiddleware"
new_import = "authentik.enterprise.audit.middleware.EnterpriseAuditMiddleware"
settings.MIDDLEWARE = [new_import if x == orig_import else x for x in settings.MIDDLEWARE]
return super().ready()

View File

@ -2,16 +2,16 @@
from copy import deepcopy
from functools import partial
from typing import Any
from django.apps.registry import apps
from django.core.files import File
from django.db import connection
from django.db.models import ManyToManyRel, Model
from django.db.models import Model
from django.db.models.expressions import BaseExpression, Combinable
from django.db.models.signals import post_init
from django.http import HttpRequest
from authentik.core.models import User
from authentik.events.middleware import AuditMiddleware, should_log_model
from authentik.events.utils import cleanse_dict, sanitize_item
@ -28,10 +28,13 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
super().connect(request)
if not self.enabled:
return
user = getattr(request, "user", self.anonymous_user)
if not user.is_authenticated:
user = self.anonymous_user
if not hasattr(request, "request_id"):
return
post_init.connect(
partial(self.post_init_handler, request=request),
partial(self.post_init_handler, user=user, request=request),
dispatch_uid=request.request_id,
weak=False,
)
@ -45,7 +48,7 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
post_init.disconnect(dispatch_uid=request.request_id)
def serialize_simple(self, model: Model) -> dict:
"""Serialize a model in a very simple way. No ForeignKeys or other relationships are
"""Serialize a model in a very simple way. No ForeginKeys or other relationships are
resolved"""
data = {}
deferred_fields = model.get_deferred_fields()
@ -71,12 +74,9 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
for key, value in before.items():
if after.get(key) != value:
diff[key] = {"previous_value": value, "new_value": after.get(key)}
for key, value in after.items():
if key not in before and key not in diff and before.get(key) != value:
diff[key] = {"previous_value": before.get(key), "new_value": value}
return sanitize_item(diff)
def post_init_handler(self, request: HttpRequest, sender, instance: Model, **_):
def post_init_handler(self, user: User, request: HttpRequest, sender, instance: Model, **_):
"""post_init django model handler"""
if not should_log_model(instance):
return
@ -90,6 +90,7 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
def post_save_handler(
self,
user: User,
request: HttpRequest,
sender,
instance: Model,
@ -102,37 +103,15 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
thread_kwargs = {}
if hasattr(instance, "_previous_state") or created:
prev_state = getattr(instance, "_previous_state", {})
if created:
prev_state = {}
# Get current state
new_state = self.serialize_simple(instance)
diff = self.diff(prev_state, new_state)
thread_kwargs["diff"] = diff
return super().post_save_handler(request, sender, instance, created, thread_kwargs, **_)
def m2m_changed_handler( # noqa: PLR0913
self,
request: HttpRequest,
sender,
instance: Model,
action: str,
pk_set: set[Any],
thread_kwargs: dict | None = None,
**_,
):
thread_kwargs = {}
m2m_field = None
# For the audit log we don't care about `pre_` or `post_` so we trim that part off
_, _, action_direction = action.partition("_")
# resolve the "through" model to an actual field
for field in instance._meta.get_fields():
if not isinstance(field, ManyToManyRel):
continue
if field.through == sender:
m2m_field = field
if m2m_field:
# If we're clearing we just set the "flag" to True
if action_direction == "clear":
pk_set = True
thread_kwargs["diff"] = {m2m_field.related_name: {action_direction: pk_set}}
return super().m2m_changed_handler(request, sender, instance, action, thread_kwargs)
if not created:
ignored_field_sets = getattr(instance._meta, "authentik_signals_ignored_fields", [])
for field_set in ignored_field_sets:
if set(diff.keys()) == set(field_set):
return None
return super().post_save_handler(
user, request, sender, instance, created, thread_kwargs, **_
)

View File

@ -1,210 +0,0 @@
from unittest.mock import PropertyMock, patch
from django.apps import apps
from django.conf import settings
from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.core.models import Group, User
from authentik.core.tests.utils import create_test_admin_user
from authentik.events.models import Event, EventAction
from authentik.events.utils import sanitize_item
from authentik.lib.generators import generate_id
class TestEnterpriseAudit(APITestCase):
"""Test audit middleware"""
def setUp(self) -> None:
self.user = create_test_admin_user()
def test_import(self):
"""Ensure middleware is imported when app.ready is called"""
# Revert import swap
orig_import = "authentik.events.middleware.AuditMiddleware"
new_import = "authentik.enterprise.audit.middleware.EnterpriseAuditMiddleware"
settings.MIDDLEWARE = [orig_import if x == new_import else x for x in settings.MIDDLEWARE]
# Re-call ready()
apps.get_app_config("authentik_enterprise_audit").ready()
self.assertIn(
"authentik.enterprise.audit.middleware.EnterpriseAuditMiddleware", settings.MIDDLEWARE
)
@patch(
"authentik.enterprise.audit.middleware.EnterpriseAuditMiddleware.enabled",
PropertyMock(return_value=True),
)
def test_create(self):
"""Test create audit log"""
self.client.force_login(self.user)
username = generate_id()
response = self.client.post(
reverse("authentik_api:user-list"),
data={"name": generate_id(), "username": username, "groups": [], "path": "foo"},
)
user = User.objects.get(username=username)
self.assertEqual(response.status_code, 201)
events = Event.objects.filter(
action=EventAction.MODEL_CREATED,
context__model__model_name="user",
context__model__app="authentik_core",
context__model__pk=user.pk,
)
event = events.first()
self.assertIsNotNone(event)
self.assertIsNotNone(event.context["diff"])
diff = event.context["diff"]
self.assertEqual(
diff,
{
"name": {
"new_value": user.name,
"previous_value": None,
},
"path": {"new_value": "foo", "previous_value": None},
"type": {"new_value": "internal", "previous_value": None},
"uuid": {
"new_value": user.uuid.hex,
"previous_value": None,
},
"email": {"new_value": "", "previous_value": None},
"username": {
"new_value": user.username,
"previous_value": None,
},
"is_active": {"new_value": True, "previous_value": None},
"attributes": {"new_value": {}, "previous_value": None},
"date_joined": {
"new_value": sanitize_item(user.date_joined),
"previous_value": None,
},
"first_name": {"new_value": "", "previous_value": None},
"id": {"new_value": user.pk, "previous_value": None},
"last_name": {"new_value": "", "previous_value": None},
"password": {"new_value": "********************", "previous_value": None},
"password_change_date": {
"new_value": sanitize_item(user.password_change_date),
"previous_value": None,
},
},
)
@patch(
"authentik.enterprise.audit.middleware.EnterpriseAuditMiddleware.enabled",
PropertyMock(return_value=True),
)
def test_update(self):
"""Test update audit log"""
self.client.force_login(self.user)
user = create_test_admin_user()
current_name = user.name
new_name = generate_id()
response = self.client.patch(
reverse("authentik_api:user-detail", kwargs={"pk": user.id}),
data={"name": new_name},
)
user.refresh_from_db()
self.assertEqual(response.status_code, 200)
events = Event.objects.filter(
action=EventAction.MODEL_UPDATED,
context__model__model_name="user",
context__model__app="authentik_core",
context__model__pk=user.pk,
)
event = events.first()
self.assertIsNotNone(event)
self.assertIsNotNone(event.context["diff"])
diff = event.context["diff"]
self.assertEqual(
diff,
{
"name": {
"new_value": new_name,
"previous_value": current_name,
},
},
)
@patch(
"authentik.enterprise.audit.middleware.EnterpriseAuditMiddleware.enabled",
PropertyMock(return_value=True),
)
def test_delete(self):
"""Test delete audit log"""
self.client.force_login(self.user)
user = create_test_admin_user()
response = self.client.delete(
reverse("authentik_api:user-detail", kwargs={"pk": user.id}),
)
self.assertEqual(response.status_code, 204)
events = Event.objects.filter(
action=EventAction.MODEL_DELETED,
context__model__model_name="user",
context__model__app="authentik_core",
context__model__pk=user.pk,
)
event = events.first()
self.assertIsNotNone(event)
self.assertNotIn("diff", event.context)
@patch(
"authentik.enterprise.audit.middleware.EnterpriseAuditMiddleware.enabled",
PropertyMock(return_value=True),
)
def test_m2m_add(self):
"""Test m2m add audit log"""
self.client.force_login(self.user)
user = create_test_admin_user()
group = Group.objects.create(name=generate_id())
response = self.client.post(
reverse("authentik_api:group-add-user", kwargs={"pk": group.group_uuid}),
data={
"pk": user.pk,
},
)
self.assertEqual(response.status_code, 204)
events = Event.objects.filter(
action=EventAction.MODEL_UPDATED,
context__model__model_name="group",
context__model__app="authentik_core",
context__model__pk=group.pk.hex,
)
event = events.first()
self.assertIsNotNone(event)
self.assertIsNotNone(event.context["diff"])
diff = event.context["diff"]
self.assertEqual(
diff,
{"users": {"add": [user.pk]}},
)
@patch(
"authentik.enterprise.audit.middleware.EnterpriseAuditMiddleware.enabled",
PropertyMock(return_value=True),
)
def test_m2m_remove(self):
"""Test m2m remove audit log"""
self.client.force_login(self.user)
user = create_test_admin_user()
group = Group.objects.create(name=generate_id())
response = self.client.post(
reverse("authentik_api:group-remove-user", kwargs={"pk": group.group_uuid}),
data={
"pk": user.pk,
},
)
self.assertEqual(response.status_code, 204)
events = Event.objects.filter(
action=EventAction.MODEL_UPDATED,
context__model__model_name="group",
context__model__app="authentik_core",
context__model__pk=group.pk.hex,
)
event = events.first()
self.assertIsNotNone(event)
self.assertIsNotNone(event.context["diff"])
diff = event.context["diff"]
self.assertEqual(
diff,
{"users": {"remove": [user.pk]}},
)

View File

@ -1,39 +0,0 @@
"""google Property mappings API Views"""
from django_filters.filters import AllValuesMultipleFilter
from django_filters.filterset import FilterSet
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema_field
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.propertymappings import PropertyMappingSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProviderMapping
class GoogleProviderMappingSerializer(PropertyMappingSerializer):
"""GoogleProviderMapping Serializer"""
class Meta:
model = GoogleWorkspaceProviderMapping
fields = PropertyMappingSerializer.Meta.fields
class GoogleProviderMappingFilter(FilterSet):
"""Filter for GoogleProviderMapping"""
managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))
class Meta:
model = GoogleWorkspaceProviderMapping
fields = "__all__"
class GoogleProviderMappingViewSet(UsedByMixin, ModelViewSet):
"""GoogleProviderMapping Viewset"""
queryset = GoogleWorkspaceProviderMapping.objects.all()
serializer_class = GoogleProviderMappingSerializer
filterset_class = GoogleProviderMappingFilter
search_fields = ["name"]
ordering = ["name"]

View File

@ -1,54 +0,0 @@
"""Google Provider API Views"""
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync
from authentik.lib.sync.outgoing.api import OutgoingSyncProviderStatusMixin
class GoogleProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
"""GoogleProvider Serializer"""
class Meta:
model = GoogleWorkspaceProvider
fields = [
"pk",
"name",
"property_mappings",
"property_mappings_group",
"component",
"assigned_backchannel_application_slug",
"assigned_backchannel_application_name",
"verbose_name",
"verbose_name_plural",
"meta_model_name",
"delegated_subject",
"credentials",
"scopes",
"exclude_users_service_account",
"filter_group",
"user_delete_action",
"group_delete_action",
"default_group_email_domain",
]
extra_kwargs = {}
class GoogleProviderViewSet(OutgoingSyncProviderStatusMixin, UsedByMixin, ModelViewSet):
"""GoogleProvider Viewset"""
queryset = GoogleWorkspaceProvider.objects.all()
serializer_class = GoogleProviderSerializer
filterset_fields = [
"name",
"exclude_users_service_account",
"delegated_subject",
"filter_group",
]
search_fields = ["name"]
ordering = ["name"]
sync_single_task = google_workspace_sync

View File

@ -1,9 +0,0 @@
from authentik.enterprise.apps import EnterpriseConfig
class AuthentikEnterpriseProviderGoogleConfig(EnterpriseConfig):
name = "authentik.enterprise.providers.google_workspace"
label = "authentik_providers_google_workspace"
verbose_name = "authentik Enterprise.Providers.Google Workspace"
default = True

View File

@ -1,71 +0,0 @@
from django.db.models import Model
from django.http import HttpResponseNotFound
from google.auth.exceptions import GoogleAuthError, TransportError
from googleapiclient.discovery import build
from googleapiclient.errors import Error, HttpError
from googleapiclient.http import HttpRequest
from httplib2 import HttpLib2Error, HttpLib2ErrorWithResponse
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
from authentik.lib.sync.outgoing import HTTP_CONFLICT
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.exceptions import (
NotFoundSyncException,
ObjectExistsSyncException,
StopSync,
TransientSyncException,
)
class GoogleWorkspaceSyncClient[TModel: Model, TConnection: Model, TSchema: dict](
BaseOutgoingSyncClient[TModel, TConnection, TSchema, GoogleWorkspaceProvider]
):
"""Base client for syncing to google workspace"""
domains: list
def __init__(self, provider: GoogleWorkspaceProvider) -> None:
super().__init__(provider)
self.directory_service = build(
"admin",
"directory_v1",
cache_discovery=False,
**provider.google_credentials(),
)
self.__prefetch_domains()
def __prefetch_domains(self):
self.domains = []
domains = self._request(self.directory_service.domains().list(customer="my_customer"))
for domain in domains.get("domains", []):
domain_name = domain.get("domainName")
self.domains.append(domain_name)
def _request(self, request: HttpRequest):
try:
response = request.execute()
except GoogleAuthError as exc:
if isinstance(exc, TransportError):
raise TransientSyncException(f"Failed to send request: {str(exc)}") from exc
raise StopSync(exc) from exc
except HttpLib2Error as exc:
if isinstance(exc, HttpLib2ErrorWithResponse):
self._response_handle_status_code(exc.response.status, exc)
raise TransientSyncException(f"Failed to send request: {str(exc)}") from exc
except HttpError as exc:
self._response_handle_status_code(exc.status_code, exc)
raise TransientSyncException(f"Failed to send request: {str(exc)}") from exc
except Error as exc:
raise TransientSyncException(f"Failed to send request: {str(exc)}") from exc
return response
def _response_handle_status_code(self, status_code: int, root_exc: Exception):
if status_code == HttpResponseNotFound.status_code:
raise NotFoundSyncException("Object not found") from root_exc
if status_code == HTTP_CONFLICT:
raise ObjectExistsSyncException("Object exists") from root_exc
def check_email_valid(self, *emails: str):
for email in emails:
if not any(email.endswith(f"@{domain_name}") for domain_name in self.domains):
raise TransientSyncException(f"Invalid email domain: {email}")

View File

@ -1,245 +0,0 @@
from deepmerge import always_merger
from django.db import transaction
from django.utils.text import slugify
from authentik.core.expression.exceptions import (
PropertyMappingExpressionException,
SkipObjectException,
)
from authentik.core.models import Group
from authentik.enterprise.providers.google_workspace.clients.base import GoogleWorkspaceSyncClient
from authentik.enterprise.providers.google_workspace.models import (
GoogleWorkspaceDeleteAction,
GoogleWorkspaceProviderGroup,
GoogleWorkspaceProviderMapping,
GoogleWorkspaceProviderUser,
)
from authentik.events.models import Event, EventAction
from authentik.lib.sync.outgoing.base import Direction
from authentik.lib.sync.outgoing.exceptions import (
NotFoundSyncException,
ObjectExistsSyncException,
StopSync,
TransientSyncException,
)
from authentik.lib.utils.errors import exception_to_string
class GoogleWorkspaceGroupClient(
GoogleWorkspaceSyncClient[Group, GoogleWorkspaceProviderGroup, dict]
):
"""Google client for groups"""
connection_type = GoogleWorkspaceProviderGroup
connection_type_query = "group"
can_discover = True
def to_schema(self, obj: Group) -> dict:
"""Convert authentik group"""
raw_google_group = {
"email": f"{slugify(obj.name)}@{self.provider.default_group_email_domain}"
}
for mapping in (
self.provider.property_mappings_group.all().order_by("name").select_subclasses()
):
if not isinstance(mapping, GoogleWorkspaceProviderMapping):
continue
try:
mapping: GoogleWorkspaceProviderMapping
value = mapping.evaluate(
user=None,
request=None,
group=obj,
provider=self.provider,
)
if value is None:
continue
always_merger.merge(raw_google_group, value)
except SkipObjectException as exc:
raise exc from exc
except (PropertyMappingExpressionException, ValueError) as exc:
# Value error can be raised when assigning invalid data to an attribute
Event.new(
EventAction.CONFIGURATION_ERROR,
message=f"Failed to evaluate property-mapping {exception_to_string(exc)}",
mapping=mapping,
).save()
raise StopSync(exc, obj, mapping) from exc
if not raw_google_group:
raise StopSync(ValueError("No group mappings configured"), obj)
return raw_google_group
def delete(self, obj: Group):
"""Delete group"""
google_group = GoogleWorkspaceProviderGroup.objects.filter(
provider=self.provider, group=obj
).first()
if not google_group:
self.logger.debug("Group does not exist in Google, skipping")
return None
with transaction.atomic():
if self.provider.group_delete_action == GoogleWorkspaceDeleteAction.DELETE:
self._request(
self.directory_service.groups().delete(groupKey=google_group.google_id)
)
google_group.delete()
def create(self, group: Group):
"""Create group from scratch and create a connection object"""
google_group = self.to_schema(group)
self.check_email_valid(google_group["email"])
with transaction.atomic():
try:
response = self._request(self.directory_service.groups().insert(body=google_group))
except ObjectExistsSyncException:
# group already exists in google workspace, so we can connect them manually
# for groups we need to fetch the group from google as we connect on
# ID and not group email
group_data = self._request(
self.directory_service.groups().get(groupKey=google_group["email"])
)
GoogleWorkspaceProviderGroup.objects.create(
provider=self.provider, group=group, google_id=group_data["id"]
)
else:
GoogleWorkspaceProviderGroup.objects.create(
provider=self.provider, group=group, google_id=response["id"]
)
def update(self, group: Group, connection: GoogleWorkspaceProviderGroup):
"""Update existing group"""
google_group = self.to_schema(group)
self.check_email_valid(google_group["email"])
try:
return self._request(
self.directory_service.groups().update(
groupKey=connection.google_id,
body=google_group,
)
)
except NotFoundSyncException:
# Resource missing is handled by self.write, which will re-create the group
raise
def write(self, obj: Group):
google_group, created = super().write(obj)
if created:
self.create_sync_members(obj, google_group)
return google_group
def create_sync_members(self, obj: Group, google_group: dict):
"""Sync all members after a group was created"""
users = list(obj.users.order_by("id").values_list("id", flat=True))
connections = GoogleWorkspaceProviderUser.objects.filter(
provider=self.provider, user__pk__in=users
)
for user in connections:
try:
self._request(
self.directory_service.members().insert(
groupKey=google_group["id"],
body={
"email": user.google_id,
},
)
)
except TransientSyncException:
continue
def update_group(self, group: Group, action: Direction, users_set: set[int]):
"""Update a groups members"""
if action == Direction.add:
return self._patch_add_users(group, users_set)
if action == Direction.remove:
return self._patch_remove_users(group, users_set)
def _patch(self, google_group_id: str, direction: Direction, members: list[str]):
for user in members:
try:
if direction == Direction.add:
self._request(
self.directory_service.members().insert(
groupKey=google_group_id, body={"email": user}
)
)
if direction == Direction.remove:
self._request(
self.directory_service.members().delete(
groupKey=google_group_id, memberKey=user
)
)
except ObjectExistsSyncException:
pass
except TransientSyncException:
raise
def _patch_add_users(self, group: Group, users_set: set[int]):
"""Add users in users_set to group"""
if len(users_set) < 1:
return
google_group = GoogleWorkspaceProviderGroup.objects.filter(
provider=self.provider, group=group
).first()
if not google_group:
self.logger.warning(
"could not sync group membership, group does not exist", group=group
)
return
user_ids = list(
GoogleWorkspaceProviderUser.objects.filter(
user__pk__in=users_set, provider=self.provider
).values_list("google_id", flat=True)
)
if len(user_ids) < 1:
return
self._patch(google_group.google_id, Direction.add, user_ids)
def _patch_remove_users(self, group: Group, users_set: set[int]):
"""Remove users in users_set from group"""
if len(users_set) < 1:
return
google_group = GoogleWorkspaceProviderGroup.objects.filter(
provider=self.provider, group=group
).first()
if not google_group:
self.logger.warning(
"could not sync group membership, group does not exist", group=group
)
return
user_ids = list(
GoogleWorkspaceProviderUser.objects.filter(
user__pk__in=users_set, provider=self.provider
).values_list("google_id", flat=True)
)
if len(user_ids) < 1:
return
self._patch(google_group.google_id, Direction.remove, user_ids)
def discover(self):
"""Iterate through all groups and connect them with authentik groups if possible"""
request = self.directory_service.groups().list(
customer="my_customer", maxResults=500, orderBy="email"
)
while request:
response = request.execute()
for group in response.get("groups", []):
self._discover_single_group(group)
request = self.directory_service.groups().list_next(
previous_request=request, previous_response=response
)
def _discover_single_group(self, group: dict):
"""handle discovery of a single group"""
google_name = group["name"]
google_id = group["id"]
matching_authentik_group = (
self.provider.get_object_qs(Group).filter(name=google_name).first()
)
if not matching_authentik_group:
return
GoogleWorkspaceProviderGroup.objects.get_or_create(
provider=self.provider,
group=matching_authentik_group,
google_id=google_id,
)

View File

@ -1,41 +0,0 @@
from json import dumps
from httplib2 import Response
class MockHTTP:
_recorded_requests = []
_responses = {}
def __init__(
self,
raise_on_unrecorded=True,
) -> None:
self._recorded_requests = []
self._responses = {}
self.raise_on_unrecorded = raise_on_unrecorded
def add_response(self, uri: str, body: str | dict = "", meta: dict | None = None, method="GET"):
if isinstance(body, dict):
body = dumps(body)
self._responses[(uri, method.upper())] = (body, meta or {"status": "200"})
def requests(self):
return self._recorded_requests
def request(
self,
uri,
method="GET",
body=None,
headers=None,
redirections=1,
connection_type=None,
):
key = (uri, method.upper())
self._recorded_requests.append((uri, method, body, headers))
if key not in self._responses and self.raise_on_unrecorded:
raise AssertionError(key)
body, meta = self._responses[key]
return Response(meta), body.encode("utf-8")
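A hedged usage sketch for the helper above, assuming the class is importable from this module; the URI and payload are made up for illustration:

```python
http = MockHTTP()
http.add_response(
    "https://example.googleapis.com/admin/directory/v1/domains",
    body={"domains": [{"domainName": "goauthentik.io"}]},
)

meta, content = http.request("https://example.googleapis.com/admin/directory/v1/domains")
# content is the recorded body encoded to bytes; meta is an httplib2 Response
# built from the default {"status": "200"} metadata.
assert b"goauthentik.io" in content
# requesting an unrecorded (uri, method) pair raises AssertionError by default
```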

View File

@ -1,141 +0,0 @@
from deepmerge import always_merger
from django.db import transaction
from authentik.core.expression.exceptions import (
PropertyMappingExpressionException,
SkipObjectException,
)
from authentik.core.models import User
from authentik.enterprise.providers.google_workspace.clients.base import GoogleWorkspaceSyncClient
from authentik.enterprise.providers.google_workspace.models import (
GoogleWorkspaceDeleteAction,
GoogleWorkspaceProviderMapping,
GoogleWorkspaceProviderUser,
)
from authentik.events.models import Event, EventAction
from authentik.lib.sync.outgoing.exceptions import (
ObjectExistsSyncException,
StopSync,
TransientSyncException,
)
from authentik.lib.utils.errors import exception_to_string
from authentik.policies.utils import delete_none_values
class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceProviderUser, dict]):
"""Sync authentik users into google workspace"""
connection_type = GoogleWorkspaceProviderUser
connection_type_query = "user"
can_discover = True
def to_schema(self, obj: User) -> dict:
"""Convert authentik user"""
raw_google_user = {}
for mapping in self.provider.property_mappings.all().order_by("name").select_subclasses():
if not isinstance(mapping, GoogleWorkspaceProviderMapping):
continue
try:
mapping: GoogleWorkspaceProviderMapping
value = mapping.evaluate(
user=obj,
request=None,
provider=self.provider,
)
if value is None:
continue
always_merger.merge(raw_google_user, value)
except SkipObjectException as exc:
raise exc from exc
except (PropertyMappingExpressionException, ValueError) as exc:
# Value error can be raised when assigning invalid data to an attribute
Event.new(
EventAction.CONFIGURATION_ERROR,
message=f"Failed to evaluate property-mapping {exception_to_string(exc)}",
mapping=mapping,
).save()
raise StopSync(exc, obj, mapping) from exc
if not raw_google_user:
raise StopSync(ValueError("No user mappings configured"), obj)
if "primaryEmail" not in raw_google_user:
raw_google_user["primaryEmail"] = str(obj.email)
return delete_none_values(raw_google_user)
def delete(self, obj: User):
"""Delete user"""
google_user = GoogleWorkspaceProviderUser.objects.filter(
provider=self.provider, user=obj
).first()
if not google_user:
self.logger.debug("User does not exist in Google, skipping")
return None
with transaction.atomic():
response = None
if self.provider.user_delete_action == GoogleWorkspaceDeleteAction.DELETE:
response = self._request(
self.directory_service.users().delete(userKey=google_user.google_id)
)
elif self.provider.user_delete_action == GoogleWorkspaceDeleteAction.SUSPEND:
response = self._request(
self.directory_service.users().update(
userKey=google_user.google_id, body={"suspended": True}
)
)
google_user.delete()
return response
def create(self, user: User):
"""Create user from scratch and create a connection object"""
google_user = self.to_schema(user)
self.check_email_valid(
google_user["primaryEmail"], *[x["address"] for x in google_user.get("emails", [])]
)
with transaction.atomic():
try:
response = self._request(self.directory_service.users().insert(body=google_user))
except ObjectExistsSyncException:
# user already exists in google workspace, so we can connect them manually
GoogleWorkspaceProviderUser.objects.create(
provider=self.provider, user=user, google_id=user.email
)
except TransientSyncException as exc:
raise exc
else:
GoogleWorkspaceProviderUser.objects.create(
provider=self.provider, user=user, google_id=response["primaryEmail"]
)
def update(self, user: User, connection: GoogleWorkspaceProviderUser):
"""Update existing user"""
google_user = self.to_schema(user)
self.check_email_valid(
google_user["primaryEmail"], *[x["address"] for x in google_user.get("emails", [])]
)
self._request(
self.directory_service.users().update(userKey=connection.google_id, body=google_user)
)
def discover(self):
"""Iterate through all users and connect them with authentik users if possible"""
request = self.directory_service.users().list(
customer="my_customer", maxResults=500, orderBy="email"
)
while request:
response = request.execute()
for user in response.get("users", []):
self._discover_single_user(user)
request = self.directory_service.users().list_next(
previous_request=request, previous_response=response
)
def _discover_single_user(self, user: dict):
"""handle discovery of a single user"""
email = user["primaryEmail"]
matching_authentik_user = self.provider.get_object_qs(User).filter(email=email).first()
if not matching_authentik_user:
return
GoogleWorkspaceProviderUser.objects.get_or_create(
provider=self.provider,
user=matching_authentik_user,
google_id=email,
)

View File

@ -1,167 +0,0 @@
# Generated by Django 5.0.4 on 2024-05-07 16:03
import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
("authentik_core", "0035_alter_group_options_and_more"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="GoogleWorkspaceProviderMapping",
fields=[
(
"propertymapping_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="authentik_core.propertymapping",
),
),
],
options={
"verbose_name": "Google Workspace Provider Mapping",
"verbose_name_plural": "Google Workspace Provider Mappings",
},
bases=("authentik_core.propertymapping",),
),
migrations.CreateModel(
name="GoogleWorkspaceProvider",
fields=[
(
"provider_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="authentik_core.provider",
),
),
("delegated_subject", models.EmailField(max_length=254)),
("credentials", models.JSONField()),
(
"scopes",
models.TextField(
default="https://www.googleapis.com/auth/admin.directory.user,https://www.googleapis.com/auth/admin.directory.group,https://www.googleapis.com/auth/admin.directory.group.member,https://www.googleapis.com/auth/admin.directory.domain.readonly"
),
),
("default_group_email_domain", models.TextField()),
("exclude_users_service_account", models.BooleanField(default=False)),
(
"user_delete_action",
models.TextField(
choices=[
("do_nothing", "Do Nothing"),
("delete", "Delete"),
("suspend", "Suspend"),
],
default="delete",
),
),
(
"group_delete_action",
models.TextField(
choices=[
("do_nothing", "Do Nothing"),
("delete", "Delete"),
("suspend", "Suspend"),
],
default="delete",
),
),
(
"filter_group",
models.ForeignKey(
default=None,
null=True,
on_delete=django.db.models.deletion.SET_DEFAULT,
to="authentik_core.group",
),
),
(
"property_mappings_group",
models.ManyToManyField(
blank=True,
default=None,
help_text="Property mappings used for group creation/updating.",
to="authentik_core.propertymapping",
),
),
],
options={
"verbose_name": "Google Workspace Provider",
"verbose_name_plural": "Google Workspace Providers",
},
bases=("authentik_core.provider", models.Model),
),
migrations.CreateModel(
name="GoogleWorkspaceProviderGroup",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
),
),
("google_id", models.TextField()),
(
"group",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="authentik_core.group"
),
),
(
"provider",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="authentik_providers_google_workspace.googleworkspaceprovider",
),
),
],
options={
"unique_together": {("google_id", "group", "provider")},
},
),
migrations.CreateModel(
name="GoogleWorkspaceProviderUser",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
),
),
("google_id", models.TextField()),
(
"provider",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="authentik_providers_google_workspace.googleworkspaceprovider",
),
),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
options={
"unique_together": {("google_id", "user", "provider")},
},
),
]

View File

@ -1,179 +0,0 @@
"""Google workspace sync provider"""
from typing import Any, Self
from uuid import uuid4
from django.db import models
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from google.oauth2.service_account import Credentials
from rest_framework.serializers import Serializer
from authentik.core.models import (
BackchannelProvider,
Group,
PropertyMapping,
User,
UserTypes,
)
from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
from authentik.lib.sync.outgoing.models import OutgoingSyncProvider
def default_scopes() -> list[str]:
return [
"https://www.googleapis.com/auth/admin.directory.user",
"https://www.googleapis.com/auth/admin.directory.group",
"https://www.googleapis.com/auth/admin.directory.group.member",
"https://www.googleapis.com/auth/admin.directory.domain.readonly",
]
class GoogleWorkspaceDeleteAction(models.TextChoices):
"""Action taken when a user/group is deleted in authentik. Suspend is not available for groups,
and will be treated as `do_nothing`"""
DO_NOTHING = "do_nothing"
DELETE = "delete"
SUSPEND = "suspend"
class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
"""Sync users from authentik into Google Workspace."""
delegated_subject = models.EmailField()
credentials = models.JSONField()
scopes = models.TextField(default=",".join(default_scopes()))
default_group_email_domain = models.TextField()
exclude_users_service_account = models.BooleanField(default=False)
user_delete_action = models.TextField(
choices=GoogleWorkspaceDeleteAction.choices, default=GoogleWorkspaceDeleteAction.DELETE
)
group_delete_action = models.TextField(
choices=GoogleWorkspaceDeleteAction.choices, default=GoogleWorkspaceDeleteAction.DELETE
)
filter_group = models.ForeignKey(
"authentik_core.group", on_delete=models.SET_DEFAULT, default=None, null=True
)
property_mappings_group = models.ManyToManyField(
PropertyMapping,
default=None,
blank=True,
help_text=_("Property mappings used for group creation/updating."),
)
def client_for_model(
self, model: type[User | Group]
) -> BaseOutgoingSyncClient[User | Group, Any, Any, Self]:
if issubclass(model, User):
from authentik.enterprise.providers.google_workspace.clients.users import (
GoogleWorkspaceUserClient,
)
return GoogleWorkspaceUserClient(self)
if issubclass(model, Group):
from authentik.enterprise.providers.google_workspace.clients.groups import (
GoogleWorkspaceGroupClient,
)
return GoogleWorkspaceGroupClient(self)
raise ValueError(f"Invalid model {model}")
def get_object_qs(self, type: type[User | Group]) -> QuerySet[User | Group]:
if type == User:
# Get queryset of all users with consistent ordering
# according to the provider's settings
base = User.objects.all().exclude_anonymous()
if self.exclude_users_service_account:
base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
type=UserTypes.INTERNAL_SERVICE_ACCOUNT
)
if self.filter_group:
base = base.filter(ak_groups__in=[self.filter_group])
return base.order_by("pk")
if type == Group:
# Get queryset of all groups with consistent ordering
return Group.objects.all().order_by("pk")
raise ValueError(f"Invalid type {type}")
def google_credentials(self):
return {
"credentials": Credentials.from_service_account_info(
self.credentials, scopes=self.scopes.split(",")
).with_subject(self.delegated_subject),
}
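This is the dict that the sync client earlier in this compare unpacks into `googleapiclient`'s `build()`; a hedged sketch of that call site, assuming a configured provider instance:

```python
from googleapiclient.discovery import build

directory_service = build(
    "admin",
    "directory_v1",
    cache_discovery=False,
    # -> {"credentials": <service-account credentials delegated to the subject>}
    **provider.google_credentials(),
)
```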
@property
def component(self) -> str:
return "ak-provider-google-workspace-form"
@property
def serializer(self) -> type[Serializer]:
from authentik.enterprise.providers.google_workspace.api.providers import (
GoogleProviderSerializer,
)
return GoogleProviderSerializer
def __str__(self):
return f"Google Workspace Provider {self.name}"
class Meta:
verbose_name = _("Google Workspace Provider")
verbose_name_plural = _("Google Workspace Providers")
class GoogleWorkspaceProviderMapping(PropertyMapping):
"""Map authentik data to outgoing Google requests"""
@property
def component(self) -> str:
return "ak-property-mapping-google-workspace-form"
@property
def serializer(self) -> type[Serializer]:
from authentik.enterprise.providers.google_workspace.api.property_mappings import (
GoogleProviderMappingSerializer,
)
return GoogleProviderMappingSerializer
def __str__(self):
return f"Google Workspace Provider Mapping {self.name}"
class Meta:
verbose_name = _("Google Workspace Provider Mapping")
verbose_name_plural = _("Google Workspace Provider Mappings")
class GoogleWorkspaceProviderUser(models.Model):
"""Mapping of a user and provider to a Google user ID"""
id = models.UUIDField(primary_key=True, editable=False, default=uuid4)
google_id = models.TextField()
user = models.ForeignKey(User, on_delete=models.CASCADE)
provider = models.ForeignKey(GoogleWorkspaceProvider, on_delete=models.CASCADE)
class Meta:
unique_together = (("google_id", "user", "provider"),)
def __str__(self) -> str:
return f"Google Workspace User {self.user_id} to {self.provider_id}"
class GoogleWorkspaceProviderGroup(models.Model):
"""Mapping of a group and provider to a Google group ID"""
id = models.UUIDField(primary_key=True, editable=False, default=uuid4)
google_id = models.TextField()
group = models.ForeignKey(Group, on_delete=models.CASCADE)
provider = models.ForeignKey(GoogleWorkspaceProvider, on_delete=models.CASCADE)
class Meta:
unique_together = (("google_id", "group", "provider"),)
def __str__(self) -> str:
return f"Google Workspace Group {self.group_id} to {self.provider_id}"

View File

@ -1,13 +0,0 @@
"""Google workspace provider task Settings"""
from celery.schedules import crontab
from authentik.lib.utils.time import fqdn_rand
CELERY_BEAT_SCHEDULE = {
"providers_google_workspace_sync": {
"task": "authentik.enterprise.providers.google_workspace.tasks.google_workspace_sync_all",
"schedule": crontab(minute=fqdn_rand("google_workspace_sync_all"), hour="*/4"),
"options": {"queue": "authentik_scheduled"},
},
}
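
As a point of reference, a minimal sketch of the schedule this entry produces, assuming fqdn_rand returns a deterministic minute (0-59) derived from the installation's hostname so separate installs do not all sync at the same moment:

from celery.schedules import crontab

# If fqdn_rand("google_workspace_sync_all") happened to return 17, the full sync
# would run at minute 17 of every fourth hour on the authentik_scheduled queue.
example_schedule = crontab(minute=17, hour="*/4")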

View File

@ -1,16 +0,0 @@
"""Google provider signals"""
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
from authentik.enterprise.providers.google_workspace.tasks import (
google_workspace_sync,
google_workspace_sync_direct,
google_workspace_sync_m2m,
)
from authentik.lib.sync.outgoing.signals import register_signals
register_signals(
GoogleWorkspaceProvider,
task_sync_single=google_workspace_sync,
task_sync_direct=google_workspace_sync_direct,
task_sync_m2m=google_workspace_sync_m2m,
)

View File

@ -1,34 +0,0 @@
"""Google Provider tasks"""
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
from authentik.events.system_tasks import SystemTask
from authentik.lib.sync.outgoing.tasks import SyncTasks
from authentik.root.celery import CELERY_APP
sync_tasks = SyncTasks(GoogleWorkspaceProvider)
@CELERY_APP.task()
def google_workspace_sync_objects(*args, **kwargs):
return sync_tasks.sync_objects(*args, **kwargs)
@CELERY_APP.task(base=SystemTask, bind=True)
def google_workspace_sync(self, provider_pk: int, *args, **kwargs):
"""Run full sync for Google Workspace provider"""
return sync_tasks.sync_single(self, provider_pk, google_workspace_sync_objects)
@CELERY_APP.task()
def google_workspace_sync_all():
return sync_tasks.sync_all(google_workspace_sync)
@CELERY_APP.task()
def google_workspace_sync_direct(*args, **kwargs):
return sync_tasks.sync_signal_direct(*args, **kwargs)
@CELERY_APP.task()
def google_workspace_sync_m2m(*args, **kwargs):
return sync_tasks.sync_signal_m2m(*args, **kwargs)
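
A short sketch of how these tasks are dispatched outside of the beat schedule; it mirrors the tests further down, and the provider lookup is illustrative:

from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync

provider = GoogleWorkspaceProvider.objects.first()
# Queue a full sync for a single provider; sync_single() presumably fans out
# google_workspace_sync_objects per synced model (users, groups).
result = google_workspace_sync.delay(provider.pk)
result.get()  # block until the sync has finished, as the tests do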

View File

@ -1,14 +0,0 @@
{
"kind": "admin#directory#domains",
"etag": "\"a1kA7zE2sFLsHiFwgXN9G3effoc9grR2OwUu8_95xD4/uvC5HsKHylhnUtnRV6ZxINODtV0\"",
"domains": [
{
"kind": "admin#directory#domain",
"etag": "\"a1kA7zE2sFLsHiFwgXN9G3effoc9grR2OwUu8_95xD4/V4koSPWBFIWuIpAmUamO96QhTLo\"",
"domainName": "goauthentik.io",
"isPrimary": true,
"verified": true,
"creationTime": "1543048869840"
}
]
}

View File

@ -1,313 +0,0 @@
"""Google Workspace Group tests"""
from unittest.mock import MagicMock, patch
from django.test import TestCase
from authentik.blueprints.tests import apply_blueprint
from authentik.core.models import Application, Group, User
from authentik.core.tests.utils import create_test_user
from authentik.enterprise.providers.google_workspace.clients.test_http import MockHTTP
from authentik.enterprise.providers.google_workspace.models import (
GoogleWorkspaceDeleteAction,
GoogleWorkspaceProvider,
GoogleWorkspaceProviderGroup,
GoogleWorkspaceProviderMapping,
)
from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync
from authentik.events.models import Event, EventAction
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
from authentik.tenants.models import Tenant
domains_list_v1_mock = load_fixture("fixtures/domains_list_v1.json")
class GoogleWorkspaceGroupTests(TestCase):
"""Google workspace Group tests"""
@apply_blueprint("system/providers-google-workspace.yaml")
def setUp(self) -> None:
# Delete all users and groups as the mocked HTTP responses only return one ID
# which will cause errors with multiple groups
Tenant.objects.update(avatars="none")
User.objects.all().exclude_anonymous().delete()
Group.objects.all().delete()
self.provider: GoogleWorkspaceProvider = GoogleWorkspaceProvider.objects.create(
name=generate_id(),
credentials={},
delegated_subject="",
exclude_users_service_account=True,
default_group_email_domain="goauthentik.io",
)
self.app: Application = Application.objects.create(
name=generate_id(),
slug=generate_id(),
)
self.app.backchannel_providers.add(self.provider)
self.provider.property_mappings.add(
GoogleWorkspaceProviderMapping.objects.get(
managed="goauthentik.io/providers/google_workspace/user"
)
)
self.provider.property_mappings_group.add(
GoogleWorkspaceProviderMapping.objects.get(
managed="goauthentik.io/providers/google_workspace/group"
)
)
self.api_key = generate_id()
def test_group_create(self):
"""Test group creation"""
uid = generate_id()
http = MockHTTP()
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
domains_list_v1_mock,
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups?key={self.api_key}&alt=json",
method="POST",
body={"id": generate_id()},
)
with patch(
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
):
group = Group.objects.create(name=uid)
google_group = GoogleWorkspaceProviderGroup.objects.filter(
provider=self.provider, group=group
).first()
self.assertIsNotNone(google_group)
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
self.assertEqual(len(http.requests()), 2)
def test_group_create_update(self):
"""Test group updating"""
uid = generate_id()
ext_id = generate_id()
http = MockHTTP()
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
domains_list_v1_mock,
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups?key={self.api_key}&alt=json",
method="POST",
body={"id": ext_id},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups/{ext_id}?key={self.api_key}&alt=json",
method="PUT",
body={"id": ext_id},
)
with patch(
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
):
group = Group.objects.create(name=uid)
google_group = GoogleWorkspaceProviderGroup.objects.filter(
provider=self.provider, group=group
).first()
self.assertIsNotNone(google_group)
group.name = "new name"
group.save()
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
self.assertEqual(len(http.requests()), 4)
def test_group_create_delete(self):
"""Test group deletion"""
uid = generate_id()
ext_id = generate_id()
http = MockHTTP()
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
domains_list_v1_mock,
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups?key={self.api_key}&alt=json",
method="POST",
body={"id": ext_id},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups/{ext_id}?key={self.api_key}",
method="DELETE",
)
with patch(
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
):
group = Group.objects.create(name=uid)
google_group = GoogleWorkspaceProviderGroup.objects.filter(
provider=self.provider, group=group
).first()
self.assertIsNotNone(google_group)
group.delete()
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
self.assertEqual(len(http.requests()), 4)
def test_group_create_member_add(self):
"""Test group creation"""
uid = generate_id()
ext_id = generate_id()
http = MockHTTP()
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
domains_list_v1_mock,
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups?key={self.api_key}&alt=json",
method="POST",
body={"id": ext_id},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users?key={self.api_key}&alt=json",
method="POST",
body={"primaryEmail": f"{uid}@goauthentik.io"},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users/{uid}%40goauthentik.io?key={self.api_key}&alt=json",
method="PUT",
body={"primaryEmail": f"{uid}@goauthentik.io"},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups/{ext_id}/members?key={self.api_key}&alt=json",
method="POST",
)
with patch(
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
):
user = create_test_user(uid)
group = Group.objects.create(name=uid)
group.users.add(user)
google_group = GoogleWorkspaceProviderGroup.objects.filter(
provider=self.provider, group=group
).first()
self.assertIsNotNone(google_group)
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
self.assertEqual(len(http.requests()), 8)
def test_group_create_member_remove(self):
"""Test group creation"""
uid = generate_id()
ext_id = generate_id()
http = MockHTTP()
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
domains_list_v1_mock,
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups?key={self.api_key}&alt=json",
method="POST",
body={"id": ext_id},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users?key={self.api_key}&alt=json",
method="POST",
body={"primaryEmail": f"{uid}@goauthentik.io"},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users/{uid}%40goauthentik.io?key={self.api_key}&alt=json",
method="PUT",
body={"primaryEmail": f"{uid}@goauthentik.io"},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups/{ext_id}/members/{uid}%40goauthentik.io?key={self.api_key}",
method="DELETE",
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups/{ext_id}/members?key={self.api_key}&alt=json",
method="POST",
)
with patch(
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
):
user = create_test_user(uid)
group = Group.objects.create(name=uid)
group.users.add(user)
google_group = GoogleWorkspaceProviderGroup.objects.filter(
provider=self.provider, group=group
).first()
self.assertIsNotNone(google_group)
group.users.remove(user)
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
self.assertEqual(len(http.requests()), 10)
def test_group_create_delete_do_nothing(self):
"""Test group deletion (delete action = do nothing)"""
self.provider.group_delete_action = GoogleWorkspaceDeleteAction.DO_NOTHING
self.provider.save()
uid = generate_id()
http = MockHTTP()
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
domains_list_v1_mock,
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups?key={self.api_key}&alt=json",
method="POST",
body={"id": uid},
)
with patch(
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
):
group = Group.objects.create(name=uid)
google_group = GoogleWorkspaceProviderGroup.objects.filter(
provider=self.provider, group=group
).first()
self.assertIsNotNone(google_group)
group.delete()
self.assertEqual(len(http.requests()), 3)
self.assertFalse(
GoogleWorkspaceProviderGroup.objects.filter(
provider=self.provider, group__name=uid
).exists()
)
def test_sync_task(self):
"""Test group discovery"""
uid = generate_id()
http = MockHTTP()
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
domains_list_v1_mock,
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users?customer=my_customer&maxResults=500&orderBy=email&key={self.api_key}&alt=json",
method="GET",
body={"users": []},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups?customer=my_customer&maxResults=500&orderBy=email&key={self.api_key}&alt=json",
method="GET",
body={"groups": [{"id": uid, "name": uid}]},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups/{uid}?key={self.api_key}&alt=json",
method="PUT",
body={"id": uid},
)
self.app.backchannel_providers.remove(self.provider)
different_group = Group.objects.create(
name=uid,
)
self.app.backchannel_providers.add(self.provider)
with patch(
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
):
google_workspace_sync.delay(self.provider.pk).get()
self.assertTrue(
GoogleWorkspaceProviderGroup.objects.filter(
group=different_group, provider=self.provider
).exists()
)
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
self.assertEqual(len(http.requests()), 5)

View File

@ -1,287 +0,0 @@
"""Google Workspace User tests"""
from json import loads
from unittest.mock import MagicMock, patch
from django.test import TestCase
from authentik.blueprints.tests import apply_blueprint
from authentik.core.models import Application, Group, User
from authentik.enterprise.providers.google_workspace.clients.test_http import MockHTTP
from authentik.enterprise.providers.google_workspace.models import (
GoogleWorkspaceDeleteAction,
GoogleWorkspaceProvider,
GoogleWorkspaceProviderMapping,
GoogleWorkspaceProviderUser,
)
from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync
from authentik.events.models import Event, EventAction
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
from authentik.tenants.models import Tenant
domains_list_v1_mock = load_fixture("fixtures/domains_list_v1.json")
class GoogleWorkspaceUserTests(TestCase):
"""Google workspace User tests"""
@apply_blueprint("system/providers-google-workspace.yaml")
def setUp(self) -> None:
# Delete all users and groups as the mocked HTTP responses only return one ID
# which will cause errors with multiple users
Tenant.objects.update(avatars="none")
User.objects.all().exclude_anonymous().delete()
Group.objects.all().delete()
self.provider: GoogleWorkspaceProvider = GoogleWorkspaceProvider.objects.create(
name=generate_id(),
credentials={},
delegated_subject="",
exclude_users_service_account=True,
default_group_email_domain="goauthentik.io",
)
self.app: Application = Application.objects.create(
name=generate_id(),
slug=generate_id(),
)
self.app.backchannel_providers.add(self.provider)
self.provider.property_mappings.add(
GoogleWorkspaceProviderMapping.objects.get(
managed="goauthentik.io/providers/google_workspace/user"
)
)
self.provider.property_mappings_group.add(
GoogleWorkspaceProviderMapping.objects.get(
managed="goauthentik.io/providers/google_workspace/group"
)
)
self.api_key = generate_id()
def test_user_create(self):
"""Test user creation"""
uid = generate_id()
http = MockHTTP()
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
domains_list_v1_mock,
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users?key={self.api_key}&alt=json",
method="POST",
body={"primaryEmail": f"{uid}@goauthentik.io"},
)
with patch(
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
):
user = User.objects.create(
username=uid,
name=f"{uid} {uid}",
email=f"{uid}@goauthentik.io",
)
google_user = GoogleWorkspaceProviderUser.objects.filter(
provider=self.provider, user=user
).first()
self.assertIsNotNone(google_user)
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
self.assertEqual(len(http.requests()), 2)
def test_user_create_update(self):
"""Test user updating"""
uid = generate_id()
http = MockHTTP()
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
domains_list_v1_mock,
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users?key={self.api_key}&alt=json",
method="POST",
body={"primaryEmail": f"{uid}@goauthentik.io"},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users/{uid}%40goauthentik.io?key={self.api_key}&alt=json",
method="PUT",
body={"primaryEmail": f"{uid}@goauthentik.io"},
)
with patch(
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
):
user = User.objects.create(
username=uid,
name=f"{uid} {uid}",
email=f"{uid}@goauthentik.io",
)
google_user = GoogleWorkspaceProviderUser.objects.filter(
provider=self.provider, user=user
).first()
self.assertIsNotNone(google_user)
user.name = "new name"
user.save()
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
self.assertEqual(len(http.requests()), 4)
def test_user_create_delete(self):
"""Test user deletion"""
uid = generate_id()
http = MockHTTP()
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
domains_list_v1_mock,
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users?key={self.api_key}&alt=json",
method="POST",
body={"primaryEmail": f"{uid}@goauthentik.io"},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users/{uid}%40goauthentik.io?key={self.api_key}",
method="DELETE",
)
with patch(
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
):
user = User.objects.create(
username=uid,
name=f"{uid} {uid}",
email=f"{uid}@goauthentik.io",
)
google_user = GoogleWorkspaceProviderUser.objects.filter(
provider=self.provider, user=user
).first()
self.assertIsNotNone(google_user)
user.delete()
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
self.assertEqual(len(http.requests()), 4)
def test_user_create_delete_suspend(self):
"""Test user deletion (delete action = Suspend)"""
self.provider.user_delete_action = GoogleWorkspaceDeleteAction.SUSPEND
self.provider.save()
uid = generate_id()
http = MockHTTP()
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
domains_list_v1_mock,
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users?key={self.api_key}&alt=json",
method="POST",
body={"primaryEmail": f"{uid}@goauthentik.io"},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users/{uid}%40goauthentik.io?key={self.api_key}&alt=json",
method="PUT",
body={"primaryEmail": f"{uid}@goauthentik.io"},
)
with patch(
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
):
user = User.objects.create(
username=uid,
name=f"{uid} {uid}",
email=f"{uid}@goauthentik.io",
)
google_user = GoogleWorkspaceProviderUser.objects.filter(
provider=self.provider, user=user
).first()
self.assertIsNotNone(google_user)
user.delete()
self.assertEqual(len(http.requests()), 4)
_, _, body, _ = http.requests()[3]
self.assertEqual(
loads(body),
{
"suspended": True,
},
)
self.assertFalse(
GoogleWorkspaceProviderUser.objects.filter(
provider=self.provider, user__username=uid
).exists()
)
def test_user_create_delete_do_nothing(self):
"""Test user deletion (delete action = do nothing)"""
self.provider.user_delete_action = GoogleWorkspaceDeleteAction.DO_NOTHING
self.provider.save()
uid = generate_id()
http = MockHTTP()
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
domains_list_v1_mock,
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users?key={self.api_key}&alt=json",
method="POST",
body={"primaryEmail": f"{uid}@goauthentik.io"},
)
with patch(
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
):
user = User.objects.create(
username=uid,
name=f"{uid} {uid}",
email=f"{uid}@goauthentik.io",
)
google_user = GoogleWorkspaceProviderUser.objects.filter(
provider=self.provider, user=user
).first()
self.assertIsNotNone(google_user)
user.delete()
self.assertEqual(len(http.requests()), 3)
self.assertFalse(
GoogleWorkspaceProviderUser.objects.filter(
provider=self.provider, user__username=uid
).exists()
)
def test_sync_task(self):
"""Test user discovery"""
uid = generate_id()
http = MockHTTP()
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/customer/my_customer/domains?key={self.api_key}&alt=json",
domains_list_v1_mock,
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users?customer=my_customer&maxResults=500&orderBy=email&key={self.api_key}&alt=json",
method="GET",
body={"users": [{"primaryEmail": f"{uid}@goauthentik.io"}]},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups?customer=my_customer&maxResults=500&orderBy=email&key={self.api_key}&alt=json",
method="GET",
body={"groups": []},
)
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users/{uid}%40goauthentik.io?key={self.api_key}&alt=json",
method="PUT",
body={"primaryEmail": f"{uid}@goauthentik.io"},
)
self.app.backchannel_providers.remove(self.provider)
different_user = User.objects.create(
username=uid,
email=f"{uid}@goauthentik.io",
)
self.app.backchannel_providers.add(self.provider)
with patch(
"authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
MagicMock(return_value={"developerKey": self.api_key, "http": http}),
):
google_workspace_sync.delay(self.provider.pk).get()
self.assertTrue(
GoogleWorkspaceProviderUser.objects.filter(
user=different_user, provider=self.provider
).exists()
)
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
self.assertEqual(len(http.requests()), 5)

View File

@ -1,11 +0,0 @@
"""google provider urls"""
from authentik.enterprise.providers.google_workspace.api.property_mappings import (
GoogleProviderMappingViewSet,
)
from authentik.enterprise.providers.google_workspace.api.providers import GoogleProviderViewSet
api_urlpatterns = [
("providers/google_workspace", GoogleProviderViewSet),
("propertymappings/provider/google_workspace", GoogleProviderMappingViewSet),
]
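
A hedged sketch of the router registration these tuples stand for; the central API URL loader that actually consumes api_urlpatterns is not part of this diff, so the DefaultRouter here is purely illustrative:

from rest_framework.routers import DefaultRouter

router = DefaultRouter()
router.register("providers/google_workspace", GoogleProviderViewSet)
router.register("propertymappings/provider/google_workspace", GoogleProviderMappingViewSet)
urlpatterns = router.urls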

View File

@ -6,13 +6,13 @@ from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import GenericViewSet
from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
from authentik.api.authorization import OwnerFilter, OwnerPermissions
from authentik.core.api.groups import GroupMemberSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.providers.rac.api.endpoints import EndpointSerializer
from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer
from authentik.enterprise.providers.rac.models import ConnectionToken
from authentik.enterprise.providers.rac.models import ConnectionToken, Endpoint
class ConnectionTokenSerializer(EnterpriseRequiredMixin, ModelSerializer):
@ -23,7 +23,7 @@ class ConnectionTokenSerializer(EnterpriseRequiredMixin, ModelSerializer):
user = GroupMemberSerializer(source="session.user", read_only=True)
class Meta:
model = ConnectionToken
model = Endpoint
fields = [
"pk",
"provider",
@ -49,5 +49,5 @@ class ConnectionTokenViewSet(
filterset_fields = ["endpoint", "session__user", "provider"]
search_fields = ["endpoint__name", "provider__name"]
ordering = ["endpoint__name", "provider__name"]
permission_classes = [OwnerSuperuserPermissions]
permission_classes = [OwnerPermissions]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]

View File

@ -1,18 +0,0 @@
# Generated by Django 5.0.2 on 2024-02-29 10:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_providers_rac", "0001_squashed_0003_alter_connectiontoken_options_and_more"),
]
operations = [
migrations.AlterField(
model_name="connectiontoken",
name="expires",
field=models.DateTimeField(default=None, null=True),
),
]

View File

@ -11,7 +11,7 @@ from django.utils.translation import gettext as _
from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger
from authentik.core.expression.exceptions import PropertyMappingExpressionException
from authentik.core.exceptions import PropertyMappingExpressionException
from authentik.core.models import ExpiringModel, PropertyMapping, Provider, User, default_token_key
from authentik.events.models import Event, EventAction
from authentik.lib.models import SerializerModel
@ -201,7 +201,10 @@ class ConnectionToken(ExpiringModel):
return settings
def __str__(self):
return f"RAC Connection token {self.session_id} to {self.provider_id}/{self.endpoint_id}"
return (
f"RAC Connection token {self.session.user} to "
f"{self.endpoint.provider.name}/{self.endpoint.name}"
)
class Meta:
verbose_name = _("RAC Connection token")

View File

@ -14,9 +14,7 @@ CELERY_BEAT_SCHEDULE = {
TENANT_APPS = [
"authentik.enterprise.audit",
"authentik.enterprise.providers.google_workspace",
"authentik.enterprise.providers.rac",
"authentik.enterprise.stages.source",
]
MIDDLEWARE = ["authentik.enterprise.middleware.EnterpriseMiddleware"]

View File

@ -1,38 +0,0 @@
"""Source Stage API Views"""
from rest_framework.exceptions import ValidationError
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.used_by import UsedByMixin
from authentik.core.models import Source
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.stages.source.models import SourceStage
from authentik.flows.api.stages import StageSerializer
class SourceStageSerializer(EnterpriseRequiredMixin, StageSerializer):
"""SourceStage Serializer"""
def validate_source(self, _source: Source) -> Source:
"""Ensure configured source supports web-based login"""
source = Source.objects.filter(pk=_source.pk).select_subclasses().first()
if not source:
raise ValidationError("Invalid source")
login_button = source.ui_login_button(self.context["request"])
if not login_button:
raise ValidationError("Invalid source selected, only web-based sources are supported.")
return source
class Meta:
model = SourceStage
fields = StageSerializer.Meta.fields + ["source", "resume_timeout"]
class SourceStageViewSet(UsedByMixin, ModelViewSet):
"""SourceStage Viewset"""
queryset = SourceStage.objects.all()
serializer_class = SourceStageSerializer
filterset_fields = "__all__"
ordering = ["name"]
search_fields = ["name"]

View File

@ -1,12 +0,0 @@
"""authentik stage app config"""
from authentik.enterprise.apps import EnterpriseConfig
class AuthentikEnterpriseStageSourceConfig(EnterpriseConfig):
"""authentik source stage config"""
name = "authentik.enterprise.stages.source"
label = "authentik_stages_source"
verbose_name = "authentik Enterprise.Stages.Source"
default = True

View File

@ -1,53 +0,0 @@
# Generated by Django 5.0.2 on 2024-02-25 20:44
import authentik.lib.utils.time
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
("authentik_core", "0033_alter_user_options"),
("authentik_flows", "0027_auto_20231028_1424"),
]
operations = [
migrations.CreateModel(
name="SourceStage",
fields=[
(
"stage_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="authentik_flows.stage",
),
),
(
"resume_timeout",
models.TextField(
default="minutes=10",
help_text="Amount of time a user can take to return from the source to continue the flow (Format: hours=-1;minutes=-2;seconds=-3)",
validators=[authentik.lib.utils.time.timedelta_string_validator],
),
),
(
"source",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="authentik_core.source"
),
),
],
options={
"verbose_name": "Source Stage",
"verbose_name_plural": "Source Stages",
},
bases=("authentik_flows.stage",),
),
]

View File

@ -1,45 +0,0 @@
"""Source stage models"""
from django.db import models
from django.utils.translation import gettext_lazy as _
from django.views import View
from rest_framework.serializers import BaseSerializer
from authentik.flows.models import Stage
from authentik.lib.utils.time import timedelta_string_validator
class SourceStage(Stage):
"""Suspend the current flow execution and send the user to a source,
after which this flow execution is resumed."""
source = models.ForeignKey("authentik_core.Source", on_delete=models.CASCADE)
resume_timeout = models.TextField(
default="minutes=10",
validators=[timedelta_string_validator],
help_text=_(
"Amount of time a user can take to return from the source to continue the flow "
"(Format: hours=-1;minutes=-2;seconds=-3)"
),
)
@property
def serializer(self) -> type[BaseSerializer]:
from authentik.enterprise.stages.source.api import SourceStageSerializer
return SourceStageSerializer
@property
def view(self) -> type[View]:
from authentik.enterprise.stages.source.stage import SourceStageView
return SourceStageView
@property
def component(self) -> str:
return "ak-stage-source-form"
class Meta:
verbose_name = _("Source Stage")
verbose_name_plural = _("Source Stages")

View File

@ -1,79 +0,0 @@
"""Source stage logic"""
from typing import Any
from uuid import uuid4
from django.http import HttpRequest, HttpResponse
from django.utils.text import slugify
from django.utils.timezone import now
from guardian.shortcuts import get_anonymous_user
from authentik.core.models import Source, User
from authentik.core.sources.flow_manager import SESSION_KEY_OVERRIDE_FLOW_TOKEN
from authentik.core.types import UILoginButton
from authentik.enterprise.stages.source.models import SourceStage
from authentik.flows.challenge import Challenge, ChallengeResponse
from authentik.flows.models import FlowToken
from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED
from authentik.flows.stage import ChallengeStageView
from authentik.lib.utils.time import timedelta_from_string
PLAN_CONTEXT_RESUME_TOKEN = "resume_token" # nosec
class SourceStageView(ChallengeStageView):
"""Suspend the current flow execution and send the user to a source,
after which this flow execution is resumed."""
login_button: UILoginButton
def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
current_stage: SourceStage = self.executor.current_stage
source: Source = (
Source.objects.filter(pk=current_stage.source_id).select_subclasses().first()
)
if not source:
self.logger.warning("Source does not exist")
return self.executor.stage_invalid("Source does not exist")
self.login_button = source.ui_login_button(self.request)
if not self.login_button:
self.logger.warning("Source does not have a UI login button")
return self.executor.stage_invalid("Invalid source")
restore_token = self.executor.plan.context.get(PLAN_CONTEXT_IS_RESTORED)
override_token = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
if restore_token and override_token and restore_token.pk == override_token.pk:
del self.request.session[SESSION_KEY_OVERRIDE_FLOW_TOKEN]
return self.executor.stage_ok()
return super().dispatch(request, *args, **kwargs)
def get_challenge(self, *args, **kwargs) -> Challenge:
resume_token = self.create_flow_token()
self.request.session[SESSION_KEY_OVERRIDE_FLOW_TOKEN] = resume_token
return self.login_button.challenge
def create_flow_token(self) -> FlowToken:
"""Save the current flow state in a token that can be used to resume this flow"""
pending_user: User = self.get_pending_user()
if pending_user.is_anonymous:
pending_user = get_anonymous_user()
current_stage: SourceStage = self.executor.current_stage
identifier = slugify(f"ak-source-stage-{current_stage.name}-{str(uuid4())}")
# Don't check for validity here, we only care if the token exists
tokens = FlowToken.objects.filter(identifier=identifier)
valid_delta = timedelta_from_string(current_stage.resume_timeout)
if not tokens.exists():
return FlowToken.objects.create(
expires=now() + valid_delta,
user=pending_user,
identifier=identifier,
flow=self.executor.flow,
_plan=FlowToken.pickle(self.executor.plan),
)
token = tokens.first()
# Check if token is expired and rotate key if so
if token.is_expired:
token.expire_action()
return token
def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
return self.executor.stage_ok()

View File

@ -1,99 +0,0 @@
"""Source stage tests"""
from django.urls import reverse
from authentik.core.tests.utils import create_test_flow, create_test_user
from authentik.enterprise.stages.source.models import SourceStage
from authentik.flows.models import FlowDesignation, FlowStageBinding, FlowToken
from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED, FlowPlan
from authentik.flows.tests import FlowTestCase
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.lib.generators import generate_id
from authentik.sources.saml.models import SAMLSource
from authentik.stages.identification.models import IdentificationStage, UserFields
from authentik.stages.password import BACKEND_INBUILT
from authentik.stages.password.models import PasswordStage
from authentik.stages.user_login.models import UserLoginStage
class TestSourceStage(FlowTestCase):
"""Source stage tests"""
def setUp(self):
self.source = SAMLSource.objects.create(
slug=generate_id(),
issuer="authentik",
allow_idp_initiated=True,
pre_authentication_flow=create_test_flow(),
)
def test_source_success(self):
"""Test"""
user = create_test_user()
flow = create_test_flow(FlowDesignation.AUTHENTICATION)
stage = SourceStage.objects.create(name=generate_id(), source=self.source)
FlowStageBinding.objects.create(
target=flow,
stage=IdentificationStage.objects.create(
name=generate_id(),
user_fields=[UserFields.USERNAME],
),
order=0,
)
FlowStageBinding.objects.create(
target=flow,
stage=PasswordStage.objects.create(name=generate_id(), backends=[BACKEND_INBUILT]),
order=5,
)
FlowStageBinding.objects.create(target=flow, stage=stage, order=10)
FlowStageBinding.objects.create(
target=flow,
stage=UserLoginStage.objects.create(
name=generate_id(),
),
order=15,
)
# Get user identification stage
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
)
self.assertEqual(response.status_code, 200)
self.assertStageResponse(response, flow, component="ak-stage-identification")
# Send username
response = self.client.post(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
data={"uid_field": user.username},
follow=True,
)
self.assertEqual(response.status_code, 200)
self.assertStageResponse(response, flow, component="ak-stage-password")
# Send password
response = self.client.post(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
data={"password": user.username},
follow=True,
)
self.assertEqual(response.status_code, 200)
self.assertStageRedirects(
response,
reverse("authentik_sources_saml:login", kwargs={"source_slug": self.source.slug}),
)
# Hijack flow plan so we don't have to emulate the source
flow_token = FlowToken.objects.filter(
identifier__startswith=f"ak-source-stage-{stage.name.lower()}"
).first()
self.assertIsNotNone(flow_token)
session = self.client.session
plan: FlowPlan = session[SESSION_KEY_PLAN]
plan.context[PLAN_CONTEXT_IS_RESTORED] = flow_token
session[SESSION_KEY_PLAN] = plan
session.save()
# Pretend we've just returned from the source
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}), follow=True
)
self.assertEqual(response.status_code, 200)
self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))

View File

@ -1,5 +0,0 @@
"""API URLs"""
from authentik.enterprise.stages.source.api import SourceStageViewSet
api_urlpatterns = [("stages/source", SourceStageViewSet)]

View File

@ -12,6 +12,7 @@ from rest_framework.fields import (
ChoiceField,
DateTimeField,
FloatField,
ListField,
SerializerMethodField,
)
from rest_framework.request import Request
@ -20,7 +21,6 @@ from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ReadOnlyModelViewSet
from structlog.stdlib import get_logger
from authentik.events.logs import LogEventSerializer
from authentik.events.models import SystemTask, TaskStatus
from authentik.rbac.decorators import permission_required
@ -39,7 +39,7 @@ class SystemTaskSerializer(ModelSerializer):
duration = FloatField(read_only=True)
status = ChoiceField(choices=[(x.value, x.name) for x in TaskStatus])
messages = LogEventSerializer(many=True)
messages = ListField(child=CharField())
def get_full_name(self, instance: SystemTask) -> str:
"""Get full name with UID"""
@ -60,8 +60,6 @@ class SystemTaskSerializer(ModelSerializer):
"duration",
"status",
"messages",
"expires",
"expiring",
]

View File

@ -1,82 +0,0 @@
from collections.abc import Generator
from contextlib import contextmanager
from dataclasses import dataclass, field
from datetime import datetime
from typing import Any
from django.utils.timezone import now
from rest_framework.fields import CharField, ChoiceField, DateTimeField, DictField
from structlog import configure, get_config
from structlog.stdlib import NAME_TO_LEVEL, ProcessorFormatter
from structlog.testing import LogCapture
from structlog.types import EventDict
from authentik.core.api.utils import PassiveSerializer
from authentik.events.utils import sanitize_dict
@dataclass()
class LogEvent:
event: str
log_level: str
logger: str
timestamp: datetime = field(default_factory=now)
attributes: dict[str, Any] = field(default_factory=dict)
@staticmethod
def from_event_dict(item: EventDict) -> "LogEvent":
event = item.pop("event")
log_level = item.pop("level").lower()
timestamp = datetime.fromisoformat(item.pop("timestamp"))
item.pop("pid", None)
# Sometimes log entries have both `level` and `log_level` set, but `level` is always set
item.pop("log_level", None)
return LogEvent(
event, log_level, item.pop("logger"), timestamp, attributes=sanitize_dict(item)
)
class LogEventSerializer(PassiveSerializer):
"""Single log message with all context logged."""
timestamp = DateTimeField()
log_level = ChoiceField(choices=tuple((x, x) for x in NAME_TO_LEVEL.keys()))
logger = CharField()
event = CharField()
attributes = DictField()
# TODO(2024.6?): This is a migration helper to return a correct API response for logs that
# have been saved in an older format (mostly just list[str] with just the messages)
def to_representation(self, instance):
if isinstance(instance, str):
instance = LogEvent(instance, "", "")
elif isinstance(instance, list):
instance = [LogEvent(x, "", "") for x in instance]
return super().to_representation(instance)
@contextmanager
def capture_logs(log_default_output=True) -> Generator[list[LogEvent], None, None]:
"""Capture log entries created"""
logs = []
cap = LogCapture()
# Modify `_Configuration.default_processors` set via `configure` but always
# keep the list instance intact to not break references held by bound
# loggers.
processors: list = get_config()["processors"]
old_processors = processors.copy()
try:
# clear processors list and use LogCapture for testing
if ProcessorFormatter.wrap_for_formatter in processors:
processors.remove(ProcessorFormatter.wrap_for_formatter)
processors.append(cap)
configure(processors=processors)
yield logs
for raw_log in cap.entries:
logs.append(LogEvent.from_event_dict(raw_log))
finally:
# remove LogCapture and restore original processors
processors.clear()
processors.extend(old_processors)
configure(processors=processors)
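
A brief usage sketch for capture_logs, grounded in the code above: the yielded list is filled with LogEvent entries once the with-block exits, and keys not consumed by from_event_dict end up in LogEvent.attributes:

from structlog.stdlib import get_logger

LOGGER = get_logger()

with capture_logs() as logs:
    LOGGER.warning("sync failed", provider="example")
# Entries are appended after the block exits:
assert logs[0].event == "sync failed"
assert logs[0].log_level == "warning"
assert logs[0].attributes["provider"] == "example"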

View File

@ -1,8 +1,6 @@
"""Events middleware"""
from collections.abc import Callable
from contextlib import contextmanager
from contextvars import ContextVar
from functools import partial
from threading import Thread
from typing import Any
@ -33,9 +31,6 @@ IGNORED_MODELS = tuple(
)
)
_CTX_OVERWRITE_USER = ContextVar[User | None]("authentik_events_log_overwrite_user", default=None)
_CTX_IGNORE = ContextVar[bool]("authentik_events_log_ignore", default=False)
def should_log_model(model: Model) -> bool:
"""Return true if operation on `model` should be logged"""
@ -49,28 +44,6 @@ def should_log_m2m(model: Model) -> bool:
return False
@contextmanager
def audit_overwrite_user(user: User):
"""Overwrite user being logged for model AuditMiddleware. Commonly used
for example in flows where a pending user is given, but the request is not authenticated yet"""
_CTX_OVERWRITE_USER.set(user)
try:
yield
finally:
_CTX_OVERWRITE_USER.set(None)
@contextmanager
def audit_ignore():
"""Ignore model operations in the block. Useful for objects which need to be modified
but are not excluded (e.g. WebAuthn devices)"""
_CTX_IGNORE.set(True)
try:
yield
finally:
_CTX_IGNORE.set(False)
class EventNewThread(Thread):
"""Create Event in background thread"""
@ -110,32 +83,26 @@ class AuditMiddleware:
self.anonymous_user = get_anonymous_user()
def get_user(self, request: HttpRequest) -> User:
user = _CTX_OVERWRITE_USER.get()
if user:
return user
user = getattr(request, "user", self.anonymous_user)
if not user.is_authenticated:
self._ensure_fallback_user()
return self.anonymous_user
return user
def connect(self, request: HttpRequest):
"""Connect signal for automatic logging"""
self._ensure_fallback_user()
user = getattr(request, "user", self.anonymous_user)
if not user.is_authenticated:
user = self.anonymous_user
if not hasattr(request, "request_id"):
return
post_save.connect(
partial(self.post_save_handler, request=request),
partial(self.post_save_handler, user=user, request=request),
dispatch_uid=request.request_id,
weak=False,
)
pre_delete.connect(
partial(self.pre_delete_handler, request=request),
partial(self.pre_delete_handler, user=user, request=request),
dispatch_uid=request.request_id,
weak=False,
)
m2m_changed.connect(
partial(self.m2m_changed_handler, request=request),
partial(self.m2m_changed_handler, user=user, request=request),
dispatch_uid=request.request_id,
weak=False,
)
@ -180,6 +147,7 @@ class AuditMiddleware:
def post_save_handler(
self,
user: User,
request: HttpRequest,
sender,
instance: Model,
@ -190,22 +158,16 @@ class AuditMiddleware:
"""Signal handler for all object's post_save"""
if not should_log_model(instance):
return
if _CTX_IGNORE.get():
return
user = self.get_user(request)
action = EventAction.MODEL_CREATED if created else EventAction.MODEL_UPDATED
thread = EventNewThread(action, request, user=user, model=model_to_dict(instance))
thread.kwargs.update(thread_kwargs or {})
thread.run()
def pre_delete_handler(self, request: HttpRequest, sender, instance: Model, **_):
def pre_delete_handler(self, user: User, request: HttpRequest, sender, instance: Model, **_):
"""Signal handler for all object's pre_delete"""
if not should_log_model(instance): # pragma: no cover
return
if _CTX_IGNORE.get():
return
user = self.get_user(request)
EventNewThread(
EventAction.MODEL_DELETED,
@ -215,27 +177,17 @@ class AuditMiddleware:
).run()
def m2m_changed_handler(
self,
request: HttpRequest,
sender,
instance: Model,
action: str,
thread_kwargs: dict | None = None,
**_,
self, user: User, request: HttpRequest, sender, instance: Model, action: str, **_
):
"""Signal handler for all object's m2m_changed"""
if action not in ["pre_add", "pre_remove", "post_clear"]:
return
if not should_log_m2m(instance):
return
if _CTX_IGNORE.get():
return
user = self.get_user(request)
EventNewThread(
EventAction.MODEL_UPDATED,
request,
user=user,
model=model_to_dict(instance),
**thread_kwargs,
).run()
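
For context, a usage sketch for the two audit context managers shown in this hunk, grounded in their docstrings; `device` and `pending_user` are illustrative objects, not names from this diff:

from authentik.events.middleware import audit_ignore, audit_overwrite_user

# Suppress audit events for writes that should not be logged (e.g. WebAuthn device counters):
with audit_ignore():
    device.save()

# Attribute model changes to a pending user while the request is still unauthenticated:
with audit_overwrite_user(pending_user):
    pending_user.attributes["initial-setup"] = True
    pending_user.save()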

View File

@ -1,21 +0,0 @@
# Generated by Django 5.0.2 on 2024-02-29 10:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"authentik_events",
"0004_systemtask_squashed_0005_remove_systemtask_finish_timestamp_and_more",
),
]
operations = [
migrations.AlterField(
model_name="systemtask",
name="expires",
field=models.DateTimeField(default=None, null=True),
),
]

View File

@ -1,39 +0,0 @@
# Generated by Django 5.0.4 on 2024-04-15 16:17
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_events", "0006_alter_systemtask_expires"),
]
operations = [
migrations.AddIndex(
model_name="event",
index=models.Index(fields=["action"], name="authentik_e_action_9a9dd9_idx"),
),
migrations.AddIndex(
model_name="event",
index=models.Index(fields=["user"], name="authentik_e_user_1be48d_idx"),
),
migrations.AddIndex(
model_name="event",
index=models.Index(fields=["app"], name="authentik_e_app_6a05ce_idx"),
),
migrations.AddIndex(
model_name="event",
index=models.Index(fields=["created"], name="authentik_e_created_6f0834_idx"),
),
migrations.AddIndex(
model_name="event",
index=models.Index(fields=["client_ip"], name="authentik_e_client__51f4dd_idx"),
),
migrations.AddIndex(
model_name="event",
index=models.Index(
models.F("context__authorized_application"), name="authentik_e_ctx_app__idx"
),
),
]

View File

@ -305,16 +305,6 @@ class Event(SerializerModel, ExpiringModel):
class Meta:
verbose_name = _("Event")
verbose_name_plural = _("Events")
indexes = [
models.Index(fields=["action"]),
models.Index(fields=["user"]),
models.Index(fields=["app"]),
models.Index(fields=["created"]),
models.Index(fields=["client_ip"]),
models.Index(
models.F("context__authorized_application"), name="authentik_e_ctx_app__idx"
),
]
class TransportMode(models.TextChoices):
@ -462,13 +452,6 @@ class NotificationTransport(SerializerModel):
def send_email(self, notification: "Notification") -> list[str]:
"""Send notification via global email configuration"""
if notification.user.email.strip() == "":
LOGGER.info(
"Discarding notification as user has no email address",
user=notification.user,
notification=notification,
)
return None
subject_prefix = "authentik Notification: "
context = {
"key_value": {
@ -556,7 +539,7 @@ class Notification(SerializerModel):
if len(self.body) > NOTIFICATION_SUMMARY_LENGTH
else self.body
)
return f"Notification for user {self.user_id}: {body_trunc}"
return f"Notification for user {self.user}: {body_trunc}"
class Meta:
verbose_name = _("Notification")

Some files were not shown because too many files have changed in this diff.