Compare commits


5 Commits

Author SHA1 Message Date
8543e140ef stages/consent: fix error when requests with identical empty permissions
closes #3280

Signed-off-by: Jens Langhammer <jens.langhammer@beryju.org>
2022-08-01 20:58:25 +02:00
e2cf578afd root: use updated schema
Signed-off-by: Jens Langhammer <jens.langhammer@beryju.org>
2022-08-01 10:11:06 +02:00
0ce9fd9b2e web: fix updates
Signed-off-by: Jens Langhammer <jens.langhammer@beryju.org>
2022-08-01 09:59:57 +02:00
d17ad65435 web: bump api client to match
Signed-off-by: Jens Langhammer <jens.langhammer@beryju.org>
2022-07-31 23:54:29 +02:00
01529d3894 flows/stages/consent: fix for post requests (#3339)
add unique token to consent stage to ensure it is shown

Signed-off-by: Jens Langhammer <jens.langhammer@beryju.org>
2022-07-31 23:53:06 +02:00
389 changed files with 6084 additions and 10035 deletions

View File

@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2022.8.2
+current_version = 2022.7.3
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)

View File

@@ -1,92 +0,0 @@
name: 'Comment usage instructions on PRs'
description: 'Comment usage instructions on PRs'
inputs:
tag:
description: "Image tag to pull"
required: true
runs:
using: "composite"
steps:
- name: Generate config
id: ev
shell: python
run: |
"""Helper script to get the actual branch name, docker safe"""
import os
from time import time
env_pr_branch = "GITHUB_HEAD_REF"
default_branch = "GITHUB_REF"
sha = "GITHUB_SHA"
branch_name = os.environ[default_branch]
if os.environ.get(env_pr_branch, "") != "":
branch_name = os.environ[env_pr_branch]
should_build = str(os.environ.get("DOCKER_USERNAME", "") != "").lower()
print("##[set-output name=branchName]%s" % branch_name)
print(
"##[set-output name=branchNameContainer]%s"
% branch_name.replace("refs/heads/", "").replace("/", "-")
)
print("##[set-output name=timestamp]%s" % int(time()))
print("##[set-output name=sha]%s" % os.environ[sha])
print("##[set-output name=shouldBuild]%s" % should_build)
import configparser
parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
version = parser.get("bumpversion", "current_version")
version_family = ".".join(version.split(".")[:-1])
print("##[set-output name=version]%s" % version)
print("##[set-output name=versionFamily]%s" % version_family)
- name: Find Comment
uses: peter-evans/find-comment@v2
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: authentik PR Installation instructions
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v2
with:
comment-id: ${{ steps.fc.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
body: |
authentik PR Installation instructions
<details>
<summary>Instructions for docker-compose</summary>
Add the following block to your `.env` file:
```shell
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
AUTHENTIK_TAG=${{ inputs.tag }}
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
<details>
<summary>Instructions for Kubernetes</summary>
Add the following block to your `values.yml` file:
```yaml
authentik:
outposts:
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
image:
repository: ghcr.io/goauthentik/dev-server
tag: ${{ inputs.tag }}
# pullPolicy: Always to ensure you always get the latest version
pullPolicy: Always
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
edit-mode: replace

View File

@@ -235,9 +235,3 @@ jobs:
 GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
 VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
 platforms: ${{ matrix.arch }}
-- name: Comment on PR
-if: github.event_name == 'pull_request'
-continue-on-error: true
-uses: ./.github/actions/comment-pr-instructions
-with:
-tag: gh-${{ steps.ev.outputs.branchNameContainer }}

View File

@@ -138,7 +138,7 @@ jobs:
 docker-compose pull -q
 docker-compose up --no-start
 docker-compose start postgresql redis
-docker-compose run -u root server test-all
+docker-compose run -u root server test
 sentry-release:
 needs:
 - build-server
@@ -157,7 +157,6 @@ jobs:
 docker cp ${container}:web/ .
 - name: Create a Sentry.io release
 uses: getsentry/action-release@v1
-continue-on-error: true
 if: ${{ github.event_name == 'release' }}
 env:
 SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}

View File

@@ -16,12 +16,15 @@ jobs:
 echo "PG_PASS=$(openssl rand -base64 32)" >> .env
 echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
 docker buildx install
-docker build -t testing:latest .
+docker build \
+--no-cache \
+-t testing:latest \
+-f Dockerfile .
 echo "AUTHENTIK_IMAGE=testing" >> .env
 echo "AUTHENTIK_TAG=latest" >> .env
 docker-compose up --no-start
 docker-compose start postgresql redis
-docker-compose run -u root server test-all
+docker-compose run -u root server test
 - name: Extract version number
 id: get_version
 uses: actions/github-script@v6

View File

@@ -35,8 +35,8 @@ jobs:
 with:
 token: ${{ secrets.GITHUB_TOKEN }}
 branch: update-web-api-client
-commit-message: "web: bump API Client version"
-title: "web: bump API Client version"
-body: "web: bump API Client version"
+commit-message: "web: Update Web API Client version"
+title: "web: Update Web API Client version"
+body: "web: Update Web API Client version"
 delete-branch: true
 signoff: true

.vscode/settings.json vendored (12 changed lines)
View File

@@ -20,15 +20,11 @@
 "todo-tree.tree.showCountsInTree": true,
 "todo-tree.tree.showBadges": true,
 "python.formatting.provider": "black",
-"yaml.customTags": [
-"!Find sequence",
-"!KeyOf scalar"
-],
+"files.associations": {
+"*.akflow": "json"
+},
 "typescript.preferences.importModuleSpecifier": "non-relative",
 "typescript.preferences.importModuleSpecifierEnding": "index",
 "typescript.tsdk": "./web/node_modules/typescript/lib",
-"typescript.enablePromptUseWorkspaceTsdk": true,
-"yaml.schemas": {
-"./blueprints/schema.json": "blueprints/**/*.yaml"
-}
+"typescript.enablePromptUseWorkspaceTsdk": true
 }

View File

@@ -2,7 +2,6 @@
 FROM --platform=${BUILDPLATFORM} docker.io/node:18 as website-builder
 COPY ./website /work/website/
-COPY ./blueprints /work/blueprints/
 ENV NODE_ENV=production
 WORKDIR /work/website
@@ -19,7 +18,7 @@ WORKDIR /work/web
 RUN npm ci && npm run build
 # Stage 3: Poetry to requirements.txt export
-FROM docker.io/python:3.10.6-slim-bullseye AS poetry-locker
+FROM docker.io/python:3.10.5-slim-bullseye AS poetry-locker
 WORKDIR /work
 COPY ./pyproject.toml /work
@@ -30,7 +29,7 @@ RUN pip install --no-cache-dir poetry && \
 poetry export -f requirements.txt --dev --output requirements-dev.txt
 # Stage 4: Build go proxy
-FROM docker.io/golang:1.19.0-bullseye AS go-builder
+FROM docker.io/golang:1.18.4-bullseye AS builder
 WORKDIR /work
@@ -46,7 +45,7 @@ COPY ./go.sum /work/go.sum
 RUN go build -o /work/authentik ./cmd/server/main.go
 # Stage 5: Run
-FROM docker.io/python:3.10.6-slim-bullseye AS final-image
+FROM docker.io/python:3.10.5-slim-bullseye
 LABEL org.opencontainers.image.url https://goauthentik.io
 LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
@@ -73,7 +72,7 @@ RUN apt-get update && \
 apt-get clean && \
 rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
 adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
-mkdir -p /certs /media /blueprints && \
+mkdir -p /certs /media && \
 mkdir -p /authentik/.ssh && \
 chown authentik:authentik /certs /media /authentik/.ssh
@@ -82,14 +81,13 @@ COPY ./pyproject.toml /
 COPY ./xml /xml
 COPY ./tests /tests
 COPY ./manage.py /
-COPY ./blueprints /blueprints
 COPY ./lifecycle/ /lifecycle
-COPY --from=go-builder /work/authentik /authentik-proxy
+COPY --from=builder /work/authentik /authentik-proxy
 COPY --from=web-builder /work/web/dist/ /web/dist/
 COPY --from=web-builder /work/web/authentik/ /web/authentik/
 COPY --from=website-builder /work/website/help/ /website/help/
-USER 1000
+USER authentik
 ENV TMPDIR /dev/shm/
 ENV PYTHONUNBUFFERED 1
@@ -97,4 +95,4 @@ ENV PATH "/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin
 HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "/lifecycle/ak", "healthcheck" ]
-ENTRYPOINT [ "/usr/local/bin/dumb-init", "--", "/lifecycle/ak" ]
+ENTRYPOINT [ "/lifecycle/ak" ]

View File

@@ -33,8 +33,8 @@ test:
 coverage report
 lint-fix:
-isort authentik tests scripts lifecycle
-black authentik tests scripts lifecycle
+isort authentik tests lifecycle
+black authentik tests lifecycle
 codespell -I .github/codespell-words.txt -S 'web/src/locales/**' -w \
 authentik \
 internal \
@@ -52,11 +52,10 @@ lint:
 i18n-extract: i18n-extract-core web-extract
 i18n-extract-core:
-ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en
+./manage.py makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en
 gen-build:
-AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
-AUTHENTIK_DEBUG=true ak spectacular --file schema.yml
+AUTHENTIK_DEBUG=true ./manage.py spectacular --file schema.yml
 gen-clean:
 rm -rf web/api/src/
@@ -92,9 +91,6 @@ gen-client-go:
 go mod edit -replace goauthentik.io/api/v3=./gen-go-api
 rm -rf config.yaml ./templates/
-gen-dev-config:
-python -m scripts.generate_config
 gen: gen-build gen-clean gen-client-web
 migrate:
@@ -169,16 +165,7 @@ ci-pyright: ci--meta-debug
 pyright e2e lifecycle
 ci-pending-migrations: ci--meta-debug
-ak makemigrations --check
+./manage.py makemigrations --check
 install: web-install website-install
 poetry install
-dev-reset:
-dropdb -U postgres -h localhost authentik
-createdb -U postgres -h localhost authentik
-redis-cli -n 0 flushall
-redis-cli -n 1 flushall
-redis-cli -n 2 flushall
-redis-cli -n 3 flushall
-make migrate

View File

@@ -6,9 +6,9 @@
 | Version | Supported |
 | ---------- | ------------------ |
+| 2022.5.x | :white_check_mark: |
 | 2022.6.x | :white_check_mark: |
 | 2022.7.x | :white_check_mark: |
-| 2022.8.x | :white_check_mark: |
 ## Reporting a Vulnerability

View File

@@ -2,7 +2,7 @@
 from os import environ
 from typing import Optional
-__version__ = "2022.8.2"
+__version__ = "2022.7.3"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@@ -16,7 +16,7 @@ from rest_framework.response import Response
 from rest_framework.views import APIView
 from authentik.core.api.utils import PassiveSerializer
-from authentik.outposts.apps import MANAGED_OUTPOST
+from authentik.outposts.managed import MANAGED_OUTPOST
 from authentik.outposts.models import Outpost

View File

@@ -1,20 +1,19 @@
 """authentik admin app config"""
-from prometheus_client import Gauge, Info
-from authentik.blueprints.manager import ManagedAppConfig
+from importlib import import_module
+from django.apps import AppConfig
+from prometheus_client import Gauge, Info
 PROM_INFO = Info("authentik_version", "Currently running authentik version")
 GAUGE_WORKERS = Gauge("authentik_admin_workers", "Currently connected workers")
-class AuthentikAdminConfig(ManagedAppConfig):
+class AuthentikAdminConfig(AppConfig):
 """authentik admin app config"""
 name = "authentik.admin"
 label = "authentik_admin"
 verbose_name = "authentik Admin"
-default = True
-def reconcile_load_admin_signals(self):
-"""Load admin signals"""
-self.import_module("authentik.admin.signals")
+def ready(self):
+import_module("authentik.admin.signals")

View File

@@ -3,7 +3,6 @@ import re
 from django.core.cache import cache
 from django.core.validators import URLValidator
-from django.db import DatabaseError, InternalError, ProgrammingError
 from packaging.version import parse
 from requests import RequestException
 from structlog.stdlib import get_logger
@@ -40,9 +39,7 @@ def _set_prom_info():
 )
-@CELERY_APP.task(
-throws=(DatabaseError, ProgrammingError, InternalError),
-)
+@CELERY_APP.task()
 def clear_update_notifications():
 """Clear update notifications on startup if the notification was for the version
 we're running now."""

View File

@@ -5,10 +5,10 @@ from django.test import TestCase
 from django.urls import reverse
 from authentik import __version__
-from authentik.blueprints.tests import reconcile_app
 from authentik.core.models import Group, User
 from authentik.core.tasks import clean_expired_models
 from authentik.events.monitored_tasks import TaskResultStatus
+from authentik.managed.tasks import managed_reconcile
 class TestAdminAPI(TestCase):
@@ -93,8 +93,9 @@ class TestAdminAPI(TestCase):
 response = self.client.get(reverse("authentik_api:apps-list"))
 self.assertEqual(response.status_code, 200)
-@reconcile_app("authentik_outposts")
 def test_system(self):
 """Test system API"""
+# pyright: reportGeneralTypeIssues=false
+managed_reconcile() # pylint: disable=no-value-for-parameter
 response = self.client.get(reverse("authentik_api:admin_system"))
 self.assertEqual(response.status_code, 200)

View File

@@ -7,7 +7,7 @@ from rest_framework.exceptions import AuthenticationFailed
 from rest_framework.request import Request
 from structlog.stdlib import get_logger
-from authentik.core.middleware import CTX_AUTH_VIA
+from authentik.core.middleware import KEY_AUTH_VIA, LOCAL
 from authentik.core.models import Token, TokenIntents, User
 from authentik.outposts.models import Outpost
 from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
@@ -36,12 +36,14 @@ def bearer_auth(raw_header: bytes) -> Optional[User]:
 auth_credentials = validate_auth(raw_header)
 if not auth_credentials:
 return None
+if not hasattr(LOCAL, "authentik"):
+LOCAL.authentik = {}
 # first, check traditional tokens
 key_token = Token.filter_not_expired(
 key=auth_credentials, intent=TokenIntents.INTENT_API
 ).first()
 if key_token:
-CTX_AUTH_VIA.set("api_token")
+LOCAL.authentik[KEY_AUTH_VIA] = "api_token"
 return key_token.user
 # then try to auth via JWT
 jwt_token = RefreshToken.filter_not_expired(
@@ -52,12 +54,12 @@ def bearer_auth(raw_header: bytes) -> Optional[User]:
 # we want to check the parsed version too
 if SCOPE_AUTHENTIK_API not in jwt_token.scope:
 raise AuthenticationFailed("Token invalid/expired")
-CTX_AUTH_VIA.set("jwt")
+LOCAL.authentik[KEY_AUTH_VIA] = "jwt"
 return jwt_token.user
 # then try to auth via secret key (for embedded outpost/etc)
 user = token_secret_key(auth_credentials)
 if user:
-CTX_AUTH_VIA.set("secret_key")
+LOCAL.authentik[KEY_AUTH_VIA] = "secret_key"
 return user
 raise AuthenticationFailed("Token invalid/expired")
@@ -65,7 +67,7 @@ def bearer_auth(raw_header: bytes) -> Optional[User]:
 def token_secret_key(value: str) -> Optional[User]:
 """Check if the token is the secret key
 and return the service account for the managed outpost"""
-from authentik.outposts.apps import MANAGED_OUTPOST
+from authentik.outposts.managed import MANAGED_OUTPOST
 if value != settings.SECRET_KEY:
 return None

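Note on the hunks above: the base branch of this compare records how a request was authenticated in a `ContextVar` (`CTX_AUTH_VIA`), while the 2022.7.3 branch keeps the older `threading.local()` dict (`LOCAL.authentik[KEY_AUTH_VIA]`). A minimal, self-contained sketch of the two patterns, using illustrative names rather than authentik's actual middleware:

```python
import threading
from contextvars import ContextVar

# Thread-local storage: one value per thread; the dict has to be created lazily,
# which is what the hasattr() guard in the hunk above does.
LOCAL = threading.local()


def set_auth_via_threadlocal(value: str) -> None:
    if not hasattr(LOCAL, "authentik"):
        LOCAL.authentik = {}
    LOCAL.authentik["auth_via"] = value


# ContextVar: scoped to the current execution context, so it also isolates
# values between asyncio tasks, and needs no lazy initialisation.
CTX_AUTH_VIA: ContextVar[str] = ContextVar("auth_via", default="unauthenticated")


def set_auth_via_contextvar(value: str) -> None:
    CTX_AUTH_VIA.set(value)


if __name__ == "__main__":
    set_auth_via_threadlocal("api_token")
    set_auth_via_contextvar("api_token")
    print(LOCAL.authentik["auth_via"], CTX_AUTH_VIA.get())
```

Both isolate the marker per request in a threaded worker; the `ContextVar` form is the one that also behaves correctly under async execution, which is presumably why the base branch moved to it.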
View File

@@ -7,10 +7,10 @@ from guardian.shortcuts import get_anonymous_user
 from rest_framework.exceptions import AuthenticationFailed
 from authentik.api.authentication import bearer_auth
-from authentik.blueprints.tests import reconcile_app
 from authentik.core.models import USER_ATTRIBUTE_SA, Token, TokenIntents
 from authentik.core.tests.utils import create_test_flow
 from authentik.lib.generators import generate_id
+from authentik.outposts.managed import OutpostManager
 from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
 from authentik.providers.oauth2.models import OAuth2Provider, RefreshToken
@@ -42,11 +42,9 @@ class TestAPIAuth(TestCase):
 def test_managed_outpost(self):
 """Test managed outpost"""
 with self.assertRaises(AuthenticationFailed):
-bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
-@reconcile_app("authentik_outposts")
-def test_managed_outpost_success(self):
-"""Test managed outpost"""
+user = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
+OutpostManager().run()
 user = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
 self.assertEqual(user.attributes[USER_ATTRIBUTE_SA], True)

View File

@@ -68,9 +68,10 @@ class ConfigView(APIView):
 caps.append(Capabilities.CAN_IMPERSONATE)
 return caps
-def get_config(self) -> ConfigSerializer:
-"""Get Config"""
-return ConfigSerializer(
+@extend_schema(responses={200: ConfigSerializer(many=False)})
+def get(self, request: Request) -> Response:
+"""Retrieve public configuration options"""
+config = ConfigSerializer(
 {
 "error_reporting": {
 "enabled": CONFIG.y("error_reporting.enabled"),
@@ -85,8 +86,4 @@ class ConfigView(APIView):
 "cache_timeout_reputation": int(CONFIG.y("redis.cache_timeout_reputation")),
 }
 )
-@extend_schema(responses={200: ConfigSerializer(many=False)})
-def get(self, request: Request) -> Response:
-"""Retrieve public configuration options"""
-return Response(self.get_config().data)
+return Response(config.data)

View File

@@ -12,10 +12,9 @@ from authentik.admin.api.version import VersionView
 from authentik.admin.api.workers import WorkerView
 from authentik.api.v3.config import ConfigView
 from authentik.api.views import APIBrowserView
-from authentik.blueprints.api import BlueprintInstanceViewSet
 from authentik.core.api.applications import ApplicationViewSet
 from authentik.core.api.authenticated_sessions import AuthenticatedSessionViewSet
-from authentik.core.api.devices import AdminDeviceViewSet, DeviceViewSet
+from authentik.core.api.devices import DeviceViewSet
 from authentik.core.api.groups import GroupViewSet
 from authentik.core.api.propertymappings import PropertyMappingViewSet
 from authentik.core.api.providers import ProviderViewSet
@@ -132,8 +131,6 @@ router.register("events/notifications", NotificationViewSet)
 router.register("events/transports", NotificationTransportViewSet)
 router.register("events/rules", NotificationRuleViewSet)
-router.register("managed/blueprints", BlueprintInstanceViewSet)
 router.register("sources/all", SourceViewSet)
 router.register("sources/user_connections/all", UserSourceConnectionViewSet)
 router.register("sources/user_connections/oauth", UserOAuthSourceConnectionViewSet)
@@ -174,11 +171,6 @@ router.register("authenticators/sms", SMSDeviceViewSet)
 router.register("authenticators/static", StaticDeviceViewSet)
 router.register("authenticators/totp", TOTPDeviceViewSet)
 router.register("authenticators/webauthn", WebAuthnDeviceViewSet)
-router.register(
-"authenticators/admin/all",
-AdminDeviceViewSet,
-basename="admin-device",
-)
 router.register(
 "authenticators/admin/duo",
 DuoAdminDeviceViewSet,

View File

@@ -1,109 +0,0 @@
"""Serializer mixin for managed models"""
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, DateTimeField, JSONField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer, ModelSerializer
from rest_framework.viewsets import ModelViewSet
from authentik.api.decorators import permission_required
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer
class ManagedSerializer:
"""Managed Serializer"""
managed = CharField(read_only=True, allow_null=True)
class MetadataSerializer(PassiveSerializer):
"""Serializer for blueprint metadata"""
name = CharField()
labels = JSONField()
class BlueprintInstanceSerializer(ModelSerializer):
"""Info about a single blueprint instance file"""
def validate_path(self, path: str) -> str:
"""Ensure the path specified is retrievable"""
try:
BlueprintInstance(path=path).retrieve()
except BlueprintRetrievalFailed as exc:
raise ValidationError(exc) from exc
return path
class Meta:
model = BlueprintInstance
fields = [
"pk",
"name",
"path",
"context",
"last_applied",
"last_applied_hash",
"status",
"enabled",
"managed_models",
"metadata",
]
extra_kwargs = {
"status": {"read_only": True},
"last_applied": {"read_only": True},
"last_applied_hash": {"read_only": True},
"managed_models": {"read_only": True},
"metadata": {"read_only": True},
}
class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
"""Blueprint instances"""
permission_classes = [IsAdminUser]
serializer_class = BlueprintInstanceSerializer
queryset = BlueprintInstance.objects.all()
search_fields = ["name", "path"]
filterset_fields = ["name", "path"]
@extend_schema(
responses={
200: ListSerializer(
child=inline_serializer(
"BlueprintFile",
fields={
"path": CharField(),
"last_m": DateTimeField(),
"hash": CharField(),
"meta": MetadataSerializer(required=False, read_only=True),
},
)
)
}
)
@action(detail=False, pagination_class=None, filter_backends=[])
def available(self, request: Request) -> Response:
"""Get blueprints"""
files: list[dict] = blueprints_find_dict.delay().get()
return Response(files)
@permission_required("authentik_blueprints.view_blueprintinstance")
@extend_schema(
request=None,
responses={
200: BlueprintInstanceSerializer(),
},
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
def apply(self, request: Request, *args, **kwargs) -> Response:
"""Apply a blueprint"""
blueprint = self.get_object()
apply_blueprint.delay(str(blueprint.pk)).get()
return self.retrieve(request, *args, **kwargs)
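The file above is removed in this compare; on the branch that still ships it, the viewset is reachable through the `managed/blueprints` route registered in `authentik/api/v3/urls.py` (see the urls.py hunk earlier on this page). A hedged sketch of driving it over HTTP follows; the host, token, `/api/v3/` prefix and paginated response envelope are assumptions, not taken from this diff:

```python
# Hypothetical client for the BlueprintInstanceViewSet shown above.
import requests

BASE_URL = "https://authentik.example.com/api/v3"  # assumed API prefix and host
TOKEN = "<api-token>"  # assumed token with API intent

session = requests.Session()
session.headers["Authorization"] = f"Bearer {TOKEN}"

# List configured blueprint instances (standard ModelViewSet list route).
instances = session.get(f"{BASE_URL}/managed/blueprints/").json()

# List blueprint files found on disk (the `available` action).
files = session.get(f"{BASE_URL}/managed/blueprints/available/").json()

# Re-apply the first instance, if any (the `apply` action).
for instance in instances.get("results", []):  # assumes DRF-style pagination
    session.post(f"{BASE_URL}/managed/blueprints/{instance['pk']}/apply/").raise_for_status()
    break
```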

View File

@@ -1,22 +0,0 @@
"""authentik Blueprints app"""
from authentik.blueprints.manager import ManagedAppConfig
class AuthentikBlueprintsConfig(ManagedAppConfig):
"""authentik Blueprints app"""
name = "authentik.blueprints"
label = "authentik_blueprints"
verbose_name = "authentik Blueprints"
default = True
def reconcile_load_blueprints_v1_tasks(self):
"""Load v1 tasks"""
self.import_module("authentik.blueprints.v1.tasks")
def reconcile_blueprints_discover(self):
"""Run blueprint discovery"""
from authentik.blueprints.v1.tasks import blueprints_discover
blueprints_discover.delay()

View File

@@ -1,28 +0,0 @@
"""Apply blueprint from commandline"""
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.importer import Importer
LOGGER = get_logger()
class Command(BaseCommand):
"""Apply blueprint from commandline"""
@no_translations
def handle(self, *args, **options):
"""Apply all blueprints in order, abort when one fails to import"""
for blueprint_path in options.get("blueprints", []):
content = BlueprintInstance(path=blueprint_path).retrieve()
importer = Importer(content)
valid, logs = importer.validate()
if not valid:
for log in logs:
LOGGER.debug(**log)
raise ValueError("blueprint invalid")
importer.apply()
def add_arguments(self, parser):
parser.add_argument("blueprints", nargs="+", type=str)

View File

@@ -1,17 +0,0 @@
"""Export blueprint of current authentik install"""
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
from authentik.blueprints.v1.exporter import Exporter
LOGGER = get_logger()
class Command(BaseCommand):
"""Export blueprint of current authentik install"""
@no_translations
def handle(self, *args, **options):
"""Export blueprint of current authentik install"""
exporter = Exporter()
self.stdout.write(exporter.export_to_string())

View File

@@ -1,38 +0,0 @@
"""Generate JSON Schema for blueprints"""
from json import dumps, loads
from pathlib import Path
from django.apps import apps
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.lib.models import SerializerModel
LOGGER = get_logger()
class Command(BaseCommand):
"""Generate JSON Schema for blueprints"""
schema: dict
@no_translations
def handle(self, *args, **options):
"""Generate JSON Schema for blueprints"""
path = Path(__file__).parent.joinpath("./schema_template.json")
with open(path, "r", encoding="utf-8") as _template_file:
self.schema = loads(_template_file.read())
self.set_model_allowed()
self.stdout.write(dumps(self.schema, indent=4))
def set_model_allowed(self):
"""Set model enum"""
model_names = []
for model in apps.get_models():
if not is_model_allowed(model):
continue
if SerializerModel not in model.__mro__:
continue
model_names.append(f"{model._meta.app_label}.{model._meta.model_name}")
self.schema["properties"]["entries"]["items"]["properties"]["model"]["enum"] = model_names

View File

@@ -1,90 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema",
"$id": "http://example.com/example.json",
"type": "object",
"title": "authentik Blueprint schema",
"default": {},
"required": [
"version",
"entries"
],
"properties": {
"version": {
"$id": "#/properties/version",
"type": "integer",
"title": "Blueprint version",
"default": 1
},
"metadata": {
"$id": "#/properties/metadata",
"type": "object",
"required": [
"name"
],
"properties": {
"name": {
"type": "string"
},
"labels": {
"type": "object"
}
}
},
"context": {
"$id": "#/properties/context",
"type": "object",
"additionalProperties": true
},
"entries": {
"type": "array",
"items": {
"$id": "#entry",
"type": "object",
"required": [
"model",
"identifiers"
],
"properties": {
"model": {
"type": "string",
"enum": [
"placeholder"
]
},
"id": {
"type": "string"
},
"attrs": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "Commonly available field, may not exist on all models"
}
},
"default": {},
"additionalProperties": true
},
"identifiers": {
"type": "object",
"properties": {
"pk": {
"description": "Commonly available field, may not exist on all models",
"anyOf": [
{
"type": "number"
},
{
"type": "string",
"format": "uuid"
}
]
}
},
"additionalProperties": true
}
}
}
}
}
}
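This template only becomes a usable schema once the `model` enum is filled in (the `make_blueprint_schema` command and the `gen-build` Makefile target earlier in this compare do that). A small, hedged sketch of validating a minimal blueprint against the raw template, assuming it is saved locally as `schema_template.json` and that the `jsonschema` package is available:

```python
import json

from jsonschema import validate  # third-party: pip install jsonschema

with open("schema_template.json", "r", encoding="utf-8") as schema_file:
    schema = json.load(schema_file)

blueprint = {
    "version": 1,
    "metadata": {"name": "example-blueprint", "labels": {}},
    "entries": [
        {
            # "placeholder" is the only enum value in the template; the generated
            # schema replaces it with real app_label.model_name pairs.
            "model": "placeholder",
            "identifiers": {"pk": "3fa85f64-5717-4562-b3fc-2c963f66afa6"},
            "attrs": {"name": "example"},
        }
    ],
}

validate(instance=blueprint, schema=schema)  # raises ValidationError on mismatch
print("blueprint matches the schema template")
```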

View File

@@ -1,44 +0,0 @@
"""Managed objects manager"""
from importlib import import_module
from inspect import ismethod
from django.apps import AppConfig
from django.db import DatabaseError, InternalError, ProgrammingError
from structlog.stdlib import BoundLogger, get_logger
LOGGER = get_logger()
class ManagedAppConfig(AppConfig):
"""Basic reconciliation logic for apps"""
_logger: BoundLogger
def __init__(self, app_name: str, *args, **kwargs) -> None:
super().__init__(app_name, *args, **kwargs)
self._logger = get_logger().bind(app_name=app_name)
def ready(self) -> None:
self.reconcile()
return super().ready()
def import_module(self, path: str):
"""Load module"""
import_module(path)
def reconcile(self) -> None:
"""reconcile ourselves"""
prefix = "reconcile_"
for meth_name in dir(self):
meth = getattr(self, meth_name)
if not ismethod(meth):
continue
if not meth_name.startswith(prefix):
continue
name = meth_name.replace(prefix, "")
try:
self._logger.debug("Starting reconciler", name=name)
meth()
self._logger.debug("Successfully reconciled", name=name)
except (DatabaseError, ProgrammingError, InternalError) as exc:
self._logger.debug("Failed to run reconcile", name=name, exc=exc)

View File

@@ -1,135 +0,0 @@
# Generated by Django 4.0.6 on 2022-07-31 17:35
import uuid
from glob import glob
from pathlib import Path
import django.contrib.postgres.fields
from dacite import from_dict
from django.apps.registry import Apps
from django.conf import settings
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from yaml import load
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM
from authentik.lib.config import CONFIG
def check_blueprint_v1_file(BlueprintInstance: type["BlueprintInstance"], path: Path):
"""Check if blueprint should be imported"""
from authentik.blueprints.models import BlueprintInstanceStatus
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
with open(path, "r", encoding="utf-8") as blueprint_file:
raw_blueprint = load(blueprint_file.read(), BlueprintLoader)
if not raw_blueprint:
return
metadata = raw_blueprint.get("metadata", None)
version = raw_blueprint.get("version", 1)
if version != 1:
return
blueprint_file.seek(0)
instance: BlueprintInstance = BlueprintInstance.objects.filter(path=path).first()
rel_path = path.relative_to(Path(CONFIG.y("blueprints_dir")))
meta = None
if metadata:
meta = from_dict(BlueprintMetadata, metadata)
if meta.labels.get(LABEL_AUTHENTIK_INSTANTIATE, "").lower() == "false":
return
if not instance:
instance = BlueprintInstance(
name=meta.name if meta else str(rel_path),
path=str(rel_path),
context={},
status=BlueprintInstanceStatus.UNKNOWN,
enabled=True,
managed_models=[],
last_applied_hash="",
metadata=metadata,
)
instance.save()
def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
BlueprintInstance = apps.get_model("authentik_blueprints", "BlueprintInstance")
Flow = apps.get_model("authentik_flows", "Flow")
db_alias = schema_editor.connection.alias
for file in glob(f"{CONFIG.y('blueprints_dir')}/**/*.yaml", recursive=True):
check_blueprint_v1_file(BlueprintInstance, Path(file))
for blueprint in BlueprintInstance.objects.using(db_alias).all():
# If we already have flows (and we should always run before flow migrations)
# then this is an existing install and we want to disable all blueprints
if Flow.objects.using(db_alias).all().exists():
blueprint.enabled = False
# System blueprints are always enabled
if blueprint.metadata.get("labels", {}).get(LABEL_AUTHENTIK_SYSTEM, "").lower() == "true":
blueprint.enabled = True
blueprint.save()
class Migration(migrations.Migration):
initial = True
dependencies = [("authentik_flows", "0001_initial")]
operations = [
migrations.CreateModel(
name="BlueprintInstance",
fields=[
("created", models.DateTimeField(auto_now_add=True)),
("last_updated", models.DateTimeField(auto_now=True)),
(
"managed",
models.TextField(
default=None,
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
null=True,
unique=True,
verbose_name="Managed by authentik",
),
),
(
"instance_uuid",
models.UUIDField(
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
),
),
("name", models.TextField()),
("metadata", models.JSONField(default=dict)),
("path", models.TextField()),
("context", models.JSONField(default=dict)),
("last_applied", models.DateTimeField(auto_now=True)),
("last_applied_hash", models.TextField()),
(
"status",
models.TextField(
choices=[
("successful", "Successful"),
("warning", "Warning"),
("error", "Error"),
("orphaned", "Orphaned"),
("unknown", "Unknown"),
],
default="unknown",
),
),
("enabled", models.BooleanField(default=True)),
(
"managed_models",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(), default=list, size=None
),
),
],
options={
"verbose_name": "Blueprint Instance",
"verbose_name_plural": "Blueprint Instances",
"unique_together": {("name", "path")},
},
),
migrations.RunPython(migration_blueprint_import),
]

View File

@@ -1,105 +0,0 @@
"""Managed Object models"""
from pathlib import Path
from urllib.parse import urlparse
from uuid import uuid4
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils.translation import gettext_lazy as _
from requests import RequestException
from rest_framework.serializers import Serializer
from authentik.lib.config import CONFIG
from authentik.lib.models import CreatedUpdatedModel, SerializerModel
from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.http import get_http_session
class BlueprintRetrievalFailed(SentryIgnoredException):
"""Error raised when we're unable to fetch the blueprint contents, whether it be HTTP files
not being accessible or local files not being readable"""
class ManagedModel(models.Model):
"""Model which can be managed by authentik exclusively"""
managed = models.TextField(
default=None,
null=True,
verbose_name=_("Managed by authentik"),
help_text=_(
(
"Objects which are managed by authentik. These objects are created and updated "
"automatically. This is flag only indicates that an object can be overwritten by "
"migrations. You can still modify the objects via the API, but expect changes "
"to be overwritten in a later update."
)
),
unique=True,
)
class Meta:
abstract = True
class BlueprintInstanceStatus(models.TextChoices):
"""Instance status"""
SUCCESSFUL = "successful"
WARNING = "warning"
ERROR = "error"
ORPHANED = "orphaned"
UNKNOWN = "unknown"
class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
"""Instance of a single blueprint. Can be parameterized via context attribute when
blueprint in `path` has inputs."""
instance_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
name = models.TextField()
metadata = models.JSONField(default=dict)
path = models.TextField()
context = models.JSONField(default=dict)
last_applied = models.DateTimeField(auto_now=True)
last_applied_hash = models.TextField()
status = models.TextField(
choices=BlueprintInstanceStatus.choices, default=BlueprintInstanceStatus.UNKNOWN
)
enabled = models.BooleanField(default=True)
managed_models = ArrayField(models.TextField(), default=list)
def retrieve(self) -> str:
"""Retrieve blueprint contents"""
if urlparse(self.path).scheme != "":
try:
res = get_http_session().get(self.path, timeout=3, allow_redirects=True)
res.raise_for_status()
return res.text
except RequestException as exc:
raise BlueprintRetrievalFailed(exc) from exc
path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
with path.open("r", encoding="utf-8") as _file:
return _file.read()
@property
def serializer(self) -> Serializer:
from authentik.blueprints.api import BlueprintInstanceSerializer
return BlueprintInstanceSerializer
def __str__(self) -> str:
return f"Blueprint Instance {self.name}"
class Meta:
verbose_name = _("Blueprint Instance")
verbose_name_plural = _("Blueprint Instances")
unique_together = (
(
"name",
"path",
),
)

View File

@@ -1,12 +0,0 @@
"""blueprint Settings"""
from celery.schedules import crontab
from authentik.lib.utils.time import fqdn_rand
CELERY_BEAT_SCHEDULE = {
"blueprints_v1_discover": {
"task": "authentik.blueprints.v1.tasks.blueprints_discover",
"schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"),
"options": {"queue": "authentik_scheduled"},
},
}

View File

@@ -1,48 +0,0 @@
"""Blueprint helpers"""
from functools import wraps
from pathlib import Path
from typing import Callable
from django.apps import apps
from authentik.blueprints.manager import ManagedAppConfig
from authentik.blueprints.models import BlueprintInstance
from authentik.lib.config import CONFIG
def apply_blueprint(*files: str):
"""Apply blueprint before test"""
from authentik.blueprints.v1.importer import Importer
def wrapper_outer(func: Callable):
"""Apply blueprint before test"""
@wraps(func)
def wrapper(*args, **kwargs):
for file in files:
content = BlueprintInstance(path=file).retrieve()
Importer(content).apply()
return func(*args, **kwargs)
return wrapper
return wrapper_outer
def reconcile_app(app_name: str):
"""Re-reconcile AppConfig methods"""
def wrapper_outer(func: Callable):
"""Re-reconcile AppConfig methods"""
@wraps(func)
def wrapper(*args, **kwargs):
config = apps.get_app_config(app_name)
if isinstance(config, ManagedAppConfig):
config.reconcile()
return func(*args, **kwargs)
return wrapper
return wrapper_outer

View File

@@ -1,34 +0,0 @@
"""authentik managed models tests"""
from typing import Callable, Type
from django.apps import apps
from django.test import TestCase
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.lib.models import SerializerModel
class TestModels(TestCase):
"""Test Models"""
def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
"""Test serializer"""
def tester(self: TestModels):
if test_model._meta.abstract:
return
model_class = test_model()
self.assertTrue(isinstance(model_class, SerializerModel))
self.assertIsNotNone(model_class.serializer)
return tester
for app in apps.get_app_configs():
if not app.label.startswith("authentik"):
continue
for model in app.get_models():
if not is_model_allowed(model):
continue
setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model))

View File

@@ -1,36 +0,0 @@
"""test packaged blueprints"""
from pathlib import Path
from typing import Callable
from django.test import TransactionTestCase
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.tests import apply_blueprint
from authentik.blueprints.v1.importer import Importer
from authentik.tenants.models import Tenant
class TestPackaged(TransactionTestCase):
"""Empty class, test methods are added dynamically"""
@apply_blueprint("default/90-default-tenant.yaml")
def test_decorator_static(self):
"""Test @apply_blueprint decorator"""
self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())
def blueprint_tester(file_name: Path) -> Callable:
"""This is used instead of subTest for better visibility"""
def tester(self: TestPackaged):
base = Path("blueprints/")
rel_path = Path(file_name).relative_to(base)
importer = Importer(BlueprintInstance(path=str(rel_path)).retrieve())
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
return tester
for blueprint_file in Path("blueprints/").glob("**/*.yaml"):
setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file))

View File

@@ -1,45 +0,0 @@
"""Test blueprints v1 api"""
from json import loads
from tempfile import NamedTemporaryFile, mkdtemp
from django.urls import reverse
from rest_framework.test import APITestCase
from yaml import dump
from authentik.core.tests.utils import create_test_admin_user
from authentik.lib.config import CONFIG
TMP = mkdtemp("authentik-blueprints")
class TestBlueprintsV1API(APITestCase):
"""Test Blueprints API"""
def setUp(self) -> None:
self.user = create_test_admin_user()
self.client.force_login(self.user)
@CONFIG.patch("blueprints_dir", TMP)
def test_api_available(self):
"""Test valid file"""
with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
file.write(
dump(
{
"version": 1,
"entries": [],
}
)
)
file.flush()
res = self.client.get(reverse("authentik_api:blueprintinstance-available"))
self.assertEqual(res.status_code, 200)
response = loads(res.content.decode())
self.assertEqual(len(response), 1)
self.assertEqual(
response[0]["hash"],
(
"e52bb445b03cd36057258dc9f0ce0fbed8278498ee1470e45315293e5f026d1bd1f9b352"
"6871c0003f5c07be5c3316d9d4a08444bd8fed1b3f03294e51e44522"
),
)

View File

@@ -1,148 +0,0 @@
"""Test blueprints v1 tasks"""
from hashlib import sha512
from tempfile import NamedTemporaryFile, mkdtemp
from django.test import TransactionTestCase
from yaml import dump
from authentik.blueprints.models import BlueprintInstance, BlueprintInstanceStatus
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discover, blueprints_find
from authentik.lib.config import CONFIG
from authentik.lib.generators import generate_id
TMP = mkdtemp("authentik-blueprints")
class TestBlueprintsV1Tasks(TransactionTestCase):
"""Test Blueprints v1 Tasks"""
@CONFIG.patch("blueprints_dir", TMP)
def test_invalid_file_syntax(self):
"""Test syntactically invalid file"""
with NamedTemporaryFile(suffix=".yaml", dir=TMP) as file:
file.write(b"{")
file.flush()
blueprints = blueprints_find()
self.assertEqual(blueprints, [])
@CONFIG.patch("blueprints_dir", TMP)
def test_invalid_file_version(self):
"""Test invalid file"""
with NamedTemporaryFile(suffix=".yaml", dir=TMP) as file:
file.write(b"version: 2")
file.flush()
blueprints = blueprints_find()
self.assertEqual(blueprints, [])
@CONFIG.patch("blueprints_dir", TMP)
def test_valid(self):
"""Test valid file"""
blueprint_id = generate_id()
with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
file.write(
dump(
{
"version": 1,
"entries": [],
"metadata": {
"name": blueprint_id,
},
}
)
)
file.seek(0)
file_hash = sha512(file.read().encode()).hexdigest()
file.flush()
blueprints_discover() # pylint: disable=no-value-for-parameter
instance = BlueprintInstance.objects.filter(name=blueprint_id).first()
self.assertEqual(instance.last_applied_hash, file_hash)
self.assertEqual(
instance.metadata,
{
"name": blueprint_id,
"labels": {},
},
)
@CONFIG.patch("blueprints_dir", TMP)
def test_valid_updated(self):
"""Test valid file"""
with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
file.write(
dump(
{
"version": 1,
"entries": [],
}
)
)
file.flush()
blueprints_discover() # pylint: disable=no-value-for-parameter
self.assertEqual(
BlueprintInstance.objects.first().last_applied_hash,
(
"e52bb445b03cd36057258dc9f0ce0fbed8278498ee1470e45315293e5f026d1b"
"d1f9b3526871c0003f5c07be5c3316d9d4a08444bd8fed1b3f03294e51e44522"
),
)
self.assertEqual(BlueprintInstance.objects.first().metadata, {})
file.write(
dump(
{
"version": 1,
"entries": [],
"metadata": {
"name": "foo",
},
}
)
)
file.flush()
blueprints_discover() # pylint: disable=no-value-for-parameter
self.assertEqual(
BlueprintInstance.objects.first().last_applied_hash,
(
"fc62fea96067da8592bdf90927246d0ca150b045447df93b0652a0e20a8bc327"
"681510b5db37ea98759c61f9a98dd2381f46a3b5a2da69dfb45158897f14e824"
),
)
self.assertEqual(
BlueprintInstance.objects.first().metadata,
{
"name": "foo",
"labels": {},
},
)
@CONFIG.patch("blueprints_dir", TMP)
def test_valid_disabled(self):
"""Test valid file"""
with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
file.write(
dump(
{
"version": 1,
"entries": [],
}
)
)
file.flush()
instance: BlueprintInstance = BlueprintInstance.objects.create(
name=generate_id(),
path=file.name,
enabled=False,
status=BlueprintInstanceStatus.UNKNOWN,
)
instance.refresh_from_db()
self.assertEqual(instance.last_applied_hash, "")
self.assertEqual(
instance.status,
BlueprintInstanceStatus.UNKNOWN,
)
apply_blueprint(instance.pk) # pylint: disable=no-value-for-parameter
instance.refresh_from_db()
self.assertEqual(instance.last_applied_hash, "")
self.assertEqual(
instance.status,
BlueprintInstanceStatus.UNKNOWN,
)

View File

@@ -1,255 +0,0 @@
"""transfer common classes"""
from collections import OrderedDict
from dataclasses import asdict, dataclass, field, is_dataclass
from enum import Enum
from typing import Any, Optional
from uuid import UUID
from django.apps import apps
from django.db.models import Model, Q
from rest_framework.fields import Field
from rest_framework.serializers import Serializer
from yaml import SafeDumper, SafeLoader, ScalarNode, SequenceNode
from authentik.lib.models import SerializerModel
from authentik.lib.sentry import SentryIgnoredException
from authentik.policies.models import PolicyBindingModel
def get_attrs(obj: SerializerModel) -> dict[str, Any]:
"""Get object's attributes via their serializer, and convert it to a normal dict"""
serializer: Serializer = obj.serializer(obj)
data = dict(serializer.data)
for field_name, _field in serializer.fields.items():
_field: Field
if field_name not in data:
continue
if _field.read_only:
data.pop(field_name, None)
if _field.get_initial() == data.get(field_name, None):
data.pop(field_name, None)
if field_name.endswith("_set"):
data.pop(field_name, None)
return data
@dataclass
class BlueprintEntryState:
"""State of a single instance"""
instance: Optional[Model] = None
@dataclass
class BlueprintEntry:
"""Single entry of a blueprint"""
identifiers: dict[str, Any]
model: str
attrs: Optional[dict[str, Any]] = field(default_factory=dict)
# pylint: disable=invalid-name
id: Optional[str] = None
_state: BlueprintEntryState = field(default_factory=BlueprintEntryState)
@staticmethod
def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry":
"""Convert a SerializerModel instance to a blueprint Entry"""
identifiers = {
"pk": model.pk,
}
all_attrs = get_attrs(model)
for extra_identifier_name in extra_identifier_names:
identifiers[extra_identifier_name] = all_attrs.pop(extra_identifier_name)
return BlueprintEntry(
identifiers=identifiers,
model=f"{model._meta.app_label}.{model._meta.model_name}",
attrs=all_attrs,
)
def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any:
"""Check if we have any special tags that need handling"""
if isinstance(value, YAMLTag):
return value.resolve(self, blueprint)
if isinstance(value, dict):
for key, inner_value in value.items():
value[key] = self.tag_resolver(inner_value, blueprint)
if isinstance(value, list):
for idx, inner_value in enumerate(value):
value[idx] = self.tag_resolver(inner_value, blueprint)
return value
def get_attrs(self, blueprint: "Blueprint") -> dict[str, Any]:
"""Get attributes of this entry, with all yaml tags resolved"""
return self.tag_resolver(self.attrs, blueprint)
def get_identifiers(self, blueprint: "Blueprint") -> dict[str, Any]:
"""Get attributes of this entry, with all yaml tags resolved"""
return self.tag_resolver(self.identifiers, blueprint)
@dataclass
class BlueprintMetadata:
"""Optional blueprint metadata"""
name: str
labels: dict[str, str] = field(default_factory=dict)
@dataclass
class Blueprint:
"""Dataclass used for a full export"""
version: int = field(default=1)
entries: list[BlueprintEntry] = field(default_factory=list)
metadata: Optional[BlueprintMetadata] = field(default=None)
context: Optional[dict] = field(default_factory=dict)
class YAMLTag:
"""Base class for all YAML Tags"""
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
"""Implement yaml tag logic"""
raise NotImplementedError
class KeyOf(YAMLTag):
"""Reference another object by their ID"""
id_from: str
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
super().__init__()
self.id_from = node.value
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
for _entry in blueprint.entries:
if _entry.id == self.id_from and _entry._state.instance:
# Special handling for PolicyBindingModels, as they'll have a different PK
# which is used when creating policy bindings
if (
isinstance(_entry._state.instance, PolicyBindingModel)
and entry.model.lower() == "authentik_policies.policybinding"
):
return _entry._state.instance.pbm_uuid
return _entry._state.instance.pk
raise ValueError(
f"KeyOf: failed to find entry with `id` of `{self.id_from}` and a model instance"
)
class Context(YAMLTag):
"""Lookup key from instance context"""
key: str
default: Optional[Any]
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
super().__init__()
self.default = None
if isinstance(node, ScalarNode):
self.key = node.value
if isinstance(node, SequenceNode):
self.key = node.value[0].value
self.default = node.value[1].value
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
value = self.default
if self.key in blueprint.context:
value = blueprint.context[self.key]
return value
class Format(YAMLTag):
"""Format a string"""
format_string: str
args: list[Any]
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.format_string = node.value[0].value
self.args = []
for raw_node in node.value[1:]:
self.args.append(raw_node.value)
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
try:
return self.format_string % tuple(self.args)
except TypeError as exc:
raise EntryInvalidError(exc)
class Find(YAMLTag):
"""Find any object"""
model_name: str
conditions: list[list]
model_class: type[Model]
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.model_name = node.value[0].value
self.model_class = apps.get_model(*self.model_name.split("."))
self.conditions = []
for raw_node in node.value[1:]:
values = []
for node_values in raw_node.value:
values.append(loader.construct_object(node_values))
self.conditions.append(values)
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
query = Q()
for cond in self.conditions:
query &= Q(**{cond[0]: cond[1]})
instance = self.model_class.objects.filter(query).first()
if instance:
return instance.pk
return None
class BlueprintDumper(SafeDumper):
"""Dump dataclasses to yaml"""
default_flow_style = False
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.add_representer(UUID, lambda self, data: self.represent_str(str(data)))
self.add_representer(OrderedDict, lambda self, data: self.represent_dict(dict(data)))
self.add_representer(Enum, lambda self, data: self.represent_str(data.value))
def represent(self, data) -> None:
if is_dataclass(data):
def factory(items):
final_dict = dict(items)
final_dict.pop("_state", None)
return final_dict
data = asdict(data, dict_factory=factory)
return super().represent(data)
class BlueprintLoader(SafeLoader):
"""Loader for blueprints with custom tag support"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.add_constructor("!KeyOf", KeyOf)
self.add_constructor("!Find", Find)
self.add_constructor("!Context", Context)
self.add_constructor("!Format", Format)
class EntryInvalidError(SentryIgnoredException):
"""Error raised when an entry is invalid"""

View File

@ -1,140 +0,0 @@
"""Blueprint exporter"""
from typing import Iterator
from uuid import UUID
from django.apps import apps
from django.db.models import Q
from django.utils.timezone import now
from django.utils.translation import gettext as _
from yaml import dump
from authentik.blueprints.v1.common import (
Blueprint,
BlueprintDumper,
BlueprintEntry,
BlueprintMetadata,
)
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_GENERATED
from authentik.flows.models import Flow, FlowStageBinding, Stage
from authentik.lib.models import SerializerModel
from authentik.policies.models import Policy, PolicyBinding
from authentik.stages.prompt.models import PromptStage
class Exporter:
"""Export flow with attached stages into yaml"""
excluded_models = []
def __init__(self):
self.excluded_models = []
def get_entries(self) -> Iterator[BlueprintEntry]:
"""Get blueprint entries"""
for model in apps.get_models():
if not is_model_allowed(model):
continue
if model in self.excluded_models:
continue
if SerializerModel not in model.__mro__:
continue
for obj in model.objects.all():
yield BlueprintEntry.from_model(obj)
def _pre_export(self, blueprint: Blueprint):
"""Hook to run anything pre-export"""
def export(self) -> Blueprint:
"""Create a list of all objects and create a blueprint"""
blueprint = Blueprint()
self._pre_export(blueprint)
blueprint.metadata = BlueprintMetadata(
name=_("authentik Export - %(date)s" % {"date": str(now())}),
labels={
LABEL_AUTHENTIK_GENERATED: "true",
},
)
blueprint.entries = list(self.get_entries())
return blueprint
def export_to_string(self) -> str:
"""Call export and convert it to yaml"""
blueprint = self.export()
return dump(blueprint, Dumper=BlueprintDumper)
class FlowExporter(Exporter):
"""Exporter customised to only return objects related to `flow`"""
flow: Flow
with_policies: bool
with_stage_prompts: bool
pbm_uuids: list[UUID]
def __init__(self, flow: Flow):
super().__init__()
self.flow = flow
self.with_policies = True
self.with_stage_prompts = True
def _pre_export(self, blueprint: Blueprint):
if not self.with_policies:
return
self.pbm_uuids = [self.flow.pbm_uuid]
self.pbm_uuids += FlowStageBinding.objects.filter(target=self.flow).values_list(
"pbm_uuid", flat=True
)
def walk_stages(self) -> Iterator[BlueprintEntry]:
"""Convert all stages attached to self.flow into BlueprintEntry objects"""
stages = Stage.objects.filter(flow=self.flow).select_related().select_subclasses()
for stage in stages:
if isinstance(stage, PromptStage):
pass
yield BlueprintEntry.from_model(stage, "name")
def walk_stage_bindings(self) -> Iterator[BlueprintEntry]:
"""Convert all bindings attached to self.flow into BlueprintEntry objects"""
bindings = FlowStageBinding.objects.filter(target=self.flow).select_related()
for binding in bindings:
yield BlueprintEntry.from_model(binding, "target", "stage", "order")
def walk_policies(self) -> Iterator[BlueprintEntry]:
"""Walk over all policies. This is done at the beginning of the export for stages that have
a direct foreign key to a policy."""
# Special case for PromptStage, which has a direct M2M to Policy: we have to make sure
# that any policy referenced there is also included in the export
prompt_stages = PromptStage.objects.filter(flow=self.flow).values_list("pk", flat=True)
query = Q(bindings__in=self.pbm_uuids) | Q(promptstage__in=prompt_stages)
policies = Policy.objects.filter(query).select_related()
for policy in policies:
yield BlueprintEntry.from_model(policy)
def walk_policy_bindings(self) -> Iterator[BlueprintEntry]:
"""Walk over all policybindings relative to us. This is run at the end of the export, as
we are sure all objects exist now."""
bindings = PolicyBinding.objects.filter(target__in=self.pbm_uuids).select_related()
for binding in bindings:
yield BlueprintEntry.from_model(binding, "policy", "target", "order")
def walk_stage_prompts(self) -> Iterator[BlueprintEntry]:
"""Walk over all prompts associated with any PromptStages"""
prompt_stages = PromptStage.objects.filter(flow=self.flow)
for stage in prompt_stages:
for prompt in stage.fields.all():
yield BlueprintEntry.from_model(prompt)
def get_entries(self) -> Iterator[BlueprintEntry]:
entries = []
entries.append(BlueprintEntry.from_model(self.flow, "slug"))
if self.with_stage_prompts:
entries.extend(self.walk_stage_prompts())
if self.with_policies:
entries.extend(self.walk_policies())
entries.extend(self.walk_stages())
entries.extend(self.walk_stage_bindings())
if self.with_policies:
entries.extend(self.walk_policy_bindings())
return entries
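A typical use of the exporters from an authentik shell looks roughly like this; the flow slug is hypothetical, and the module path `authentik.blueprints.v1.exporter` is assumed from the sibling `v1` imports above rather than shown in this diff:

```python
from authentik.blueprints.v1.exporter import FlowExporter
from authentik.flows.models import Flow

flow = Flow.objects.get(slug="my-enrollment-flow")  # hypothetical slug
exporter = FlowExporter(flow)
exporter.with_policies = False  # skip policies and policy bindings if they are not needed
print(exporter.export_to_string())
```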

View File

@ -1,5 +0,0 @@
"""Blueprint labels"""
LABEL_AUTHENTIK_SYSTEM = "blueprints.goauthentik.io/system"
LABEL_AUTHENTIK_INSTANTIATE = "blueprints.goauthentik.io/instantiate"
LABEL_AUTHENTIK_GENERATED = "blueprints.goauthentik.io/generated"

View File

@ -1,174 +0,0 @@
"""v1 blueprints tasks"""
from dataclasses import asdict, dataclass, field
from hashlib import sha512
from pathlib import Path
from typing import Optional
from dacite import from_dict
from django.db import DatabaseError, InternalError, ProgrammingError
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from structlog.stdlib import get_logger
from yaml import load
from yaml.error import YAMLError
from authentik.blueprints.models import (
BlueprintInstance,
BlueprintInstanceStatus,
BlueprintRetrievalFailed,
)
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
from authentik.events.monitored_tasks import (
MonitoredTask,
TaskResult,
TaskResultStatus,
prefill_task,
)
from authentik.events.utils import sanitize_dict
from authentik.lib.config import CONFIG
from authentik.root.celery import CELERY_APP
LOGGER = get_logger()
@dataclass
class BlueprintFile:
"""Basic info about a blueprint file"""
path: str
version: int
hash: str
last_m: int
meta: Optional[BlueprintMetadata] = field(default=None)
@CELERY_APP.task(
throws=(DatabaseError, ProgrammingError, InternalError),
)
def blueprints_find_dict():
"""Find blueprints as `blueprints_find` does, but return a safe dict"""
blueprints = []
for blueprint in blueprints_find():
blueprints.append(sanitize_dict(asdict(blueprint)))
return blueprints
def blueprints_find():
"""Find blueprints and return valid ones"""
blueprints = []
root = Path(CONFIG.y("blueprints_dir"))
for file in root.glob("**/*.yaml"):
path = Path(file)
LOGGER.debug("found blueprint", path=str(path))
with open(path, "r", encoding="utf-8") as blueprint_file:
try:
raw_blueprint = load(blueprint_file.read(), BlueprintLoader)
except YAMLError as exc:
raw_blueprint = None
LOGGER.warning("failed to parse blueprint", exc=exc, path=str(path))
if not raw_blueprint:
continue
metadata = raw_blueprint.get("metadata", None)
version = raw_blueprint.get("version", 1)
if version != 1:
LOGGER.warning("invalid blueprint version", version=version, path=str(path))
continue
file_hash = sha512(path.read_bytes()).hexdigest()
blueprint = BlueprintFile(path.relative_to(root), version, file_hash, path.stat().st_mtime)
blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
blueprints.append(blueprint)
LOGGER.info(
"parsed & loaded blueprint",
hash=file_hash,
path=str(path),
)
return blueprints
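Discovery is purely file-driven: every file matching `**/*.yaml` below the configured `blueprints_dir` that parses and declares `version: 1` is returned. To inspect what would be picked up without touching the database (inside an authentik shell; the `authentik.blueprints.v1.tasks` module path is assumed from the package layout):

```python
from authentik.blueprints.v1.tasks import blueprints_find

for blueprint in blueprints_find():
    print(blueprint.path, blueprint.hash[:16], blueprint.meta.name if blueprint.meta else "<no metadata>")
```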
@CELERY_APP.task(
throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
)
@prefill_task
def blueprints_discover(self: MonitoredTask):
"""Find blueprints and check if they need to be created in the database"""
count = 0
for blueprint in blueprints_find():
check_blueprint_v1_file(blueprint)
count += 1
self.set_status(
TaskResult(
TaskResultStatus.SUCCESSFUL,
messages=[_("Successfully imported %(count)d files." % {"count": count})],
)
)
def check_blueprint_v1_file(blueprint: BlueprintFile):
"""Check if blueprint should be imported"""
instance: BlueprintInstance = BlueprintInstance.objects.filter(path=blueprint.path).first()
if (
blueprint.meta
and blueprint.meta.labels.get(LABEL_AUTHENTIK_INSTANTIATE, "").lower() == "false"
):
return
if not instance:
instance = BlueprintInstance(
name=blueprint.meta.name if blueprint.meta else str(blueprint.path),
path=blueprint.path,
context={},
status=BlueprintInstanceStatus.UNKNOWN,
enabled=True,
managed_models=[],
metadata={},
)
instance.save()
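# Only queue a re-apply when the file content (and therefore its hash) changed since the last run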
if instance.last_applied_hash != blueprint.hash:
instance.metadata = asdict(blueprint.meta) if blueprint.meta else {}
instance.save()
apply_blueprint.delay(instance.pk.hex)
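As the early return above shows, a blueprint can opt out of automatic instantiation entirely via the `LABEL_AUTHENTIK_INSTANTIATE` label from the labels module: such a file is still discovered, but no `BlueprintInstance` is ever created for it. A minimal, illustrative header for such a file (shown as a Python string; fields beyond `version` and `metadata` are omitted):

```python
TEMPLATE_ONLY_HEADER = """
version: 1
metadata:
  name: example-template
  labels:
    blueprints.goauthentik.io/instantiate: "false"
"""
```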
@CELERY_APP.task(
bind=True,
base=MonitoredTask,
)
def apply_blueprint(self: MonitoredTask, instance_pk: str):
"""Apply single blueprint"""
self.set_uid(instance_pk)
self.save_on_success = False
try:
instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first()
if not instance or not instance.enabled:
return
blueprint_content = instance.retrieve()
file_hash = sha512(blueprint_content.encode()).hexdigest()
importer = Importer(blueprint_content, instance.context)
valid, logs = importer.validate()
if not valid:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskResult(TaskResultStatus.ERROR, [x["event"] for x in logs]))
return
applied = importer.apply()
if not applied:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskResult(TaskResultStatus.ERROR, "Failed to apply"))
return
instance.status = BlueprintInstanceStatus.SUCCESSFUL
instance.last_applied_hash = file_hash
instance.last_applied = now()
instance.save()
self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL))
except (
DatabaseError,
ProgrammingError,
InternalError,
IOError,
BlueprintRetrievalFailed,
) as exc:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
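Note that the hash comparison lives in `check_blueprint_v1_file`, not here, so queueing `apply_blueprint` directly always re-applies the instance even if the file is unchanged. A sketch for forcing a re-apply from an authentik shell (instance name hypothetical, module path assumed as above):

```python
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.tasks import apply_blueprint

instance = BlueprintInstance.objects.get(name="example-template")
apply_blueprint.delay(instance.pk.hex)  # same call discovery makes after a hash change
```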

View File

@@ -98,6 +98,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
         "group",
     ]
     lookup_field = "slug"
+    filterset_fields = ["name", "slug"]
     ordering = ["name"]

     def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:

View File

@ -1,10 +1,9 @@
"""Authenticator Devices API Views""" """Authenticator Devices API Views"""
from django_otp import device_classes, devices_for_user from django_otp import devices_for_user
from django_otp.models import Device from django_otp.models import Device
from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import extend_schema
from drf_spectacular.utils import OpenApiParameter, extend_schema from rest_framework.fields import CharField, IntegerField, SerializerMethodField
from rest_framework.fields import BooleanField, CharField, IntegerField, SerializerMethodField from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from rest_framework.request import Request from rest_framework.request import Request
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.viewsets import ViewSet from rest_framework.viewsets import ViewSet
@ -18,7 +17,6 @@ class DeviceSerializer(MetaNameSerializer):
pk = IntegerField() pk = IntegerField()
name = CharField() name = CharField()
type = SerializerMethodField() type = SerializerMethodField()
confirmed = BooleanField()
def get_type(self, instance: Device) -> str: def get_type(self, instance: Device) -> str:
"""Get type of device""" """Get type of device"""
@ -36,33 +34,3 @@ class DeviceViewSet(ViewSet):
"""Get all devices for current user""" """Get all devices for current user"""
devices = devices_for_user(request.user) devices = devices_for_user(request.user)
return Response(DeviceSerializer(devices, many=True).data) return Response(DeviceSerializer(devices, many=True).data)
class AdminDeviceViewSet(ViewSet):
"""Viewset for authenticator devices"""
serializer_class = DeviceSerializer
permission_classes = [IsAdminUser]
def get_devices(self, **kwargs):
"""Get all devices in all child classes"""
for model in device_classes():
device_set = model.objects.filter(**kwargs)
yield from device_set
@extend_schema(
parameters=[
OpenApiParameter(
name="user",
location=OpenApiParameter.QUERY,
type=OpenApiTypes.INT,
)
],
responses={200: DeviceSerializer(many=True)},
)
def list(self, request: Request) -> Response:
"""Get all devices for current user"""
kwargs = {}
if "user" in request.query_params:
kwargs = {"user": request.query_params["user"]}
return Response(DeviceSerializer(self.get_devices(**kwargs), many=True).data)

View File

@@ -62,11 +62,6 @@ class GroupSerializer(ModelSerializer):
             "attributes",
             "users_obj",
         ]
-        extra_kwargs = {
-            "users": {
-                "default": list,
-            }
-        }


 class GroupFilter(FilterSet):

View File

@ -14,12 +14,12 @@ from rest_framework.serializers import ModelSerializer, SerializerMethodField
from rest_framework.viewsets import GenericViewSet from rest_framework.viewsets import GenericViewSet
from authentik.api.decorators import permission_required from authentik.api.decorators import permission_required
from authentik.blueprints.api import ManagedSerializer
from authentik.core.api.used_by import UsedByMixin from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import MetaNameSerializer, PassiveSerializer, TypeCreateSerializer from authentik.core.api.utils import MetaNameSerializer, PassiveSerializer, TypeCreateSerializer
from authentik.core.expression import PropertyMappingEvaluator from authentik.core.expression import PropertyMappingEvaluator
from authentik.core.models import PropertyMapping from authentik.core.models import PropertyMapping
from authentik.lib.utils.reflection import all_subclasses from authentik.lib.utils.reflection import all_subclasses
from authentik.managed.api import ManagedSerializer
from authentik.policies.api.exec import PolicyTestSerializer from authentik.policies.api.exec import PolicyTestSerializer

View File

@ -15,13 +15,13 @@ from rest_framework.viewsets import ModelViewSet
from authentik.api.authorization import OwnerSuperuserPermissions from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.api.decorators import permission_required from authentik.api.decorators import permission_required
from authentik.blueprints.api import ManagedSerializer
from authentik.core.api.used_by import UsedByMixin from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.users import UserSerializer from authentik.core.api.users import UserSerializer
from authentik.core.api.utils import PassiveSerializer from authentik.core.api.utils import PassiveSerializer
from authentik.core.models import USER_ATTRIBUTE_TOKEN_EXPIRING, Token, TokenIntents from authentik.core.models import USER_ATTRIBUTE_TOKEN_EXPIRING, Token, TokenIntents
from authentik.events.models import Event, EventAction from authentik.events.models import Event, EventAction
from authentik.events.utils import model_to_dict from authentik.events.utils import model_to_dict
from authentik.managed.api import ManagedSerializer
class TokenSerializer(ManagedSerializer, ModelSerializer): class TokenSerializer(ManagedSerializer, ModelSerializer):

View File

@ -1,37 +1,22 @@
"""authentik core app config""" """authentik core app config"""
from importlib import import_module
from django.apps import AppConfig
from django.conf import settings from django.conf import settings
from authentik.blueprints.manager import ManagedAppConfig
class AuthentikCoreConfig(AppConfig):
class AuthentikCoreConfig(ManagedAppConfig):
"""authentik core app config""" """authentik core app config"""
name = "authentik.core" name = "authentik.core"
label = "authentik_core" label = "authentik_core"
verbose_name = "authentik Core" verbose_name = "authentik Core"
mountpoint = "" mountpoint = ""
default = True
def reconcile_load_core_signals(self): def ready(self):
"""Load core signals""" import_module("authentik.core.signals")
self.import_module("authentik.core.signals") import_module("authentik.core.managed")
def reconcile_debug_worker_hook(self):
"""Dispatch startup tasks inline when debugging"""
if settings.DEBUG: if settings.DEBUG:
from authentik.root.celery import worker_ready_hook from authentik.root.celery import worker_ready_hook
worker_ready_hook() worker_ready_hook()
def reconcile_source_inbuilt(self):
"""Reconcile inbuilt source"""
from authentik.core.models import Source
Source.objects.update_or_create(
defaults={
"name": "authentik Built-in",
"slug": "authentik-built-in",
},
managed="goauthentik.io/sources/inbuilt",
)

17
authentik/core/managed.py Normal file
View File

@ -0,0 +1,17 @@
"""Core managed objects"""
from authentik.core.models import Source
from authentik.managed.manager import EnsureExists, ObjectManager
class CoreManager(ObjectManager):
"""Core managed objects"""
def reconcile(self):
return [
EnsureExists(
Source,
"goauthentik.io/sources/inbuilt",
name="authentik Built-in",
slug="authentik-built-in",
),
]

View File

@ -4,7 +4,7 @@ from django.core.management.base import BaseCommand
from authentik.root.celery import _get_startup_tasks from authentik.root.celery import _get_startup_tasks
class Command(BaseCommand): class Command(BaseCommand): # pragma: no cover
"""Run bootstrap tasks to ensure certain objects are created""" """Run bootstrap tasks to ensure certain objects are created"""
def handle(self, **options): def handle(self, **options):

View File

@ -5,7 +5,7 @@ from django.core.management.base import BaseCommand, no_translations
from guardian.management import create_anonymous_user from guardian.management import create_anonymous_user
class Command(BaseCommand): class Command(BaseCommand): # pragma: no cover
"""Repair missing permissions""" """Repair missing permissions"""
@no_translations @no_translations

View File

@ -22,7 +22,7 @@ BANNER_TEXT = """### authentik shell ({authentik})
) )
class Command(BaseCommand): class Command(BaseCommand): # pragma: no cover
"""Start the Django shell with all authentik models already imported""" """Start the Django shell with all authentik models already imported"""
django_models = {} django_models = {}
@ -40,6 +40,9 @@ class Command(BaseCommand):
# Gather Django models and constants from each app # Gather Django models and constants from each app
for app in apps.get_app_configs(): for app in apps.get_app_configs():
if not app.name.startswith("authentik"):
continue
# Load models from each app # Load models from each app
for model in app.get_models(): for model in app.get_models():
namespace[model.__name__] = model namespace[model.__name__] = model

View File

@ -1,22 +1,19 @@
"""authentik admin Middleware to impersonate users""" """authentik admin Middleware to impersonate users"""
from contextvars import ContextVar from logging import Logger
from threading import local
from typing import Callable from typing import Callable
from uuid import uuid4 from uuid import uuid4
from django.http import HttpRequest, HttpResponse from django.http import HttpRequest, HttpResponse
from sentry_sdk.api import set_tag from sentry_sdk.api import set_tag
from structlog.contextvars import STRUCTLOG_KEY_PREFIX
SESSION_KEY_IMPERSONATE_USER = "authentik/impersonate/user" SESSION_KEY_IMPERSONATE_USER = "authentik/impersonate/user"
SESSION_KEY_IMPERSONATE_ORIGINAL_USER = "authentik/impersonate/original_user" SESSION_KEY_IMPERSONATE_ORIGINAL_USER = "authentik/impersonate/original_user"
LOCAL = local()
RESPONSE_HEADER_ID = "X-authentik-id" RESPONSE_HEADER_ID = "X-authentik-id"
KEY_AUTH_VIA = "auth_via" KEY_AUTH_VIA = "auth_via"
KEY_USER = "user" KEY_USER = "user"
CTX_REQUEST_ID = ContextVar(STRUCTLOG_KEY_PREFIX + "request_id", default=None)
CTX_HOST = ContextVar(STRUCTLOG_KEY_PREFIX + "host", default=None)
CTX_AUTH_VIA = ContextVar(STRUCTLOG_KEY_PREFIX + KEY_AUTH_VIA, default=None)
class ImpersonateMiddleware: class ImpersonateMiddleware:
"""Middleware to impersonate users""" """Middleware to impersonate users"""
@ -50,20 +47,26 @@ class RequestIDMiddleware:
if not hasattr(request, "request_id"): if not hasattr(request, "request_id"):
request_id = uuid4().hex request_id = uuid4().hex
setattr(request, "request_id", request_id) setattr(request, "request_id", request_id)
CTX_REQUEST_ID.set(request_id) LOCAL.authentik = {
CTX_HOST.set(request.get_host()) "request_id": request_id,
"host": request.get_host(),
}
set_tag("authentik.request_id", request_id) set_tag("authentik.request_id", request_id)
if hasattr(request, "user") and getattr(request.user, "is_authenticated", False):
CTX_AUTH_VIA.set("session")
else:
CTX_AUTH_VIA.set("unauthenticated")
response = self.get_response(request) response = self.get_response(request)
response[RESPONSE_HEADER_ID] = request.request_id response[RESPONSE_HEADER_ID] = request.request_id
setattr(response, "ak_context", {}) setattr(response, "ak_context", {})
response.ak_context["request_id"] = CTX_REQUEST_ID.get() response.ak_context.update(LOCAL.authentik)
response.ak_context["host"] = CTX_HOST.get() response.ak_context.setdefault(KEY_USER, request.user.username)
response.ak_context[KEY_AUTH_VIA] = CTX_AUTH_VIA.get() for key in list(LOCAL.authentik.keys()):
response.ak_context[KEY_USER] = request.user.username del LOCAL.authentik[key]
return response return response
# pylint: disable=unused-argument
def structlog_add_request_id(logger: Logger, method_name: str, event_dict: dict):
"""If threadlocal has authentik defined, add request_id to log"""
if hasattr(LOCAL, "authentik"):
event_dict.update(LOCAL.authentik)
if hasattr(LOCAL, "authentik_task"):
event_dict.update(LOCAL.authentik_task)
return event_dict

View File

@ -1,26 +0,0 @@
# Generated by Django 4.0.6 on 2022-08-05 22:01
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0021_source_user_path_user_path"),
]
operations = [
migrations.AlterField(
model_name="group",
name="parent",
field=models.ForeignKey(
blank=True,
default=None,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="children",
to="authentik_core.group",
),
),
]

View File

@ -23,14 +23,14 @@ from model_utils.managers import InheritanceManager
from rest_framework.serializers import Serializer from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
from authentik.blueprints.models import ManagedModel
from authentik.core.exceptions import PropertyMappingExpressionException from authentik.core.exceptions import PropertyMappingExpressionException
from authentik.core.signals import password_changed from authentik.core.signals import password_changed
from authentik.core.types import UILoginButton, UserSettingSerializer from authentik.core.types import UILoginButton, UserSettingSerializer
from authentik.lib.config import CONFIG, get_path_from_dict from authentik.lib.config import CONFIG
from authentik.lib.generators import generate_id from authentik.lib.generators import generate_id
from authentik.lib.models import CreatedUpdatedModel, DomainlessURLValidator, SerializerModel from authentik.lib.models import CreatedUpdatedModel, DomainlessURLValidator, SerializerModel
from authentik.lib.utils.http import get_client_ip from authentik.lib.utils.http import get_client_ip
from authentik.managed.models import ManagedModel
from authentik.policies.models import PolicyBindingModel from authentik.policies.models import PolicyBindingModel
LOGGER = get_logger() LOGGER = get_logger()
@ -68,7 +68,7 @@ def default_token_key():
return generate_id(int(CONFIG.y("default_token_length"))) return generate_id(int(CONFIG.y("default_token_length")))
class Group(SerializerModel): class Group(models.Model):
"""Custom Group model which supports a basic hierarchy""" """Custom Group model which supports a basic hierarchy"""
group_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4) group_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
@ -82,18 +82,11 @@ class Group(SerializerModel):
"Group", "Group",
blank=True, blank=True,
null=True, null=True,
default=None,
on_delete=models.SET_NULL, on_delete=models.SET_NULL,
related_name="children", related_name="children",
) )
attributes = models.JSONField(default=dict, blank=True) attributes = models.JSONField(default=dict, blank=True)
@property
def serializer(self) -> Serializer:
from authentik.core.api.groups import GroupSerializer
return GroupSerializer
@property @property
def num_pk(self) -> int: def num_pk(self) -> int:
"""Get a numerical, int32 ID for the group""" """Get a numerical, int32 ID for the group"""
@ -146,7 +139,7 @@ class UserManager(DjangoUserManager):
return self._create_user(username, email, password, **extra_fields) return self._create_user(username, email, password, **extra_fields)
class User(SerializerModel, GuardianUserMixin, AbstractUser): class User(GuardianUserMixin, AbstractUser):
"""Custom User model to allow easier adding of user-based settings""" """Custom User model to allow easier adding of user-based settings"""
uuid = models.UUIDField(default=uuid4, editable=False) uuid = models.UUIDField(default=uuid4, editable=False)
@ -177,12 +170,6 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
always_merger.merge(final_attributes, self.attributes) always_merger.merge(final_attributes, self.attributes)
return final_attributes return final_attributes
@property
def serializer(self) -> Serializer:
from authentik.core.api.users import UserSerializer
return UserSerializer
@cached_property @cached_property
def is_superuser(self) -> bool: def is_superuser(self) -> bool:
"""Get supseruser status based on membership in a group with superuser status""" """Get supseruser status based on membership in a group with superuser status"""
@ -226,8 +213,6 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
mode: str = CONFIG.y("avatars", "none") mode: str = CONFIG.y("avatars", "none")
if mode == "none": if mode == "none":
return DEFAULT_AVATAR return DEFAULT_AVATAR
if mode.startswith("attributes."):
return get_path_from_dict(self.attributes, mode[11:], default=DEFAULT_AVATAR)
# gravatar uses md5 for their URLs, so md5 can't be avoided # gravatar uses md5 for their URLs, so md5 can't be avoided
mail_hash = md5(self.email.lower().encode("utf-8")).hexdigest() # nosec mail_hash = md5(self.email.lower().encode("utf-8")).hexdigest() # nosec
if mode == "gravatar": if mode == "gravatar":
@ -289,7 +274,7 @@ class Provider(SerializerModel):
return self.name return self.name
class Application(SerializerModel, PolicyBindingModel): class Application(PolicyBindingModel):
"""Every Application which uses authentik for authentication/identification/authorization """Every Application which uses authentik for authentication/identification/authorization
needs an Application record. Other authentication types can subclass this Model to needs an Application record. Other authentication types can subclass this Model to
add custom fields and other properties""" add custom fields and other properties"""
@ -320,12 +305,6 @@ class Application(SerializerModel, PolicyBindingModel):
meta_description = models.TextField(default="", blank=True) meta_description = models.TextField(default="", blank=True)
meta_publisher = models.TextField(default="", blank=True) meta_publisher = models.TextField(default="", blank=True)
@property
def serializer(self) -> Serializer:
from authentik.core.api.applications import ApplicationSerializer
return ApplicationSerializer
@property @property
def get_meta_icon(self) -> Optional[str]: def get_meta_icon(self) -> Optional[str]:
"""Get the URL to the App Icon image. If the name is /static or starts with http """Get the URL to the App Icon image. If the name is /static or starts with http
@ -473,7 +452,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
return self.name return self.name
class UserSourceConnection(SerializerModel, CreatedUpdatedModel): class UserSourceConnection(CreatedUpdatedModel):
"""Connection between User and Source.""" """Connection between User and Source."""
user = models.ForeignKey(User, on_delete=models.CASCADE) user = models.ForeignKey(User, on_delete=models.CASCADE)
@ -481,11 +460,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel):
objects = InheritanceManager() objects = InheritanceManager()
@property
def serializer(self) -> type[Serializer]:
"""Get serializer for this model"""
raise NotImplementedError
class Meta: class Meta:
unique_together = (("user", "source"),) unique_together = (("user", "source"),)
@ -540,7 +514,7 @@ class TokenIntents(models.TextChoices):
INTENT_APP_PASSWORD = "app_password" # nosec INTENT_APP_PASSWORD = "app_password" # nosec
class Token(SerializerModel, ManagedModel, ExpiringModel): class Token(ManagedModel, ExpiringModel):
"""Token used to authenticate the User for API Access or confirm another Stage like Email.""" """Token used to authenticate the User for API Access or confirm another Stage like Email."""
token_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4) token_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
@ -552,12 +526,6 @@ class Token(SerializerModel, ManagedModel, ExpiringModel):
user = models.ForeignKey("User", on_delete=models.CASCADE, related_name="+") user = models.ForeignKey("User", on_delete=models.CASCADE, related_name="+")
description = models.TextField(default="", blank=True) description = models.TextField(default="", blank=True)
@property
def serializer(self) -> type[Serializer]:
from authentik.core.api.tokens import TokenSerializer
return TokenSerializer
def expire_action(self, *args, **kwargs): def expire_action(self, *args, **kwargs):
"""Handler which is called when this object is expired.""" """Handler which is called when this object is expired."""
from authentik.events.models import Event, EventAction from authentik.events.models import Event, EventAction

View File

@ -7,12 +7,6 @@
<script src="{% static 'dist/admin/AdminInterface.js' %}" type="module"></script> <script src="{% static 'dist/admin/AdminInterface.js' %}" type="module"></script>
<meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)"> <meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)">
<meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)"> <meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)">
<script>
window.authentik = {};
window.authentik.locale = "{{ tenant.default_locale }}";
window.authentik.config = JSON.parse('{{ config_json|escapejs }}');
window.authentik.tenant = JSON.parse('{{ tenant_json|escapejs }}');
</script>
{% endblock %} {% endblock %}
{% block body %} {% block body %}

View File

@ -10,10 +10,9 @@
<script>ShadyDOM = { force: !navigator.webdriver };</script> <script>ShadyDOM = { force: !navigator.webdriver };</script>
{% endif %} {% endif %}
<script> <script>
window.authentik = {}; window.authentik = {
window.authentik.locale = "{{ tenant.default_locale }}"; "locale": "{{ tenant.default_locale }}",
window.authentik.config = JSON.parse('{{ config_json|escapejs }}'); };
window.authentik.tenant = JSON.parse('{{ tenant_json|escapejs }}');
window.authentik.flow = { window.authentik.flow = {
"layout": "{{ flow.layout }}", "layout": "{{ flow.layout }}",
}; };

View File

@ -7,12 +7,6 @@
<script src="{% static 'dist/user/UserInterface.js' %}" type="module"></script> <script src="{% static 'dist/user/UserInterface.js' %}" type="module"></script>
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)"> <meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)"> <meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
<script>
window.authentik = {};
window.authentik.locale = "{{ tenant.default_locale }}";
window.authentik.config = JSON.parse('{{ config_json|escapejs }}');
window.authentik.tenant = JSON.parse('{{ tenant_json|escapejs }}');
</script>
{% endblock %} {% endblock %}
{% block body %} {% block body %}

View File

@ -1,13 +1,10 @@
"""Test Users API""" """Test Users API"""
from json import loads
from django.urls.base import reverse from django.urls.base import reverse
from rest_framework.test import APITestCase from rest_framework.test import APITestCase
from authentik.core.models import User from authentik.core.models import User
from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_tenant from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_tenant
from authentik.flows.models import FlowDesignation from authentik.flows.models import FlowDesignation
from authentik.lib.config import CONFIG
from authentik.lib.generators import generate_id, generate_key from authentik.lib.generators import generate_id, generate_key
from authentik.stages.email.models import EmailStage from authentik.stages.email.models import EmailStage
from authentik.tenants.models import Tenant from authentik.tenants.models import Tenant
@ -214,47 +211,3 @@ class TestUsersAPI(APITestCase):
self.assertJSONEqual( self.assertJSONEqual(
response.content.decode(), {"path": ["No empty segments in user path allowed."]} response.content.decode(), {"path": ["No empty segments in user path allowed."]}
) )
def test_me(self):
"""Test user's me endpoint"""
self.client.force_login(self.admin)
response = self.client.get(reverse("authentik_api:user-me"))
self.assertEqual(response.status_code, 200)
@CONFIG.patch("avatars", "none")
def test_avatars_none(self):
"""Test avatars none"""
self.client.force_login(self.admin)
response = self.client.get(reverse("authentik_api:user-me"))
self.assertEqual(response.status_code, 200)
body = loads(response.content.decode())
self.assertEqual(body["user"]["avatar"], "/static/dist/assets/images/user_default.png")
@CONFIG.patch("avatars", "gravatar")
def test_avatars_gravatar(self):
"""Test avatars gravatar"""
self.client.force_login(self.admin)
response = self.client.get(reverse("authentik_api:user-me"))
self.assertEqual(response.status_code, 200)
body = loads(response.content.decode())
self.assertIn("gravatar", body["user"]["avatar"])
@CONFIG.patch("avatars", "foo-%(username)s")
def test_avatars_custom(self):
"""Test avatars custom"""
self.client.force_login(self.admin)
response = self.client.get(reverse("authentik_api:user-me"))
self.assertEqual(response.status_code, 200)
body = loads(response.content.decode())
self.assertEqual(body["user"]["avatar"], f"foo-{self.admin.username}")
@CONFIG.patch("avatars", "attributes.foo.avatar")
def test_avatars_attributes(self):
"""Test avatars attributes"""
self.admin.attributes = {"foo": {"avatar": "bar"}}
self.admin.save()
self.client.force_login(self.admin)
response = self.client.get(reverse("authentik_api:user-me"))
self.assertEqual(response.status_code, 200)
body = loads(response.content.decode())
self.assertEqual(body["user"]["avatar"], "bar")

View File

@ -4,10 +4,11 @@ from django.contrib.auth.decorators import login_required
from django.urls import path from django.urls import path
from django.views.decorators.csrf import ensure_csrf_cookie from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.generic import RedirectView from django.views.generic import RedirectView
from django.views.generic.base import TemplateView
from authentik.core.views import apps, impersonate from authentik.core.views import apps, impersonate
from authentik.core.views.debug import AccessDeniedView from authentik.core.views.debug import AccessDeniedView
from authentik.core.views.interface import FlowInterfaceView, InterfaceView from authentik.core.views.interface import FlowInterfaceView
from authentik.core.views.session import EndSessionView from authentik.core.views.session import EndSessionView
urlpatterns = [ urlpatterns = [
@ -38,12 +39,12 @@ urlpatterns = [
# Interfaces # Interfaces
path( path(
"if/admin/", "if/admin/",
ensure_csrf_cookie(InterfaceView.as_view(template_name="if/admin.html")), ensure_csrf_cookie(TemplateView.as_view(template_name="if/admin.html")),
name="if-admin", name="if-admin",
), ),
path( path(
"if/user/", "if/user/",
ensure_csrf_cookie(InterfaceView.as_view(template_name="if/user.html")), ensure_csrf_cookie(TemplateView.as_view(template_name="if/user.html")),
name="if-user", name="if-user",
), ),
path( path(
@ -57,10 +58,10 @@ urlpatterns = [
name="if-session-end", name="if-session-end",
), ),
# Fallback for WS # Fallback for WS
path("ws/outpost/<uuid:pk>/", InterfaceView.as_view(template_name="if/admin.html")), path("ws/outpost/<uuid:pk>/", TemplateView.as_view(template_name="if/admin.html")),
path( path(
"ws/client/", "ws/client/",
InterfaceView.as_view(template_name="if/admin.html"), TemplateView.as_view(template_name="if/admin.html"),
), ),
] ]

View File

@ -1,26 +1,13 @@
"""Interface views""" """Interface views"""
from json import dumps
from typing import Any from typing import Any
from django.shortcuts import get_object_or_404 from django.shortcuts import get_object_or_404
from django.views.generic.base import TemplateView from django.views.generic.base import TemplateView
from rest_framework.request import Request
from authentik.api.v3.config import ConfigView
from authentik.flows.models import Flow from authentik.flows.models import Flow
from authentik.tenants.api import CurrentTenantSerializer
class InterfaceView(TemplateView): class FlowInterfaceView(TemplateView):
"""Base interface view"""
def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
kwargs["config_json"] = dumps(ConfigView(request=Request(self.request)).get_config().data)
kwargs["tenant_json"] = dumps(CurrentTenantSerializer(self.request.tenant).data)
return super().get_context_data(**kwargs)
class FlowInterfaceView(InterfaceView):
"""Flow interface""" """Flow interface"""
template_name = "if/flow.html" template_name = "if/flow.html"

View File

@ -22,8 +22,8 @@ from structlog.stdlib import get_logger
from authentik.api.decorators import permission_required from authentik.api.decorators import permission_required
from authentik.core.api.used_by import UsedByMixin from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer from authentik.core.api.utils import PassiveSerializer
from authentik.crypto.apps import MANAGED_KEY
from authentik.crypto.builder import CertificateBuilder from authentik.crypto.builder import CertificateBuilder
from authentik.crypto.managed import MANAGED_KEY
from authentik.crypto.models import CertificateKeyPair from authentik.crypto.models import CertificateKeyPair
from authentik.events.models import Event, EventAction from authentik.events.models import Event, EventAction

View File

@ -1,72 +1,16 @@
"""authentik crypto app config""" """authentik crypto app config"""
from datetime import datetime from importlib import import_module
from typing import TYPE_CHECKING, Optional
from authentik.blueprints.manager import ManagedAppConfig from django.apps import AppConfig
from authentik.lib.generators import generate_id
if TYPE_CHECKING:
from authentik.crypto.models import CertificateKeyPair
MANAGED_KEY = "goauthentik.io/crypto/jwt-managed"
class AuthentikCryptoConfig(ManagedAppConfig): class AuthentikCryptoConfig(AppConfig):
"""authentik crypto app config""" """authentik crypto app config"""
name = "authentik.crypto" name = "authentik.crypto"
label = "authentik_crypto" label = "authentik_crypto"
verbose_name = "authentik Crypto" verbose_name = "authentik Crypto"
default = True
def reconcile_load_crypto_tasks(self): def ready(self):
"""Load crypto tasks""" import_module("authentik.crypto.managed")
self.import_module("authentik.crypto.tasks") import_module("authentik.crypto.tasks")
def _create_update_cert(self, cert: Optional["CertificateKeyPair"] = None):
from authentik.crypto.builder import CertificateBuilder
from authentik.crypto.models import CertificateKeyPair
builder = CertificateBuilder()
builder.common_name = "goauthentik.io"
builder.build(
subject_alt_names=["goauthentik.io"],
validity_days=360,
)
if not cert:
cert = CertificateKeyPair()
cert.certificate_data = builder.certificate
cert.key_data = builder.private_key
cert.name = "authentik Internal JWT Certificate"
cert.managed = MANAGED_KEY
cert.save()
def reconcile_managed_jwt_cert(self):
"""Ensure managed JWT certificate"""
from authentik.crypto.models import CertificateKeyPair
certs = CertificateKeyPair.objects.filter(managed=MANAGED_KEY)
if not certs.exists():
self._create_update_cert()
return
cert: CertificateKeyPair = certs.first()
now = datetime.now()
if now < cert.certificate.not_valid_before or now > cert.certificate.not_valid_after:
self._create_update_cert(cert)
def reconcile_self_signed(self):
"""Create self-signed keypair"""
from authentik.crypto.builder import CertificateBuilder
from authentik.crypto.models import CertificateKeyPair
name = "authentik Self-signed Certificate"
if CertificateKeyPair.objects.filter(name=name).exists():
return
builder = CertificateBuilder()
builder.build(subject_alt_names=[f"{generate_id()}.self-signed.goauthentik.io"])
CertificateKeyPair.objects.create(
name="authentik Self-signed Certificate",
certificate_data=builder.certificate,
key_data=builder.private_key,
)

View File

@ -0,0 +1,40 @@
"""Crypto managed objects"""
from datetime import datetime
from typing import Optional
from authentik.crypto.builder import CertificateBuilder
from authentik.crypto.models import CertificateKeyPair
from authentik.managed.manager import ObjectManager
MANAGED_KEY = "goauthentik.io/crypto/jwt-managed"
class CryptoManager(ObjectManager):
"""Crypto managed objects"""
def _create(self, cert: Optional[CertificateKeyPair] = None):
builder = CertificateBuilder()
builder.common_name = "goauthentik.io"
builder.build(
subject_alt_names=["goauthentik.io"],
validity_days=360,
)
if not cert:
cert = CertificateKeyPair()
cert.certificate_data = builder.certificate
cert.key_data = builder.private_key
cert.name = "authentik Internal JWT Certificate"
cert.managed = MANAGED_KEY
cert.save()
def reconcile(self):
certs = CertificateKeyPair.objects.filter(managed=MANAGED_KEY)
if not certs.exists():
self._create()
return []
cert: CertificateKeyPair = certs.first()
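# Regenerate the managed certificate once it is expired or not yet valid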
now = datetime.now()
if now < cert.certificate.not_valid_before or now > cert.certificate.not_valid_after:
self._create(cert)
return []
return []

View File

@ -5,10 +5,24 @@ from django.db import migrations
from authentik.lib.generators import generate_id from authentik.lib.generators import generate_id
def create_self_signed(apps, schema_editor):
CertificateKeyPair = apps.get_model("authentik_crypto", "CertificateKeyPair")
db_alias = schema_editor.connection.alias
from authentik.crypto.builder import CertificateBuilder
builder = CertificateBuilder()
builder.build(subject_alt_names=[f"{generate_id()}.self-signed.goauthentik.io"])
CertificateKeyPair.objects.using(db_alias).create(
name="authentik Self-signed Certificate",
certificate_data=builder.certificate,
key_data=builder.private_key,
)
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
("authentik_crypto", "0001_initial"), ("authentik_crypto", "0001_initial"),
] ]
operations = [] operations = [migrations.RunPython(create_self_signed)]

View File

@ -16,16 +16,15 @@ from cryptography.hazmat.primitives.serialization import load_pem_private_key
from cryptography.x509 import Certificate, load_pem_x509_certificate from cryptography.x509 import Certificate, load_pem_x509_certificate
from django.db import models from django.db import models
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
from authentik.blueprints.models import ManagedModel from authentik.lib.models import CreatedUpdatedModel
from authentik.lib.models import CreatedUpdatedModel, SerializerModel from authentik.managed.models import ManagedModel
LOGGER = get_logger() LOGGER = get_logger()
class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel): class CertificateKeyPair(ManagedModel, CreatedUpdatedModel):
"""CertificateKeyPair that can be used for signing or encrypting if `key_data` """CertificateKeyPair that can be used for signing or encrypting if `key_data`
is set, otherwise it can be used to verify remote data.""" is set, otherwise it can be used to verify remote data."""
@ -45,12 +44,6 @@ class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):
_private_key: Optional[RSAPrivateKey | EllipticCurvePrivateKey | Ed25519PrivateKey] = None _private_key: Optional[RSAPrivateKey | EllipticCurvePrivateKey | Ed25519PrivateKey] = None
_public_key: Optional[RSAPublicKey | EllipticCurvePublicKey | Ed25519PublicKey] = None _public_key: Optional[RSAPublicKey | EllipticCurvePublicKey | Ed25519PublicKey] = None
@property
def serializer(self) -> Serializer:
from authentik.crypto.api import CertificateKeyPairSerializer
return CertificateKeyPairSerializer
@property @property
def certificate(self) -> Certificate: def certificate(self) -> Certificate:
"""Get python cryptography Certificate instance""" """Get python cryptography Certificate instance"""

View File

@ -1,7 +1,8 @@
"""authentik events app""" """authentik events app"""
from prometheus_client import Gauge from importlib import import_module
from authentik.blueprints.manager import ManagedAppConfig from django.apps import AppConfig
from prometheus_client import Gauge
GAUGE_TASKS = Gauge( GAUGE_TASKS = Gauge(
"authentik_system_tasks", "authentik_system_tasks",
@ -10,14 +11,12 @@ GAUGE_TASKS = Gauge(
) )
class AuthentikEventsConfig(ManagedAppConfig): class AuthentikEventsConfig(AppConfig):
"""authentik events app""" """authentik events app"""
name = "authentik.events" name = "authentik.events"
label = "authentik_events" label = "authentik_events"
verbose_name = "authentik Events" verbose_name = "authentik Events"
default = True
def reconcile_load_events_signals(self): def ready(self):
"""Load events signals""" import_module("authentik.events.signals")
self.import_module("authentik.events.signals")

View File

@ -11,6 +11,7 @@ from django.http import HttpRequest, HttpResponse
from django_otp.plugins.otp_static.models import StaticToken from django_otp.plugins.otp_static.models import StaticToken
from guardian.models import UserObjectPermission from guardian.models import UserObjectPermission
from authentik.core.middleware import LOCAL
from authentik.core.models import AuthenticatedSession, User from authentik.core.models import AuthenticatedSession, User
from authentik.events.models import Event, EventAction, Notification from authentik.events.models import Event, EventAction, Notification
from authentik.events.signals import EventNewThread from authentik.events.signals import EventNewThread
@ -44,46 +45,36 @@ class AuditMiddleware:
def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]): def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]):
self.get_response = get_response self.get_response = get_response
def connect(self, request: HttpRequest):
"""Connect signal for automatic logging"""
if not hasattr(request, "user"):
return
if not getattr(request.user, "is_authenticated", False):
return
if not hasattr(request, "request_id"):
return
post_save_handler = partial(self.post_save_handler, user=request.user, request=request)
pre_delete_handler = partial(self.pre_delete_handler, user=request.user, request=request)
post_save.connect(
post_save_handler,
dispatch_uid=request.request_id,
weak=False,
)
pre_delete.connect(
pre_delete_handler,
dispatch_uid=request.request_id,
weak=False,
)
def disconnect(self, request: HttpRequest):
"""Disconnect signals"""
if not hasattr(request, "request_id"):
return
post_save.disconnect(dispatch_uid=request.request_id)
pre_delete.disconnect(dispatch_uid=request.request_id)
def __call__(self, request: HttpRequest) -> HttpResponse: def __call__(self, request: HttpRequest) -> HttpResponse:
self.connect(request) # Connect signal for automatic logging
if hasattr(request, "user") and getattr(request.user, "is_authenticated", False):
post_save_handler = partial(self.post_save_handler, user=request.user, request=request)
pre_delete_handler = partial(
self.pre_delete_handler, user=request.user, request=request
)
post_save.connect(
post_save_handler,
dispatch_uid=LOCAL.authentik["request_id"],
weak=False,
)
pre_delete.connect(
pre_delete_handler,
dispatch_uid=LOCAL.authentik["request_id"],
weak=False,
)
response = self.get_response(request) response = self.get_response(request)
self.disconnect(request) post_save.disconnect(dispatch_uid=LOCAL.authentik["request_id"])
pre_delete.disconnect(dispatch_uid=LOCAL.authentik["request_id"])
return response return response
# pylint: disable=unused-argument # pylint: disable=unused-argument
def process_exception(self, request: HttpRequest, exception: Exception): def process_exception(self, request: HttpRequest, exception: Exception):
"""Disconnect handlers in case of exception""" """Disconnect handlers in case of exception"""
self.disconnect(request) post_save.disconnect(dispatch_uid=LOCAL.authentik["request_id"])
pre_delete.disconnect(dispatch_uid=LOCAL.authentik["request_id"])
if settings.DEBUG: if settings.DEBUG:
return return

View File

@ -1,5 +1,29 @@
# Generated by Django 4.0.4 on 2022-05-30 18:08 # Generated by Django 4.0.4 on 2022-05-30 18:08
from django.apps.registry import Apps
from django.db import migrations, models from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from authentik.events.models import TransportMode
def notify_local_transport(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
db_alias = schema_editor.connection.alias
NotificationTransport = apps.get_model("authentik_events", "NotificationTransport")
NotificationRule = apps.get_model("authentik_events", "NotificationRule")
local_transport, _ = NotificationTransport.objects.using(db_alias).update_or_create(
name="default-local-transport",
defaults={"mode": TransportMode.LOCAL},
)
for trigger in NotificationRule.objects.using(db_alias).filter(
name__in=[
"default-notify-configuration-error",
"default-notify-exception",
"default-notify-update",
]
):
trigger.transports.add(local_transport)
class Migration(migrations.Migration): class Migration(migrations.Migration):
@ -22,4 +46,5 @@ class Migration(migrations.Migration):
default="local", default="local",
), ),
), ),
migrations.RunPython(notify_local_transport),
] ]

View File

@ -1,6 +1,130 @@
# Generated by Django 3.1.4 on 2021-01-10 18:57 # Generated by Django 3.1.4 on 2021-01-10 18:57
from django.apps.registry import Apps
from django.db import migrations from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from authentik.events.models import EventAction, NotificationSeverity, TransportMode
def notify_configuration_error(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
db_alias = schema_editor.connection.alias
Group = apps.get_model("authentik_core", "Group")
PolicyBinding = apps.get_model("authentik_policies", "PolicyBinding")
EventMatcherPolicy = apps.get_model("authentik_policies_event_matcher", "EventMatcherPolicy")
NotificationRule = apps.get_model("authentik_events", "NotificationRule")
NotificationTransport = apps.get_model("authentik_events", "NotificationTransport")
admin_group = (
Group.objects.using(db_alias).filter(name="authentik Admins", is_superuser=True).first()
)
policy, _ = EventMatcherPolicy.objects.using(db_alias).update_or_create(
name="default-match-configuration-error",
defaults={"action": EventAction.CONFIGURATION_ERROR},
)
trigger, _ = NotificationRule.objects.using(db_alias).update_or_create(
name="default-notify-configuration-error",
defaults={"group": admin_group, "severity": NotificationSeverity.ALERT},
)
trigger.transports.set(
NotificationTransport.objects.using(db_alias).filter(name="default-email-transport")
)
trigger.save()
PolicyBinding.objects.using(db_alias).update_or_create(
target=trigger,
policy=policy,
defaults={
"order": 0,
},
)
def notify_update(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
db_alias = schema_editor.connection.alias
Group = apps.get_model("authentik_core", "Group")
PolicyBinding = apps.get_model("authentik_policies", "PolicyBinding")
EventMatcherPolicy = apps.get_model("authentik_policies_event_matcher", "EventMatcherPolicy")
NotificationRule = apps.get_model("authentik_events", "NotificationRule")
NotificationTransport = apps.get_model("authentik_events", "NotificationTransport")
admin_group = (
Group.objects.using(db_alias).filter(name="authentik Admins", is_superuser=True).first()
)
policy, _ = EventMatcherPolicy.objects.using(db_alias).update_or_create(
name="default-match-update",
defaults={"action": EventAction.UPDATE_AVAILABLE},
)
trigger, _ = NotificationRule.objects.using(db_alias).update_or_create(
name="default-notify-update",
defaults={"group": admin_group, "severity": NotificationSeverity.ALERT},
)
trigger.transports.set(
NotificationTransport.objects.using(db_alias).filter(name="default-email-transport")
)
trigger.save()
PolicyBinding.objects.using(db_alias).update_or_create(
target=trigger,
policy=policy,
defaults={
"order": 0,
},
)
def notify_exception(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
db_alias = schema_editor.connection.alias
Group = apps.get_model("authentik_core", "Group")
PolicyBinding = apps.get_model("authentik_policies", "PolicyBinding")
EventMatcherPolicy = apps.get_model("authentik_policies_event_matcher", "EventMatcherPolicy")
NotificationRule = apps.get_model("authentik_events", "NotificationRule")
NotificationTransport = apps.get_model("authentik_events", "NotificationTransport")
admin_group = (
Group.objects.using(db_alias).filter(name="authentik Admins", is_superuser=True).first()
)
policy_policy_exc, _ = EventMatcherPolicy.objects.using(db_alias).update_or_create(
name="default-match-policy-exception",
defaults={"action": EventAction.POLICY_EXCEPTION},
)
policy_pm_exc, _ = EventMatcherPolicy.objects.using(db_alias).update_or_create(
name="default-match-property-mapping-exception",
defaults={"action": EventAction.PROPERTY_MAPPING_EXCEPTION},
)
trigger, _ = NotificationRule.objects.using(db_alias).update_or_create(
name="default-notify-exception",
defaults={"group": admin_group, "severity": NotificationSeverity.ALERT},
)
trigger.transports.set(
NotificationTransport.objects.using(db_alias).filter(name="default-email-transport")
)
trigger.save()
PolicyBinding.objects.using(db_alias).update_or_create(
target=trigger,
policy=policy_policy_exc,
defaults={
"order": 0,
},
)
PolicyBinding.objects.using(db_alias).update_or_create(
target=trigger,
policy=policy_pm_exc,
defaults={
"order": 1,
},
)
def transport_email_global(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
db_alias = schema_editor.connection.alias
NotificationTransport = apps.get_model("authentik_events", "NotificationTransport")
NotificationTransport.objects.using(db_alias).update_or_create(
name="default-email-transport",
defaults={"mode": TransportMode.EMAIL},
)
class Migration(migrations.Migration): class Migration(migrations.Migration):
@ -10,6 +134,14 @@ class Migration(migrations.Migration):
"authentik_events", "authentik_events",
"0010_notification_notificationtransport_notificationrule", "0010_notification_notificationtransport_notificationrule",
), ),
("authentik_core", "0016_auto_20201202_2234"),
("authentik_policies_event_matcher", "0003_auto_20210110_1907"),
("authentik_policies", "0004_policy_execution_logging"),
] ]
operations = [] operations = [
migrations.RunPython(transport_email_global),
migrations.RunPython(notify_configuration_error),
migrations.RunPython(notify_update),
migrations.RunPython(notify_exception),
]

View File

@ -30,7 +30,7 @@ from authentik.core.middleware import (
from authentik.core.models import ExpiringModel, Group, PropertyMapping, User from authentik.core.models import ExpiringModel, Group, PropertyMapping, User
from authentik.events.geo import GEOIP_READER from authentik.events.geo import GEOIP_READER
from authentik.events.utils import cleanse_dict, get_user, model_to_dict, sanitize_dict from authentik.events.utils import cleanse_dict, get_user, model_to_dict, sanitize_dict
from authentik.lib.models import DomainlessURLValidator, SerializerModel from authentik.lib.models import DomainlessURLValidator
from authentik.lib.sentry import SentryIgnoredException from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.http import get_client_ip, get_http_session from authentik.lib.utils.http import get_client_ip, get_http_session
from authentik.lib.utils.time import timedelta_from_string from authentik.lib.utils.time import timedelta_from_string
@ -168,7 +168,7 @@ class EventManager(Manager):
return self.get_queryset().get_events_per_day() return self.get_queryset().get_events_per_day()
class Event(SerializerModel, ExpiringModel): class Event(ExpiringModel):
"""An individual Audit/Metrics/Notification/Error Event""" """An individual Audit/Metrics/Notification/Error Event"""
event_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4) event_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
@ -273,12 +273,6 @@ class Event(SerializerModel, ExpiringModel):
) )
super().save(*args, **kwargs) super().save(*args, **kwargs)
@property
def serializer(self) -> "Serializer":
from authentik.events.api.events import EventSerializer
return EventSerializer
@property @property
def summary(self) -> str: def summary(self) -> str:
"""Return a summary of this event.""" """Return a summary of this event."""
@ -304,7 +298,7 @@ class TransportMode(models.TextChoices):
EMAIL = "email", _("Email") EMAIL = "email", _("Email")
class NotificationTransport(SerializerModel): class NotificationTransport(models.Model):
"""Action which is executed when a Rule matches""" """Action which is executed when a Rule matches"""
uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4) uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
@ -454,12 +448,6 @@ class NotificationTransport(SerializerModel):
except (SMTPException, ConnectionError, OSError) as exc: except (SMTPException, ConnectionError, OSError) as exc:
raise NotificationTransportError from exc raise NotificationTransportError from exc
@property
def serializer(self) -> "Serializer":
from authentik.events.api.notification_transports import NotificationTransportSerializer
return NotificationTransportSerializer
def __str__(self) -> str: def __str__(self) -> str:
return f"Notification Transport {self.name}" return f"Notification Transport {self.name}"
@ -477,7 +465,7 @@ class NotificationSeverity(models.TextChoices):
ALERT = "alert", _("Alert") ALERT = "alert", _("Alert")
class Notification(SerializerModel): class Notification(models.Model):
"""Event Notification""" """Event Notification"""
uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4) uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
@ -488,12 +476,6 @@ class Notification(SerializerModel):
seen = models.BooleanField(default=False) seen = models.BooleanField(default=False)
user = models.ForeignKey(User, on_delete=models.CASCADE) user = models.ForeignKey(User, on_delete=models.CASCADE)
@property
def serializer(self) -> "Serializer":
from authentik.events.api.notifications import NotificationSerializer
return NotificationSerializer
def __str__(self) -> str: def __str__(self) -> str:
body_trunc = (self.body[:75] + "..") if len(self.body) > 75 else self.body body_trunc = (self.body[:75] + "..") if len(self.body) > 75 else self.body
return f"Notification for user {self.user}: {body_trunc}" return f"Notification for user {self.user}: {body_trunc}"
@ -504,7 +486,7 @@ class Notification(SerializerModel):
verbose_name_plural = _("Notifications") verbose_name_plural = _("Notifications")
class NotificationRule(SerializerModel, PolicyBindingModel): class NotificationRule(PolicyBindingModel):
"""Decide when to create a Notification based on policies attached to this object.""" """Decide when to create a Notification based on policies attached to this object."""
name = models.TextField(unique=True) name = models.TextField(unique=True)
@ -536,12 +518,6 @@ class NotificationRule(SerializerModel, PolicyBindingModel):
on_delete=models.SET_NULL, on_delete=models.SET_NULL,
) )
@property
def serializer(self) -> "Serializer":
from authentik.events.api.notification_rules import NotificationRuleSerializer
return NotificationRuleSerializer
def __str__(self) -> str: def __str__(self) -> str:
return f"Notification Rule {self.name}" return f"Notification Rule {self.name}"

View File

@ -1,7 +1,6 @@
"""event utilities""" """event utilities"""
import re import re
from dataclasses import asdict, is_dataclass from dataclasses import asdict, is_dataclass
from pathlib import Path
from typing import Any, Optional from typing import Any, Optional
from uuid import UUID from uuid import UUID
@ -23,31 +22,21 @@ from authentik.policies.types import PolicyRequest
ALLOWED_SPECIAL_KEYS = re.compile("passing", flags=re.I) ALLOWED_SPECIAL_KEYS = re.compile("passing", flags=re.I)
def cleanse_item(key: str, value: Any) -> Any:
"""Cleanse a single item"""
if isinstance(value, dict):
return cleanse_dict(value)
if isinstance(value, list):
for idx, item in enumerate(value):
value[idx] = cleanse_item(key, item)
return value
try:
if SafeExceptionReporterFilter.hidden_settings.search(
key
) and not ALLOWED_SPECIAL_KEYS.search(key):
return SafeExceptionReporterFilter.cleansed_substitute
except TypeError: # pragma: no cover
return value
return value
def cleanse_dict(source: dict[Any, Any]) -> dict[Any, Any]: def cleanse_dict(source: dict[Any, Any]) -> dict[Any, Any]:
"""Cleanse a dictionary, recursively""" """Cleanse a dictionary, recursively"""
final_dict = {} final_dict = {}
for key, value in source.items(): for key, value in source.items():
new_value = cleanse_item(key, value) try:
if new_value is not ...: if SafeExceptionReporterFilter.hidden_settings.search(
final_dict[key] = new_value key
) and not ALLOWED_SPECIAL_KEYS.search(key):
final_dict[key] = SafeExceptionReporterFilter.cleansed_substitute
else:
final_dict[key] = value
except TypeError: # pragma: no cover
final_dict[key] = value
if isinstance(value, dict):
final_dict[key] = cleanse_dict(value)
return final_dict return final_dict
@ -80,45 +69,6 @@ def get_user(user: User, original_user: Optional[User] = None) -> dict[str, Any]
return user_data return user_data
# pylint: disable=too-many-return-statements
def sanitize_item(value: Any) -> Any:
"""Sanitize a single item, ensure it is JSON parsable"""
if is_dataclass(value):
# Because asdict calls `copy.deepcopy(obj)` on everything that's not tuple/dict,
# and deepcopy doesn't work with HttpRequests (neither django nor rest_framework).
# Currently, the only dataclass that actually holds an http request is a PolicyRequest
if isinstance(value, PolicyRequest):
value.http_request = None
value = asdict(value)
if isinstance(value, dict):
return sanitize_dict(value)
if isinstance(value, list):
new_values = []
for item in value:
new_value = sanitize_item(item)
if new_value:
new_values.append(new_value)
return new_values
if isinstance(value, (User, AnonymousUser)):
return sanitize_dict(get_user(value))
if isinstance(value, models.Model):
return sanitize_dict(model_to_dict(value))
if isinstance(value, UUID):
return value.hex
if isinstance(value, (HttpRequest, WSGIRequest)):
return ...
if isinstance(value, City):
return GEOIP_READER.city_to_dict(value)
if isinstance(value, Path):
return str(value)
if isinstance(value, type):
return {
"type": value.__name__,
"module": value.__module__,
}
return value
def sanitize_dict(source: dict[Any, Any]) -> dict[Any, Any]: def sanitize_dict(source: dict[Any, Any]) -> dict[Any, Any]:
"""clean source of all Models that would interfere with the JSONField. """clean source of all Models that would interfere with the JSONField.
Models are replaced with a dictionary of { Models are replaced with a dictionary of {
@ -128,7 +78,30 @@ def sanitize_dict(source: dict[Any, Any]) -> dict[Any, Any]:
}""" }"""
final_dict = {} final_dict = {}
for key, value in source.items(): for key, value in source.items():
new_value = sanitize_item(value) if is_dataclass(value):
if new_value is not ...: # Because asdict calls `copy.deepcopy(obj)` on everything that's not tuple/dict,
final_dict[key] = new_value # and deepcopy doesn't work with HttpRequests (neither django nor rest_framework).
# Currently, the only dataclass that actually holds an http request is a PolicyRequest
if isinstance(value, PolicyRequest):
value.http_request = None
value = asdict(value)
if isinstance(value, dict):
final_dict[key] = sanitize_dict(value)
elif isinstance(value, (User, AnonymousUser)):
final_dict[key] = sanitize_dict(get_user(value))
elif isinstance(value, models.Model):
final_dict[key] = sanitize_dict(model_to_dict(value))
elif isinstance(value, UUID):
final_dict[key] = value.hex
elif isinstance(value, (HttpRequest, WSGIRequest)):
continue
elif isinstance(value, City):
final_dict[key] = GEOIP_READER.city_to_dict(value)
elif isinstance(value, type):
final_dict[key] = {
"type": value.__name__,
"module": value.__module__,
}
else:
final_dict[key] = value
return final_dict return final_dict
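The refactor on the left-hand side splits the per-value logic into cleanse_item/sanitize_item so nested lists are handled and Ellipsis (...) acts as a drop-this-value sentinel. As a standalone illustration of the same redaction idea, independent of Django's SafeExceptionReporterFilter (the regex and sample keys below are made up):

import re
from typing import Any

HIDDEN = re.compile("api|token|key|secret|password", flags=re.I)


def cleanse_item(key: str, value: Any) -> Any:
    """Redact a value whose key looks sensitive; recurse into containers."""
    if isinstance(value, dict):
        return {k: cleanse_item(k, v) for k, v in value.items()}
    if isinstance(value, list):
        return [cleanse_item(key, item) for item in value]
    if HIDDEN.search(key):
        return "********"
    return value


print(cleanse_item("root", {"username": "jens", "api_key": "abc", "nested": [{"password": "x"}]}))
# {'username': 'jens', 'api_key': '********', 'nested': [{'password': '********'}]}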

View File

@ -3,8 +3,7 @@ from dataclasses import dataclass
from django.core.cache import cache from django.core.cache import cache
from django.db.models import Model from django.db.models import Model
from django.http import HttpResponse from django.http.response import HttpResponseBadRequest, JsonResponse
from django.http.response import HttpResponseBadRequest
from django.urls import reverse from django.urls import reverse
from django.utils.translation import gettext as _ from django.utils.translation import gettext as _
from drf_spectacular.types import OpenApiTypes from drf_spectacular.types import OpenApiTypes
@ -20,8 +19,6 @@ from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
from authentik.api.decorators import permission_required from authentik.api.decorators import permission_required
from authentik.blueprints.v1.exporter import FlowExporter
from authentik.blueprints.v1.importer import Importer
from authentik.core.api.used_by import UsedByMixin from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ( from authentik.core.api.utils import (
CacheSerializer, CacheSerializer,
@ -32,6 +29,9 @@ from authentik.core.api.utils import (
from authentik.flows.exceptions import FlowNonApplicableException from authentik.flows.exceptions import FlowNonApplicableException
from authentik.flows.models import Flow from authentik.flows.models import Flow
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner, cache_key from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner, cache_key
from authentik.flows.transfer.common import DataclassEncoder
from authentik.flows.transfer.exporter import FlowExporter
from authentik.flows.transfer.importer import FlowImporter
from authentik.flows.views.executor import SESSION_KEY_HISTORY, SESSION_KEY_PLAN from authentik.flows.views.executor import SESSION_KEY_HISTORY, SESSION_KEY_PLAN
from authentik.lib.views import bad_request_message from authentik.lib.views import bad_request_message
@ -163,13 +163,12 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
) )
@action(detail=False, methods=["POST"], parser_classes=(MultiPartParser,)) @action(detail=False, methods=["POST"], parser_classes=(MultiPartParser,))
def import_flow(self, request: Request) -> Response: def import_flow(self, request: Request) -> Response:
"""Import flow from .yaml file""" """Import flow from .akflow file"""
file = request.FILES.get("file", None) file = request.FILES.get("file", None)
if not file: if not file:
return HttpResponseBadRequest() return HttpResponseBadRequest()
importer = Importer(file.read().decode()) importer = FlowImporter(file.read().decode())
valid, _logs = importer.validate() valid = importer.validate()
# TODO: return logs
if not valid: if not valid:
return HttpResponseBadRequest() return HttpResponseBadRequest()
successful = importer.apply() successful = importer.apply()
@ -196,11 +195,11 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
@action(detail=True, pagination_class=None, filter_backends=[]) @action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=unused-argument # pylint: disable=unused-argument
def export(self, request: Request, slug: str) -> Response: def export(self, request: Request, slug: str) -> Response:
"""Export flow to .yaml file""" """Export flow to .akflow file"""
flow = self.get_object() flow = self.get_object()
exporter = FlowExporter(flow) exporter = FlowExporter(flow)
response = HttpResponse(content=exporter.export_to_string()) response = JsonResponse(exporter.export(), encoder=DataclassEncoder, safe=False)
response["Content-Disposition"] = f'attachment; filename="{flow.slug}.yaml"' response["Content-Disposition"] = f'attachment; filename="{flow.slug}.akflow"'
return response return response
@extend_schema(responses={200: FlowDiagramSerializer()}) @extend_schema(responses={200: FlowDiagramSerializer()})
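As a usage note for the import/export actions above: the YAML-based side of this diff exports a flow as a YAML string and validates imports before applying them. The call shapes below are taken from this diff; the flow lookup is only illustrative and assumes the default flows are installed:

from authentik.blueprints.v1.exporter import FlowExporter
from authentik.blueprints.v1.importer import Importer
from authentik.flows.models import Flow

flow = Flow.objects.get(slug="default-authentication-flow")  # any existing flow

export_yaml = FlowExporter(flow).export_to_string()  # what the export action returns

importer = Importer(export_yaml)
valid, _logs = importer.validate()  # validate() also returns captured logs on that side
if valid:
    importer.apply()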

View File

@ -1,7 +1,10 @@
"""authentik flows app config""" """authentik flows app config"""
from importlib import import_module
from django.apps import AppConfig
from django.db.utils import ProgrammingError
from prometheus_client import Gauge, Histogram from prometheus_client import Gauge, Histogram
from authentik.blueprints.manager import ManagedAppConfig
from authentik.lib.utils.reflection import all_subclasses from authentik.lib.utils.reflection import all_subclasses
GAUGE_FLOWS_CACHED = Gauge( GAUGE_FLOWS_CACHED = Gauge(
@ -15,22 +18,20 @@ HIST_FLOWS_PLAN_TIME = Histogram(
) )
class AuthentikFlowsConfig(ManagedAppConfig): class AuthentikFlowsConfig(AppConfig):
"""authentik flows app config""" """authentik flows app config"""
name = "authentik.flows" name = "authentik.flows"
label = "authentik_flows" label = "authentik_flows"
mountpoint = "flows/" mountpoint = "flows/"
verbose_name = "authentik Flows" verbose_name = "authentik Flows"
default = True
def reconcile_load_flows_signals(self): def ready(self):
"""Load flows signals""" import_module("authentik.flows.signals")
self.import_module("authentik.flows.signals") try:
from authentik.flows.models import Stage
def reconcile_load_stages(self): for stage in all_subclasses(Stage):
"""Ensure all stages are loaded""" _ = stage().type
from authentik.flows.models import Stage except ProgrammingError:
pass
for stage in all_subclasses(Stage):
_ = stage().type

View File

@ -1,16 +1,14 @@
"""Challenge helpers""" """Challenge helpers"""
from dataclasses import asdict, is_dataclass
from enum import Enum from enum import Enum
from typing import TYPE_CHECKING, Optional, TypedDict from typing import TYPE_CHECKING, Optional, TypedDict
from uuid import UUID
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models from django.db import models
from django.http import JsonResponse from django.http import JsonResponse
from rest_framework.fields import ChoiceField, DictField from rest_framework.fields import ChoiceField, DictField
from rest_framework.serializers import CharField from rest_framework.serializers import CharField
from authentik.core.api.utils import PassiveSerializer from authentik.core.api.utils import PassiveSerializer
from authentik.flows.transfer.common import DataclassEncoder
if TYPE_CHECKING: if TYPE_CHECKING:
from authentik.flows.stage import StageView from authentik.flows.stage import StageView
@ -107,7 +105,7 @@ class PermissionDict(TypedDict):
class PermissionSerializer(PassiveSerializer): class PermissionSerializer(PassiveSerializer):
"""Permission used for consent""" """Permission used for consent"""
name = CharField(allow_blank=True) name = CharField()
id = CharField() id = CharField()
@ -137,19 +135,6 @@ class AutoSubmitChallengeResponse(ChallengeResponse):
component = CharField(default="ak-stage-autosubmit") component = CharField(default="ak-stage-autosubmit")
class DataclassEncoder(DjangoJSONEncoder):
"""Convert any dataclass to json"""
def default(self, o):
if is_dataclass(o):
return asdict(o)
if isinstance(o, UUID):
return str(o)
if isinstance(o, Enum):
return o.value
return super().default(o) # pragma: no cover
class HttpChallengeResponse(JsonResponse): class HttpChallengeResponse(JsonResponse):
"""Subclass of JsonResponse that uses the `DataclassEncoder`""" """Subclass of JsonResponse that uses the `DataclassEncoder`"""

View File

@ -0,0 +1,22 @@
"""Apply flow from commandline"""
from django.core.management.base import BaseCommand, no_translations
from authentik.flows.transfer.importer import FlowImporter
class Command(BaseCommand): # pragma: no cover
"""Apply flow from commandline"""
@no_translations
def handle(self, *args, **options):
"""Apply all flows in order, abort when one fails to import"""
for flow_path in options.get("flows", []):
with open(flow_path, "r", encoding="utf8") as flow_file:
importer = FlowImporter(flow_file.read())
valid = importer.validate()
if not valid:
raise ValueError("Flow invalid")
importer.apply()
def add_arguments(self, parser):
parser.add_argument("flows", nargs="+", type=str)

View File

@ -48,7 +48,7 @@ class FlowPlanProcess(PROCESS_CLASS): # pragma: no cover
self.return_dict[self.index] = diffs self.return_dict[self.index] = diffs
class Command(BaseCommand): class Command(BaseCommand): # pragma: no cover
"""Benchmark authentik""" """Benchmark authentik"""
def add_arguments(self, parser): def add_arguments(self, parser):

View File

@ -1,12 +1,103 @@
# Generated by Django 3.0.3 on 2020-05-08 14:30 # Generated by Django 3.0.3 on 2020-05-08 14:30
from django.apps.registry import Apps
from django.db import migrations from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from authentik.flows.models import FlowDesignation
from authentik.stages.identification.models import UserFields
from authentik.stages.password import BACKEND_APP_PASSWORD, BACKEND_INBUILT, BACKEND_LDAP
def create_default_authentication_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
Flow = apps.get_model("authentik_flows", "Flow")
FlowStageBinding = apps.get_model("authentik_flows", "FlowStageBinding")
PasswordStage = apps.get_model("authentik_stages_password", "PasswordStage")
UserLoginStage = apps.get_model("authentik_stages_user_login", "UserLoginStage")
IdentificationStage = apps.get_model("authentik_stages_identification", "IdentificationStage")
db_alias = schema_editor.connection.alias
identification_stage, _ = IdentificationStage.objects.using(db_alias).update_or_create(
name="default-authentication-identification",
defaults={
"user_fields": [UserFields.E_MAIL, UserFields.USERNAME],
},
)
password_stage, _ = PasswordStage.objects.using(db_alias).update_or_create(
name="default-authentication-password",
defaults={"backends": [BACKEND_INBUILT, BACKEND_LDAP, BACKEND_APP_PASSWORD]},
)
login_stage, _ = UserLoginStage.objects.using(db_alias).update_or_create(
name="default-authentication-login"
)
flow, _ = Flow.objects.using(db_alias).update_or_create(
slug="default-authentication-flow",
designation=FlowDesignation.AUTHENTICATION,
defaults={
"name": "Welcome to authentik!",
},
)
FlowStageBinding.objects.using(db_alias).update_or_create(
target=flow,
stage=identification_stage,
defaults={
"order": 10,
},
)
FlowStageBinding.objects.using(db_alias).update_or_create(
target=flow,
stage=password_stage,
defaults={
"order": 20,
},
)
FlowStageBinding.objects.using(db_alias).update_or_create(
target=flow,
stage=login_stage,
defaults={
"order": 100,
},
)
def create_default_invalidation_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
Flow = apps.get_model("authentik_flows", "Flow")
FlowStageBinding = apps.get_model("authentik_flows", "FlowStageBinding")
UserLogoutStage = apps.get_model("authentik_stages_user_logout", "UserLogoutStage")
db_alias = schema_editor.connection.alias
UserLogoutStage.objects.using(db_alias).update_or_create(name="default-invalidation-logout")
flow, _ = Flow.objects.using(db_alias).update_or_create(
slug="default-invalidation-flow",
designation=FlowDesignation.INVALIDATION,
defaults={
"name": "Logout",
},
)
FlowStageBinding.objects.using(db_alias).update_or_create(
target=flow,
stage=UserLogoutStage.objects.using(db_alias).first(),
defaults={
"order": 0,
},
)
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
("authentik_flows", "0007_auto_20200703_2059"), ("authentik_flows", "0007_auto_20200703_2059"),
("authentik_stages_user_login", "0001_initial"),
("authentik_stages_user_logout", "0001_initial"),
("authentik_stages_password", "0001_initial"),
("authentik_stages_identification", "0001_initial"),
] ]
operations = [] operations = [
migrations.RunPython(create_default_authentication_flow),
migrations.RunPython(create_default_invalidation_flow),
]

View File

@ -1,12 +1,151 @@
# Generated by Django 3.0.6 on 2020-05-23 15:47 # Generated by Django 3.0.6 on 2020-05-23 15:47
from django.apps.registry import Apps
from django.db import migrations from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from authentik.flows.models import FlowDesignation
from authentik.stages.prompt.models import FieldTypes
FLOW_POLICY_EXPRESSION = """# This policy ensures that this flow can only be used when the user
# is in a SSO Flow (meaning they come from an external IdP)
return ak_is_sso_flow"""
PROMPT_POLICY_EXPRESSION = """# Check if we've not been given a username by the external IdP
# and trigger the enrollment flow
return 'username' not in context.get('prompt_data', {})"""
def create_default_source_enrollment_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
Flow = apps.get_model("authentik_flows", "Flow")
FlowStageBinding = apps.get_model("authentik_flows", "FlowStageBinding")
PolicyBinding = apps.get_model("authentik_policies", "PolicyBinding")
ExpressionPolicy = apps.get_model("authentik_policies_expression", "ExpressionPolicy")
PromptStage = apps.get_model("authentik_stages_prompt", "PromptStage")
Prompt = apps.get_model("authentik_stages_prompt", "Prompt")
UserWriteStage = apps.get_model("authentik_stages_user_write", "UserWriteStage")
UserLoginStage = apps.get_model("authentik_stages_user_login", "UserLoginStage")
db_alias = schema_editor.connection.alias
# Create a policy that only allows this flow when doing an SSO Request
flow_policy, _ = ExpressionPolicy.objects.using(db_alias).update_or_create(
name="default-source-enrollment-if-sso",
defaults={"expression": FLOW_POLICY_EXPRESSION},
)
# This creates a Flow used by sources to enroll users
# It makes sure that a username is set, and if not, prompts the user for a username
flow, _ = Flow.objects.using(db_alias).update_or_create(
slug="default-source-enrollment",
designation=FlowDesignation.ENROLLMENT,
defaults={
"name": "Welcome to authentik! Please select a username.",
},
)
PolicyBinding.objects.using(db_alias).update_or_create(
policy=flow_policy, target=flow, defaults={"order": 0}
)
# PromptStage to ask user for their username
prompt_stage, _ = PromptStage.objects.using(db_alias).update_or_create(
name="default-source-enrollment-prompt",
)
prompt, _ = Prompt.objects.using(db_alias).update_or_create(
field_key="username",
defaults={
"label": "Username",
"type": FieldTypes.TEXT,
"required": True,
"placeholder": "Username",
"order": 100,
},
)
prompt_stage.fields.add(prompt)
# Policy to only trigger prompt when no username is given
prompt_policy, _ = ExpressionPolicy.objects.using(db_alias).update_or_create(
name="default-source-enrollment-if-username",
defaults={"expression": PROMPT_POLICY_EXPRESSION},
)
# UserWrite stage to create the user, and login stage to log user in
user_write, _ = UserWriteStage.objects.using(db_alias).update_or_create(
name="default-source-enrollment-write"
)
user_login, _ = UserLoginStage.objects.using(db_alias).update_or_create(
name="default-source-enrollment-login"
)
binding, _ = FlowStageBinding.objects.using(db_alias).update_or_create(
target=flow,
stage=prompt_stage,
defaults={"order": 0, "re_evaluate_policies": True},
)
PolicyBinding.objects.using(db_alias).update_or_create(
policy=prompt_policy, target=binding, defaults={"order": 0}
)
FlowStageBinding.objects.using(db_alias).update_or_create(
target=flow, stage=user_write, defaults={"order": 1}
)
FlowStageBinding.objects.using(db_alias).update_or_create(
target=flow, stage=user_login, defaults={"order": 2}
)
def create_default_source_authentication_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
Flow = apps.get_model("authentik_flows", "Flow")
FlowStageBinding = apps.get_model("authentik_flows", "FlowStageBinding")
PolicyBinding = apps.get_model("authentik_policies", "PolicyBinding")
ExpressionPolicy = apps.get_model("authentik_policies_expression", "ExpressionPolicy")
UserLoginStage = apps.get_model("authentik_stages_user_login", "UserLoginStage")
db_alias = schema_editor.connection.alias
# Create a policy that only allows this flow when doing an SSO Request
flow_policy, _ = ExpressionPolicy.objects.using(db_alias).update_or_create(
name="default-source-authentication-if-sso",
defaults={
"expression": FLOW_POLICY_EXPRESSION,
},
)
# This creates a Flow used by sources to authenticate users
flow, _ = Flow.objects.using(db_alias).update_or_create(
slug="default-source-authentication",
designation=FlowDesignation.AUTHENTICATION,
defaults={
"name": "Welcome to authentik!",
},
)
PolicyBinding.objects.using(db_alias).update_or_create(
policy=flow_policy, target=flow, defaults={"order": 0}
)
user_login, _ = UserLoginStage.objects.using(db_alias).update_or_create(
name="default-source-authentication-login"
)
FlowStageBinding.objects.using(db_alias).update_or_create(
target=flow, stage=user_login, defaults={"order": 0}
)
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
("authentik_flows", "0008_default_flows"), ("authentik_flows", "0008_default_flows"),
("authentik_policies", "0001_initial"),
("authentik_policies_expression", "0001_initial"),
("authentik_stages_prompt", "0001_initial"),
("authentik_stages_user_write", "0001_initial"),
("authentik_stages_user_login", "0001_initial"),
] ]
operations = [] operations = [
migrations.RunPython(create_default_source_enrollment_flow),
migrations.RunPython(create_default_source_authentication_flow),
]

View File

@ -1,12 +1,46 @@
# Generated by Django 3.0.6 on 2020-05-24 11:34 # Generated by Django 3.0.6 on 2020-05-24 11:34
from django.apps.registry import Apps
from django.db import migrations from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from authentik.flows.models import FlowDesignation
def create_default_provider_authorization_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
Flow = apps.get_model("authentik_flows", "Flow")
FlowStageBinding = apps.get_model("authentik_flows", "FlowStageBinding")
ConsentStage = apps.get_model("authentik_stages_consent", "ConsentStage")
db_alias = schema_editor.connection.alias
# Empty flow for providers where consent is implicitly given
Flow.objects.using(db_alias).update_or_create(
slug="default-provider-authorization-implicit-consent",
designation=FlowDesignation.AUTHORIZATION,
defaults={"name": "Authorize Application"},
)
# Flow with consent form to obtain explicit user consent
flow, _ = Flow.objects.using(db_alias).update_or_create(
slug="default-provider-authorization-explicit-consent",
designation=FlowDesignation.AUTHORIZATION,
defaults={"name": "Authorize Application"},
)
stage, _ = ConsentStage.objects.using(db_alias).update_or_create(
name="default-provider-authorization-consent"
)
FlowStageBinding.objects.using(db_alias).update_or_create(
target=flow, stage=stage, defaults={"order": 0}
)
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
("authentik_flows", "0009_source_flows"), ("authentik_flows", "0009_source_flows"),
("authentik_stages_consent", "0001_initial"),
] ]
operations = [] operations = [migrations.RunPython(create_default_provider_authorization_flow)]

View File

@ -1,5 +1,27 @@
# Generated by Django 3.1 on 2020-08-28 13:14 # Generated by Django 3.1 on 2020-08-28 13:14
from django.apps.registry import Apps
from django.db import migrations, models from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def add_title_for_defaults(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
slug_title_map = {
"default-authentication-flow": "Welcome to authentik!",
"default-invalidation-flow": "Default Invalidation Flow",
"default-source-enrollment": "Welcome to authentik! Please select a username.",
"default-source-authentication": "Welcome to authentik!",
"default-provider-authorization-implicit-consent": "Redirecting to %(app)s",
"default-provider-authorization-explicit-consent": "Redirecting to %(app)s",
"default-password-change": "Change password",
}
db_alias = schema_editor.connection.alias
Flow = apps.get_model("authentik_flows", "Flow")
for flow in Flow.objects.using(db_alias).all():
if flow.slug in slug_title_map:
flow.title = slug_title_map[flow.slug]
else:
flow.title = flow.name
flow.save()
class Migration(migrations.Migration): class Migration(migrations.Migration):
@ -23,6 +45,7 @@ class Migration(migrations.Migration):
field=models.TextField(default="", blank=True), field=models.TextField(default="", blank=True),
preserve_default=False, preserve_default=False,
), ),
migrations.RunPython(add_title_for_defaults),
migrations.AlterField( migrations.AlterField(
model_name="flow", model_name="flow",
name="title", name="title",

View File

@ -1,6 +1,27 @@
# Generated by Django 3.1.1 on 2020-09-25 23:32 # Generated by Django 3.1.1 on 2020-09-25 23:32
from django.apps.registry import Apps
from django.db import migrations, models from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
# First stage for default-source-enrollment flow (prompt stage)
# needs to have its policy re-evaluated
def update_default_source_enrollment_flow_binding(
apps: Apps, schema_editor: BaseDatabaseSchemaEditor
):
Flow = apps.get_model("authentik_flows", "Flow")
FlowStageBinding = apps.get_model("authentik_flows", "FlowStageBinding")
db_alias = schema_editor.connection.alias
flows = Flow.objects.using(db_alias).filter(slug="default-source-enrollment")
if not flows.exists():
return
flow = flows.first()
binding = FlowStageBinding.objects.get(target=flow, order=0)
binding.re_evaluate_policies = True
binding.save()
class Migration(migrations.Migration): class Migration(migrations.Migration):
@ -26,4 +47,5 @@ class Migration(migrations.Migration):
help_text="When this option is enabled, the planner will re-evaluate policies bound to this binding.", help_text="When this option is enabled, the planner will re-evaluate policies bound to this binding.",
), ),
), ),
migrations.RunPython(update_default_source_enrollment_flow_binding),
] ]

View File

@ -1,12 +1,141 @@
# Generated by Django 3.1.7 on 2021-04-06 13:25 # Generated by Django 3.1.7 on 2021-04-06 13:25
from django.apps.registry import Apps
from django.contrib.auth.hashers import is_password_usable
from django.db import migrations from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from authentik.flows.models import FlowDesignation
PW_USABLE_POLICY_EXPRESSION = """# This policy ensures that the setup flow can only be
# executed when the admin user doesn't have a password set
akadmin = ak_user_by(username="akadmin")
return not akadmin.has_usable_password()"""
PREFILL_POLICY_EXPRESSION = """# This policy sets the user for the currently running flow
# by injecting "pending_user"
akadmin = ak_user_by(username="akadmin")
context["flow_plan"].context["pending_user"] = akadmin
return True"""
def create_default_oobe_flow(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
from authentik.stages.prompt.models import FieldTypes
User = apps.get_model("authentik_core", "User")
PolicyBinding = apps.get_model("authentik_policies", "PolicyBinding")
Flow = apps.get_model("authentik_flows", "Flow")
FlowStageBinding = apps.get_model("authentik_flows", "FlowStageBinding")
UserLoginStage = apps.get_model("authentik_stages_user_login", "UserLoginStage")
UserWriteStage = apps.get_model("authentik_stages_user_write", "UserWriteStage")
PromptStage = apps.get_model("authentik_stages_prompt", "PromptStage")
Prompt = apps.get_model("authentik_stages_prompt", "Prompt")
ExpressionPolicy = apps.get_model("authentik_policies_expression", "ExpressionPolicy")
db_alias = schema_editor.connection.alias
# Only create the flow if the akadmin user exists,
# and has an unusable password
akadmins = User.objects.filter(username="akadmin")
if not akadmins.exists():
return
akadmin = akadmins.first()
if is_password_usable(akadmin.password):
return
# Create a policy that sets the flow's user
prefill_policy, _ = ExpressionPolicy.objects.using(db_alias).update_or_create(
name="default-oobe-prefill-user",
defaults={"expression": PREFILL_POLICY_EXPRESSION},
)
password_usable_policy, _ = ExpressionPolicy.objects.using(db_alias).update_or_create(
name="default-oobe-password-usable",
defaults={"expression": PW_USABLE_POLICY_EXPRESSION},
)
prompt_header, _ = Prompt.objects.using(db_alias).update_or_create(
field_key="oobe-header-text",
defaults={
"label": "oobe-header-text",
"type": FieldTypes.STATIC,
"placeholder": "Welcome to authentik! Please set a password for the default admin user, akadmin.",
"order": 100,
},
)
prompt_email, _ = Prompt.objects.using(db_alias).update_or_create(
field_key="email",
defaults={
"label": "Email",
"type": FieldTypes.EMAIL,
"placeholder": "Admin email",
"order": 101,
},
)
password_first = Prompt.objects.using(db_alias).get(field_key="password")
password_second = Prompt.objects.using(db_alias).get(field_key="password_repeat")
prompt_stage, _ = PromptStage.objects.using(db_alias).update_or_create(
name="default-oobe-password",
)
prompt_stage.fields.set([prompt_header, prompt_email, password_first, password_second])
prompt_stage.save()
user_write, _ = UserWriteStage.objects.using(db_alias).update_or_create(
name="default-password-change-write"
)
login_stage, _ = UserLoginStage.objects.using(db_alias).update_or_create(
name="default-authentication-login"
)
flow, _ = Flow.objects.using(db_alias).update_or_create(
slug="initial-setup",
designation=FlowDesignation.STAGE_CONFIGURATION,
defaults={
"name": "default-oobe-setup",
"title": "Welcome to authentik!",
},
)
PolicyBinding.objects.using(db_alias).update_or_create(
policy=password_usable_policy, target=flow, defaults={"order": 0}
)
FlowStageBinding.objects.using(db_alias).update_or_create(
target=flow,
stage=prompt_stage,
defaults={
"order": 10,
},
)
user_write_binding, _ = FlowStageBinding.objects.using(db_alias).update_or_create(
target=flow,
stage=user_write,
defaults={"order": 20, "evaluate_on_plan": False, "re_evaluate_policies": True},
)
PolicyBinding.objects.using(db_alias).update_or_create(
policy=prefill_policy, target=user_write_binding, defaults={"order": 0}
)
FlowStageBinding.objects.using(db_alias).update_or_create(
target=flow,
stage=login_stage,
defaults={
"order": 100,
},
)
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
("authentik_flows", "0017_auto_20210329_1334"), ("authentik_flows", "0017_auto_20210329_1334"),
("authentik_stages_user_write", "0002_auto_20200918_1653"),
("authentik_stages_user_login", "0003_session_duration_delta"),
("authentik_stages_password", "0002_passwordstage_change_flow"),
("authentik_policies", "0001_initial"),
("authentik_policies_expression", "0001_initial"),
] ]
operations = [] operations = [
migrations.RunPython(create_default_oobe_flow),
]

View File

@ -1,5 +1,21 @@
# Generated by Django 4.0 on 2021-12-27 21:03 # Generated by Django 4.0 on 2021-12-27 21:03
from django.db import migrations from django.apps.registry import Apps
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def update_title_for_defaults(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
slug_title_map = {
"default-provider-authorization-implicit-consent": "Redirecting to %(app)s",
"default-provider-authorization-explicit-consent": "Redirecting to %(app)s",
}
db_alias = schema_editor.connection.alias
Flow = apps.get_model("authentik_flows", "Flow")
for flow in Flow.objects.using(db_alias).all():
if flow.slug not in slug_title_map:
continue
flow.title = slug_title_map[flow.slug]
flow.save()
class Migration(migrations.Migration): class Migration(migrations.Migration):
@ -8,4 +24,4 @@ class Migration(migrations.Migration):
("authentik_flows", "0020_flowtoken"), ("authentik_flows", "0020_flowtoken"),
] ]
operations = [] operations = [migrations.RunPython(update_title_for_defaults)]

View File

@ -165,7 +165,7 @@ class Flow(SerializerModel, PolicyBindingModel):
stages = models.ManyToManyField(Stage, through="FlowStageBinding", blank=True) stages = models.ManyToManyField(Stage, through="FlowStageBinding", blank=True)
@property @property
def serializer(self) -> type[BaseSerializer]: def serializer(self) -> BaseSerializer:
from authentik.flows.api.flows import FlowSerializer from authentik.flows.api.flows import FlowSerializer
return FlowSerializer return FlowSerializer
@ -225,7 +225,7 @@ class FlowStageBinding(SerializerModel, PolicyBindingModel):
objects = InheritanceManager() objects = InheritanceManager()
@property @property
def serializer(self) -> type[BaseSerializer]: def serializer(self) -> BaseSerializer:
from authentik.flows.api.bindings import FlowStageBindingSerializer from authentik.flows.api.bindings import FlowStageBindingSerializer
return FlowStageBindingSerializer return FlowStageBindingSerializer

View File

@ -1,56 +1,53 @@
"""Test blueprints v1""" """Test flow transfer"""
from json import dumps
from django.test import TransactionTestCase from django.test import TransactionTestCase
from authentik.blueprints.v1.exporter import FlowExporter
from authentik.blueprints.v1.importer import Importer, transaction_rollback
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding
from authentik.flows.transfer.common import DataclassEncoder
from authentik.flows.transfer.exporter import FlowExporter
from authentik.flows.transfer.importer import FlowImporter, transaction_rollback
from authentik.lib.generators import generate_id from authentik.lib.generators import generate_id
from authentik.policies.expression.models import ExpressionPolicy from authentik.policies.expression.models import ExpressionPolicy
from authentik.policies.models import PolicyBinding from authentik.policies.models import PolicyBinding
from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage
from authentik.stages.user_login.models import UserLoginStage from authentik.stages.user_login.models import UserLoginStage
STATIC_PROMPT_EXPORT = """version: 1 STATIC_PROMPT_EXPORT = """{
entries: "version": 1,
- identifiers: "entries": [
pk: cb954fd4-65a5-4ad9-b1ee-180ee9559cf4 {
model: authentik_stages_prompt.prompt "identifiers": {
attrs: "pk": "cb954fd4-65a5-4ad9-b1ee-180ee9559cf4"
field_key: username },
label: Username "model": "authentik_stages_prompt.prompt",
type: username "attrs": {
required: true "field_key": "username",
placeholder: Username "label": "Username",
order: 0 "type": "username",
""" "required": true,
"placeholder": "Username",
YAML_TAG_TESTS = """version: 1 "order": 0
context: }
foo: bar }
entries: ]
- attrs: }"""
expression: return True
identifiers:
name: !Format [foo-%s-%s, !Context foo, !Context bar]
id: default-source-enrollment-if-username
model: authentik_policies_expression.expressionpolicy
"""
class TestBlueprintsV1(TransactionTestCase): class TestFlowTransfer(TransactionTestCase):
"""Test Blueprints""" """Test flow transfer"""
def test_blueprint_invalid_format(self): def test_bundle_invalid_format(self):
"""Test blueprint with invalid format""" """Test bundle with invalid format"""
importer = Importer('{"version": 3}') importer = FlowImporter('{"version": 3}')
self.assertFalse(importer.validate()[0]) self.assertFalse(importer.validate())
importer = Importer( importer = FlowImporter(
( (
'{"version": 1,"entries":[{"identifiers":{},"attrs":{},' '{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
'"model": "authentik_core.User"}]}' '"model": "authentik_core.User"}]}'
) )
) )
self.assertFalse(importer.validate()[0]) self.assertFalse(importer.validate())
def test_export_validate_import(self): def test_export_validate_import(self):
"""Test export and validate it""" """Test export and validate it"""
@ -73,10 +70,10 @@ class TestBlueprintsV1(TransactionTestCase):
exporter = FlowExporter(flow) exporter = FlowExporter(flow)
export = exporter.export() export = exporter.export()
self.assertEqual(len(export.entries), 3) self.assertEqual(len(export.entries), 3)
export_yaml = exporter.export_to_string() export_json = exporter.export_to_string()
importer = Importer(export_yaml) importer = FlowImporter(export_json)
self.assertTrue(importer.validate()[0]) self.assertTrue(importer.validate())
self.assertTrue(importer.apply()) self.assertTrue(importer.apply())
self.assertTrue(Flow.objects.filter(slug=flow_slug).exists()) self.assertTrue(Flow.objects.filter(slug=flow_slug).exists())
@ -85,26 +82,18 @@ class TestBlueprintsV1(TransactionTestCase):
"""Test export and import it twice""" """Test export and import it twice"""
count_initial = Prompt.objects.filter(field_key="username").count() count_initial = Prompt.objects.filter(field_key="username").count()
importer = Importer(STATIC_PROMPT_EXPORT) importer = FlowImporter(STATIC_PROMPT_EXPORT)
self.assertTrue(importer.validate()[0]) self.assertTrue(importer.validate())
self.assertTrue(importer.apply()) self.assertTrue(importer.apply())
count_before = Prompt.objects.filter(field_key="username").count() count_before = Prompt.objects.filter(field_key="username").count()
self.assertEqual(count_initial + 1, count_before) self.assertEqual(count_initial + 1, count_before)
importer = Importer(STATIC_PROMPT_EXPORT) importer = FlowImporter(STATIC_PROMPT_EXPORT)
self.assertTrue(importer.apply()) self.assertTrue(importer.apply())
self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before) self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)
def test_import_yaml_tags(self):
"""Test some yaml tags"""
ExpressionPolicy.objects.filter(name="foo-foo-bar").delete()
importer = Importer(YAML_TAG_TESTS, {"bar": "baz"})
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
self.assertTrue(ExpressionPolicy.objects.filter(name="foo-foo-bar"))
def test_export_validate_import_policies(self): def test_export_validate_import_policies(self):
"""Test export and validate it""" """Test export and validate it"""
flow_slug = generate_id() flow_slug = generate_id()
@ -127,10 +116,12 @@ class TestBlueprintsV1(TransactionTestCase):
PolicyBinding.objects.create(policy=flow_policy, target=fsb, order=0) PolicyBinding.objects.create(policy=flow_policy, target=fsb, order=0)
exporter = FlowExporter(flow) exporter = FlowExporter(flow)
export_yaml = exporter.export_to_string() export = exporter.export()
importer = Importer(export_yaml) export_json = dumps(export, cls=DataclassEncoder)
self.assertTrue(importer.validate()[0])
importer = FlowImporter(export_json)
self.assertTrue(importer.validate())
self.assertTrue(importer.apply()) self.assertTrue(importer.apply())
self.assertTrue(UserLoginStage.objects.filter(name=stage_name).exists()) self.assertTrue(UserLoginStage.objects.filter(name=stage_name).exists())
self.assertTrue(Flow.objects.filter(slug=flow_slug).exists()) self.assertTrue(Flow.objects.filter(slug=flow_slug).exists())
@ -170,9 +161,10 @@ class TestBlueprintsV1(TransactionTestCase):
FlowStageBinding.objects.create(target=flow, stage=first_stage, order=0) FlowStageBinding.objects.create(target=flow, stage=first_stage, order=0)
exporter = FlowExporter(flow) exporter = FlowExporter(flow)
export_yaml = exporter.export_to_string() export = exporter.export()
export_json = dumps(export, cls=DataclassEncoder)
importer = Importer(export_yaml) importer = FlowImporter(export_json)
self.assertTrue(importer.validate()[0]) self.assertTrue(importer.validate())
self.assertTrue(importer.apply()) self.assertTrue(importer.apply())
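The YAML_TAG_TESTS fixture on the left-hand side exercises authentik's custom !Format and !Context YAML tags. Purely as a rough illustration of how such tags are typically registered (this is plain PyYAML with made-up semantics, not authentik's BlueprintLoader):

import yaml

CONTEXT = {"foo": "bar", "bar": "baz"}


class ExampleLoader(yaml.SafeLoader):
    """Toy loader with !Context and !Format constructors (illustrative only)."""


def construct_context(loader, node):
    # Look the scalar up in a fixed context dict
    return CONTEXT.get(loader.construct_scalar(node))


def construct_format(loader, node):
    # First element is a %-style format string, the rest are its arguments
    fmt, *args = loader.construct_sequence(node, deep=True)
    return fmt % tuple(args)


ExampleLoader.add_constructor("!Context", construct_context)
ExampleLoader.add_constructor("!Format", construct_format)

print(yaml.load("name: !Format [foo-%s-%s, !Context foo, !Context bar]", ExampleLoader))
# {'name': 'foo-bar-baz'}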

View File

@ -0,0 +1,29 @@
"""test example flows in docs"""
from glob import glob
from pathlib import Path
from typing import Callable
from django.test import TransactionTestCase
from authentik.flows.transfer.importer import FlowImporter
class TestTransferDocs(TransactionTestCase):
"""Empty class, test methods are added dynamically"""
def pbflow_tester(file_name: str) -> Callable:
"""This is used instead of subTest for better visibility"""
def tester(self: TestTransferDocs):
with open(file_name, "r", encoding="utf8") as flow_json:
importer = FlowImporter(flow_json.read())
self.assertTrue(importer.validate())
self.assertTrue(importer.apply())
return tester
for flow_file in glob("website/static/flows/*.akflow"):
method_name = Path(flow_file).stem.replace("-", "_").replace(".", "_")
setattr(TestTransferDocs, f"test_flow_{method_name}", pbflow_tester(flow_file))
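The pattern above (building one test function per file and attaching it with setattr) is used instead of subTest so every .akflow file shows up as its own test case in reports. A generic, framework-only sketch of the same idea (file pattern and assertion are placeholders):

import unittest
from glob import glob
from pathlib import Path
from typing import Callable


class TestExampleFiles(unittest.TestCase):
    """Empty container; one test method is attached per matching file."""


def file_tester(file_name: str) -> Callable:
    def tester(self: TestExampleFiles):
        self.assertGreater(Path(file_name).stat().st_size, 0)

    return tester


for path in glob("*.py"):
    stem = Path(path).stem.replace("-", "_").replace(".", "_")
    setattr(TestExampleFiles, f"test_file_{stem}", file_tester(path))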

View File

@ -0,0 +1,88 @@
"""transfer common classes"""
from dataclasses import asdict, dataclass, field, is_dataclass
from enum import Enum
from typing import Any
from uuid import UUID
from django.core.serializers.json import DjangoJSONEncoder
from authentik.lib.models import SerializerModel
from authentik.lib.sentry import SentryIgnoredException
def get_attrs(obj: SerializerModel) -> dict[str, Any]:
"""Get object's attributes via their serializer, and convert it to a normal dict"""
data = dict(obj.serializer(obj).data)
to_remove = (
"policies",
"stages",
"pk",
"background",
"group",
"user",
"verbose_name",
"verbose_name_plural",
"component",
"flow_set",
"promptstage_set",
"policybindingmodel_ptr_id",
"export_url",
"meta_model_name",
)
for to_remove_name in to_remove:
if to_remove_name in data:
data.pop(to_remove_name)
for key in list(data.keys()):
if key.endswith("_obj"):
data.pop(key)
return data
@dataclass
class FlowBundleEntry:
"""Single entry of a bundle"""
identifiers: dict[str, Any]
model: str
attrs: dict[str, Any]
@staticmethod
def from_model(model: SerializerModel, *extra_identifier_names: str) -> "FlowBundleEntry":
"""Convert a SerializerModel instance to a Bundle Entry"""
identifiers = {
"pk": model.pk,
}
all_attrs = get_attrs(model)
for extra_identifier_name in extra_identifier_names:
identifiers[extra_identifier_name] = all_attrs.pop(extra_identifier_name)
return FlowBundleEntry(
identifiers=identifiers,
model=f"{model._meta.app_label}.{model._meta.model_name}",
attrs=all_attrs,
)
@dataclass
class FlowBundle:
"""Dataclass used for a full export"""
version: int = field(default=1)
entries: list[FlowBundleEntry] = field(default_factory=list)
class DataclassEncoder(DjangoJSONEncoder):
"""Convert FlowBundleEntry to json"""
def default(self, o):
if is_dataclass(o):
return asdict(o)
if isinstance(o, UUID):
return str(o)
if isinstance(o, Enum):
return o.value
return super().default(o) # pragma: no cover
class EntryInvalidError(SentryIgnoredException):
"""Error raised when an entry is invalid"""

View File

@ -0,0 +1,92 @@
"""Flow exporter"""
from json import dumps
from typing import Iterator
from uuid import UUID
from django.db.models import Q
from authentik.flows.models import Flow, FlowStageBinding, Stage
from authentik.flows.transfer.common import DataclassEncoder, FlowBundle, FlowBundleEntry
from authentik.policies.models import Policy, PolicyBinding
from authentik.stages.prompt.models import PromptStage
class FlowExporter:
"""Export flow with attached stages into json"""
flow: Flow
with_policies: bool
with_stage_prompts: bool
pbm_uuids: list[UUID]
def __init__(self, flow: Flow):
self.flow = flow
self.with_policies = True
self.with_stage_prompts = True
def _prepare_pbm(self):
self.pbm_uuids = [self.flow.pbm_uuid]
self.pbm_uuids += FlowStageBinding.objects.filter(target=self.flow).values_list(
"pbm_uuid", flat=True
)
def walk_stages(self) -> Iterator[FlowBundleEntry]:
"""Convert all stages attached to self.flow into FlowBundleEntry objects"""
stages = Stage.objects.filter(flow=self.flow).select_related().select_subclasses()
for stage in stages:
if isinstance(stage, PromptStage):
pass
yield FlowBundleEntry.from_model(stage, "name")
def walk_stage_bindings(self) -> Iterator[FlowBundleEntry]:
"""Convert all bindings attached to self.flow into FlowBundleEntry objects"""
bindings = FlowStageBinding.objects.filter(target=self.flow).select_related()
for binding in bindings:
yield FlowBundleEntry.from_model(binding, "target", "stage", "order")
def walk_policies(self) -> Iterator[FlowBundleEntry]:
"""Walk over all policies. This is done at the beginning of the export for stages that have
a direct foreign key to a policy."""
# Special case for PromptStage as that has a direct M2M to policy, we have to ensure
# all policies referenced in there we also include here
prompt_stages = PromptStage.objects.filter(flow=self.flow).values_list("pk", flat=True)
query = Q(bindings__in=self.pbm_uuids) | Q(promptstage__in=prompt_stages)
policies = Policy.objects.filter(query).select_related()
for policy in policies:
yield FlowBundleEntry.from_model(policy)
def walk_policy_bindings(self) -> Iterator[FlowBundleEntry]:
"""Walk over all policybindings relative to us. This is run at the end of the export, as
we are sure all objects exist now."""
bindings = PolicyBinding.objects.filter(target__in=self.pbm_uuids).select_related()
for binding in bindings:
yield FlowBundleEntry.from_model(binding, "policy", "target", "order")
def walk_stage_prompts(self) -> Iterator[FlowBundleEntry]:
"""Walk over all prompts associated with any PromptStages"""
prompt_stages = PromptStage.objects.filter(flow=self.flow)
for stage in prompt_stages:
for prompt in stage.fields.all():
yield FlowBundleEntry.from_model(prompt)
def export(self) -> FlowBundle:
"""Create a list of all objects including the flow"""
if self.with_policies:
self._prepare_pbm()
bundle = FlowBundle()
bundle.entries.append(FlowBundleEntry.from_model(self.flow, "slug"))
if self.with_stage_prompts:
bundle.entries.extend(self.walk_stage_prompts())
if self.with_policies:
bundle.entries.extend(self.walk_policies())
bundle.entries.extend(self.walk_stages())
bundle.entries.extend(self.walk_stage_bindings())
if self.with_policies:
bundle.entries.extend(self.walk_policy_bindings())
return bundle
def export_to_string(self) -> str:
"""Call export and convert it to json"""
bundle = self.export()
return dumps(bundle, cls=DataclassEncoder)
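Usage-wise, the exporter in this file is driven with a Flow instance: export() returns the FlowBundle dataclass and export_to_string() its JSON form, which FlowImporter can consume again. A short sketch (the slug is illustrative and assumes the default flows are installed):

from authentik.flows.models import Flow
from authentik.flows.transfer.exporter import FlowExporter

flow = Flow.objects.get(slug="default-source-enrollment")  # any existing flow

exporter = FlowExporter(flow)
bundle = exporter.export()          # FlowBundle: flow + prompts + policies + stages + bindings
print(len(bundle.entries))
print(exporter.export_to_string())  # JSON, ready to be fed back into FlowImporter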

View File

@ -1,11 +1,11 @@
"""Blueprint importer""" """Flow importer"""
from contextlib import contextmanager from contextlib import contextmanager
from copy import deepcopy from copy import deepcopy
from typing import Any, Optional from json import loads
from typing import Any
from dacite import from_dict from dacite import from_dict
from dacite.exceptions import DaciteError from dacite.exceptions import DaciteError
from deepmerge import always_merger
from django.apps import apps from django.apps import apps
from django.db import transaction from django.db import transaction
from django.db.models import Model from django.db.models import Model
@ -14,52 +14,14 @@ from django.db.utils import IntegrityError
from rest_framework.exceptions import ValidationError from rest_framework.exceptions import ValidationError
from rest_framework.serializers import BaseSerializer, Serializer from rest_framework.serializers import BaseSerializer, Serializer
from structlog.stdlib import BoundLogger, get_logger from structlog.stdlib import BoundLogger, get_logger
from structlog.testing import capture_logs
from structlog.types import EventDict
from yaml import load
from authentik.blueprints.v1.common import ( from authentik.flows.models import Flow, FlowStageBinding, Stage
Blueprint, from authentik.flows.transfer.common import EntryInvalidError, FlowBundle, FlowBundleEntry
BlueprintEntry,
BlueprintEntryState,
BlueprintLoader,
EntryInvalidError,
)
from authentik.core.models import (
AuthenticatedSession,
PropertyMapping,
Provider,
Source,
UserSourceConnection,
)
from authentik.flows.models import Stage
from authentik.lib.models import SerializerModel from authentik.lib.models import SerializerModel
from authentik.outposts.models import OutpostServiceConnection from authentik.policies.models import Policy, PolicyBinding
from authentik.policies.models import Policy, PolicyBindingModel from authentik.stages.prompt.models import Prompt
ALLOWED_MODELS = (Flow, FlowStageBinding, Stage, Policy, PolicyBinding, Prompt)
def is_model_allowed(model: type[Model]) -> bool:
"""Check if model is allowed"""
# pylint: disable=imported-auth-user
from django.contrib.auth.models import Group as DjangoGroup
from django.contrib.auth.models import User as DjangoUser
excluded_models = (
DjangoUser,
DjangoGroup,
# Base classes
Provider,
Source,
PropertyMapping,
-        UserSourceConnection,
-        Stage,
-        OutpostServiceConnection,
-        Policy,
-        PolicyBindingModel,
-        # Classes that have other dependencies
-        AuthenticatedSession,
-    )
-    return model not in excluded_models


 @contextmanager
@@ -72,29 +34,19 @@ def transaction_rollback():
     atomic.__exit__(IntegrityError, None, None)


-class Importer:
-    """Import Blueprint from YAML"""
+class FlowImporter:
+    """Import Flow from json"""

     logger: BoundLogger

-    def __init__(self, yaml_input: str, context: Optional[dict] = None):
+    def __init__(self, json_input: str):
         self.__pk_map: dict[Any, Model] = {}
         self.logger = get_logger()
-        import_dict = load(yaml_input, BlueprintLoader)
+        import_dict = loads(json_input)
         try:
-            self.__import = from_dict(Blueprint, import_dict)
+            self.__import = from_dict(FlowBundle, import_dict)
         except DaciteError as exc:
             raise EntryInvalidError from exc
-        context = {}
-        always_merger.merge(context, self.__import.context)
-        if context:
-            always_merger.merge(context, context)
-        self.__import.context = context
-
-    @property
-    def blueprint(self) -> Blueprint:
-        """Get imported blueprint"""
-        return self.__import

     def __update_pks_for_attrs(self, attrs: dict[str, Any]) -> dict[str, Any]:
         """Replace any value if it is a known primary key of an other object"""
@@ -123,9 +75,7 @@ class Importer:
         """Generate an or'd query from all identifiers in an entry"""
         # Since identifiers can also be pk-references to other objects (see FlowStageBinding)
         # we have to ensure those references are also replaced
-        main_query = Q()
-        if "pk" in attrs:
-            main_query = Q(pk=attrs["pk"])
+        main_query = Q(pk=attrs["pk"])
         sub_query = Q()
         for identifier, value in attrs.items():
             if isinstance(value, dict):
@@ -135,22 +85,19 @@ class Importer:
             sub_query &= Q(**{identifier: value})
         return main_query | sub_query

-    def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer:
+    def _validate_single(self, entry: FlowBundleEntry) -> BaseSerializer:
         """Validate a single entry"""
         model_app_label, model_name = entry.model.split(".")
         model: type[SerializerModel] = apps.get_model(model_app_label, model_name)
-        # Don't use isinstance since we don't want to check for inheritance
-        if not is_model_allowed(model):
+        if not isinstance(model(), ALLOWED_MODELS):
             raise EntryInvalidError(f"Model {model} not allowed")
-        if entry.identifiers == {}:
-            raise EntryInvalidError("No identifiers")

         # If we try to validate without referencing a possible instance
         # we'll get a duplicate error, hence we load the model here and return
         # the full serializer for later usage
         # Because a model might have multiple unique columns, we chain all identifiers together
         # to create an OR query.
-        updated_identifiers = self.__update_pks_for_attrs(entry.get_identifiers(self.__import))
+        updated_identifiers = self.__update_pks_for_attrs(entry.identifiers)
         for key, value in list(updated_identifiers.items()):
             if isinstance(value, dict) and "pk" in value:
                 del updated_identifiers[key]
@@ -167,7 +114,6 @@ class Importer:
                 pk=model_instance.pk,
             )
             serializer_kwargs["instance"] = model_instance
-            serializer_kwargs["partial"] = True
         else:
             self.logger.debug("initialise new instance", model=model, **updated_identifiers)
             model_instance = model()
@@ -175,7 +121,7 @@ class Importer:
             if "pk" in updated_identifiers:
                 model_instance.pk = updated_identifiers["pk"]
             serializer_kwargs["instance"] = model_instance

-        full_data = self.__update_pks_for_attrs(entry.get_attrs(self.__import))
+        full_data = self.__update_pks_for_attrs(entry.attrs)
         full_data.update(updated_identifiers)
         serializer_kwargs["data"] = full_data
@@ -187,7 +133,7 @@ class Importer:
         return serializer

     def apply(self) -> bool:
-        """Apply (create/update) models yaml, in database transaction"""
+        """Apply (create/update) flow json, in database transaction"""
         try:
             with transaction.atomic():
                 if not self._apply_models():
@@ -200,9 +146,10 @@ class Importer:
         return True

     def _apply_models(self) -> bool:
-        """Apply (create/update) models yaml"""
+        """Apply (create/update) flow json"""
         self.__pk_map = {}
-        for entry in self.__import.entries:
+        entries = deepcopy(self.__import.entries)
+        for entry in entries:
             model_app_label, model_name = entry.model.split(".")
             try:
                 model: SerializerModel = apps.get_model(model_app_label, model_name)
@@ -215,32 +162,23 @@ class Importer:
             try:
                 serializer = self._validate_single(entry)
             except EntryInvalidError as exc:
-                self.logger.warning("entry invalid", entry=entry, error=exc)
+                self.logger.warning("entry not valid", entry=entry, error=exc)
                 return False

             model = serializer.save()
-            if "pk" in entry.identifiers:
-                self.__pk_map[entry.identifiers["pk"]] = model.pk
-            entry._state = BlueprintEntryState(model)
+            self.__pk_map[entry.identifiers["pk"]] = model.pk
             self.logger.debug("updated model", model=model, pk=model.pk)
         return True

-    def validate(self) -> tuple[bool, list[EventDict]]:
-        """Validate loaded blueprint export, ensure all models are allowed
+    def validate(self) -> bool:
+        """Validate loaded flow export, ensure all models are allowed
         and serializers have no errors"""
-        self.logger.debug("Starting blueprint import validation")
-        orig_import = deepcopy(self.__import)
+        self.logger.debug("Starting flow import validation")
         if self.__import.version != 1:
-            self.logger.warning("Invalid blueprint version")
-            return False, []
-        with (
-            transaction_rollback(),
-            capture_logs() as logs,
-        ):
+            self.logger.warning("Invalid bundle version")
+            return False
+        with transaction_rollback():
             successful = self._apply_models()
             if not successful:
-                self.logger.debug("blueprint validation failed")
-        for log in logs:
-            self.logger.debug(**log)
-        self.__import = orig_import
-        return successful, logs
+                self.logger.debug("Flow validation failed")
+        return successful
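For orientation, a minimal usage sketch of the importer as it appears on the added (+) side of this file. The module path and the export file name are assumptions for illustration; only the `validate()`/`apply()` call shape is taken from the diff.

```python
# Sketch only: module path and file name are assumed, not shown in this diff.
from authentik.flows.transfer.importer import FlowImporter

with open("flow-export.akflow", encoding="utf-8") as export_file:  # hypothetical export
    importer = FlowImporter(export_file.read())

# validate() dry-runs the import inside a rolled-back transaction and returns a bool;
# apply() performs the same work inside a real transaction.
if importer.validate():
    importer.apply()
else:
    print("bundle failed validation, nothing was applied")
```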

View File

@@ -20,17 +20,6 @@ ENV_PREFIX = "AUTHENTIK"
 ENVIRONMENT = os.getenv(f"{ENV_PREFIX}_ENV", "local")


-def get_path_from_dict(root: dict, path: str, sep=".", default=None):
-    """Recursively walk through `root`, checking each part of `path` split by `sep`.
-    If at any point a dict does not exist, return default"""
-    for comp in path.split(sep):
-        if root and comp in root:
-            root = root.get(comp)
-        else:
-            return default
-    return root
-
-
 class ConfigLoader:
     """Search through SEARCH_PATHS and load configuration. Environment variables starting with
     `ENV_PREFIX` are also applied.
@@ -166,7 +155,12 @@ class ConfigLoader:
         # Walk sub_dicts before parsing path
         root = self.raw
         # Walk each component of the path
-        return get_path_from_dict(root, path, sep=sep, default=default)
+        for comp in path.split(sep):
+            if root and comp in root:
+                root = root.get(comp)
+            else:
+                return default
+        return root

     def y_set(self, path: str, value: Any, sep="."):
         """Set value using same syntax as y()"""

View File

@@ -7,10 +7,11 @@ postgresql:
   port: 5432
   password: 'env://POSTGRES_PASSWORD'

-listen:
-  listen_http: 0.0.0.0:9000
-  listen_https: 0.0.0.0:9443
+web:
+  listen: 0.0.0.0:9000
+  listen_tls: 0.0.0.0:9443
   listen_metrics: 0.0.0.0:9300
+  outpost_port_offset: 0

 redis:
   host: localhost
@@ -56,7 +57,6 @@ outposts:
   # %(build_hash)s: Build hash if you're running a beta version
   container_image_base: ghcr.io/goauthentik/%(type)s:%(version)s
   discover: true
-  disable_embedded_outpost: false

 ldap:
   tls:
@@ -78,5 +78,3 @@ gdpr_compliance: true
 cert_discovery_dir: /certs
 default_token_length: 128
 impersonation: true
-blueprints_dir: /blueprints

View File

@@ -12,7 +12,7 @@ class SerializerModel(models.Model):
     """Base Abstract Model which has a serializer"""

     @property
-    def serializer(self) -> type[BaseSerializer]:
+    def serializer(self) -> BaseSerializer:
         """Get serializer for this model"""
         raise NotImplementedError

View File

@@ -8,7 +8,7 @@ from channels.middleware import BaseMiddleware
 from channels_redis.core import ChannelFull
 from django.conf import settings
 from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation, ValidationError
-from django.db import DatabaseError, InternalError, OperationalError, ProgrammingError
+from django.db import InternalError, OperationalError, ProgrammingError
 from django.http.response import Http404
 from django_redis.exceptions import ConnectionInterrupted
 from docker.errors import DockerException
@@ -87,6 +87,10 @@ def sentry_init(**sentry_init_kwargs):
     set_tag("authentik.build_hash", get_build_hash("tagged"))
     set_tag("authentik.env", get_env())
     set_tag("authentik.component", "backend")
+    LOGGER.info(
+        "Error reporting is enabled",
+        env=kwargs["environment"],
+    )


 def traces_sampler(sampling_context: dict) -> float:
@@ -112,7 +116,6 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
         # Django Errors
         Error,
         ImproperlyConfigured,
-        DatabaseError,
         OperationalError,
         InternalError,
         ProgrammingError,

View File

@@ -0,0 +1,8 @@
+"""authentik lib template utilities"""
+from django.template import Context, loader
+
+
+def render_to_string(template_path: str, ctx: Context) -> str:
+    """Render a template to string"""
+    template = loader.get_template(template_path)
+    return template.render(ctx)
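A hedged usage sketch for this new helper; the import path and template name are assumptions, and a configured Django settings module with a matching template directory is required:

```python
# Assumed module path; the diff does not show where this file lives.
from authentik.lib.utils.template import render_to_string

# Template name is hypothetical. Despite the `Context` annotation, a plain dict
# works as the context for Django's loader-backed templates.
html = render_to_string("email/generic_email.html", {"title": "Test email"})
```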

authentik/managed/api.py (new file, 8 lines)
View File

@@ -0,0 +1,8 @@
+"""Serializer mixin for managed models"""
+from rest_framework.fields import CharField
+
+
+class ManagedSerializer:
+    """Managed Serializer"""
+
+    managed = CharField(read_only=True, allow_null=True)

authentik/managed/apps.py (new file, 15 lines)
View File

@@ -0,0 +1,15 @@
+"""authentik Managed app"""
+from django.apps import AppConfig
+
+
+class AuthentikManagedConfig(AppConfig):
+    """authentik Managed app"""
+
+    name = "authentik.managed"
+    label = "authentik_managed"
+    verbose_name = "authentik Managed"
+
+    def ready(self) -> None:
+        from authentik.managed.tasks import managed_reconcile
+
+        managed_reconcile.delay()

View File

@@ -0,0 +1,70 @@
+"""Managed objects manager"""
+from typing import Callable, Optional
+
+from structlog.stdlib import get_logger
+
+from authentik.managed.models import ManagedModel
+
+LOGGER = get_logger()
+
+
+class EnsureOp:
+    """Ensure operation, executed as part of an ObjectManager run"""
+
+    _obj: type[ManagedModel]
+    _managed_uid: str
+    _kwargs: dict
+
+    def __init__(self, obj: type[ManagedModel], managed_uid: str, **kwargs) -> None:
+        self._obj = obj
+        self._managed_uid = managed_uid
+        self._kwargs = kwargs
+
+    def run(self):
+        """Do the actual ensure action"""
+        raise NotImplementedError
+
+
+class EnsureExists(EnsureOp):
+    """Ensure object exists, with kwargs as given values"""
+
+    created_callback: Optional[Callable]
+
+    def __init__(
+        self,
+        obj: type[ManagedModel],
+        managed_uid: str,
+        created_callback: Optional[Callable] = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(obj, managed_uid, **kwargs)
+        self.created_callback = created_callback
+
+    def run(self):
+        self._kwargs.setdefault("managed", self._managed_uid)
+        obj, created = self._obj.objects.update_or_create(
+            **{
+                "managed": self._managed_uid,
+                "defaults": self._kwargs,
+            }
+        )
+        if created and self.created_callback is not None:
+            self.created_callback(obj)
+
+
+class ObjectManager:
+    """Base class for Apps Object manager"""
+
+    def run(self):
+        """Main entrypoint for tasks, iterate through all implementation of this
+        and execute all operations"""
+        for sub in ObjectManager.__subclasses__():
+            sub_inst = sub()
+            ops = sub_inst.reconcile()
+            LOGGER.debug("Reconciling managed objects", manager=sub.__name__)
+            for operation in ops:
+                operation.run()
+
+    def reconcile(self) -> list[EnsureOp]:
+        """Method which is implemented in subclass that returns a list of Operations"""
+        raise NotImplementedError
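To show how these pieces compose, a hypothetical reconciler; the model, app, managed UID, and the manager module path below are invented for illustration and do not appear in this diff:

```python
# Hypothetical ObjectManager subclass. SomeManagedModel stands in for any
# concrete ManagedModel subclass an app wants authentik to own.
from authentik.managed.manager import EnsureExists, ObjectManager  # module path assumed
from myapp.models import SomeManagedModel  # invented for this sketch


class MyAppManager(ObjectManager):
    """Ensure myapp's default objects exist"""

    def reconcile(self) -> list[EnsureExists]:
        return [
            EnsureExists(
                SomeManagedModel,
                "goauthentik.io/myapp/default",  # managed UID, made up
                name="Default object",  # extra kwargs become update_or_create defaults
            ),
        ]


# ObjectManager().run() discovers MyAppManager via __subclasses__() and
# executes each returned EnsureOp.
```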

View File

@@ -0,0 +1,26 @@
+"""Managed Object models"""
+from django.db import models
+from django.utils.translation import gettext_lazy as _
+
+
+class ManagedModel(models.Model):
+    """Model which can be managed by authentik exclusively"""
+
+    managed = models.TextField(
+        default=None,
+        null=True,
+        verbose_name=_("Managed by authentik"),
+        help_text=_(
+            (
+                "Objects which are managed by authentik. These objects are created and updated "
+                "automatically. This is flag only indicates that an object can be overwritten by "
+                "migrations. You can still modify the objects via the API, but expect changes "
+                "to be overwritten in a later update."
+            )
+        ),
+        unique=True,
+    )
+
+    class Meta:
+
+        abstract = True
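Finally, a hypothetical concrete model using this base, to make the `managed` semantics tangible; the model name and fields are invented (the `authentik.managed.models` path is taken from the import shown in the manager file above):

```python
from django.db import models

from authentik.managed.models import ManagedModel


class ExampleMapping(ManagedModel):  # invented model, must live in an installed app
    """Example object that authentik may create and overwrite automatically"""

    name = models.TextField()
    expression = models.TextField(default="")


# User-created rows have managed=None; rows owned by authentik carry a unique UID:
# ExampleMapping.objects.filter(managed__isnull=True)   -> user-created objects
# ExampleMapping.objects.filter(managed__isnull=False)  -> managed by authentik
```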

Some files were not shown because too many files have changed in this diff.