Compare commits

1 commit

906c63c16f blueprints: add FindObject tag
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2024-12-19 15:55:34 +01:00
412 changed files with 6369 additions and 21700 deletions

View File

@ -1,16 +1,16 @@
[bumpversion]
current_version = 2024.12.2
current_version = 2024.10.5
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
serialize =
serialize =
{major}.{minor}.{patch}-{rc_t}{rc_n}
{major}.{minor}.{patch}
message = release: {new_version}
tag_name = version/{new_version}
[bumpversion:part:rc_t]
values =
values =
rc
final
optional_value = final
@ -31,4 +31,4 @@ optional_value = final
[bumpversion:file:web/src/common/constants.ts]
[bumpversion:file:lifecycle/aws/template.yaml]
[bumpversion:file:website/docs/install-config/install/aws/template.yaml]

View File

@ -35,6 +35,14 @@ runs:
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
For arm64, use these values:
```shell
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
AUTHENTIK_TAG=${{ inputs.tag }}-arm64
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
<details>
@ -52,6 +60,18 @@ runs:
tag: ${{ inputs.tag }}
```
For arm64, use these values:
```yaml
authentik:
outposts:
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
global:
image:
repository: ghcr.io/goauthentik/dev-server
tag: ${{ inputs.tag }}-arm64
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
edit-mode: replace

View File

@ -9,9 +9,6 @@ inputs:
image-arch:
required: false
description: "Docker image arch"
release:
required: true
description: "True if this is a release build, false if this is a dev/PR build"
outputs:
shouldPush:
@ -32,24 +29,15 @@ outputs:
imageTags:
description: "Docker image tags"
value: ${{ steps.ev.outputs.imageTags }}
imageTagsJSON:
description: "Docker image tags, as a JSON array"
value: ${{ steps.ev.outputs.imageTagsJSON }}
attestImageNames:
description: "Docker image names used for attestation"
value: ${{ steps.ev.outputs.attestImageNames }}
cacheTo:
description: "cache-to value for the docker build step"
value: ${{ steps.ev.outputs.cacheTo }}
imageMainTag:
description: "Docker image main tag"
value: ${{ steps.ev.outputs.imageMainTag }}
imageMainName:
description: "Docker image main name"
value: ${{ steps.ev.outputs.imageMainName }}
imageBuildArgs:
description: "Docker image build args"
value: ${{ steps.ev.outputs.imageBuildArgs }}
runs:
using: "composite"
@ -60,8 +48,6 @@ runs:
env:
IMAGE_NAME: ${{ inputs.image-name }}
IMAGE_ARCH: ${{ inputs.image-arch }}
RELEASE: ${{ inputs.release }}
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
REF: ${{ github.ref }}
run: |
python3 ${{ github.action_path }}/push_vars.py

View File

@ -2,7 +2,6 @@
import configparser
import os
from json import dumps
from time import time
parser = configparser.ConfigParser()
@ -49,7 +48,7 @@ if is_release:
]
else:
suffix = ""
if image_arch:
if image_arch and image_arch != "amd64":
suffix = f"-{image_arch}"
for name in image_names:
image_tags += [
@ -71,31 +70,12 @@ def get_attest_image_names(image_with_tags: list[str]):
return ",".join(set(image_tags))
# Generate `cache-to` param
cache_to = ""
if should_push:
_cache_tag = "buildcache"
if image_arch:
_cache_tag += f"-{image_arch}"
cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max"
image_build_args = []
if os.getenv("RELEASE", "false").lower() == "true":
image_build_args = [f"VERSION={os.getenv('REF')}"]
else:
image_build_args = [f"GIT_BUILD_HASH={sha}"]
image_build_args = "\n".join(image_build_args)
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print(f"shouldPush={str(should_push).lower()}", file=_output)
print(f"sha={sha}", file=_output)
print(f"version={version}", file=_output)
print(f"prerelease={prerelease}", file=_output)
print(f"imageTags={','.join(image_tags)}", file=_output)
print(f"imageTagsJSON={dumps(image_tags)}", file=_output)
print(f"attestImageNames={get_attest_image_names(image_tags)}", file=_output)
print(f"imageMainTag={image_main_tag}", file=_output)
print(f"imageMainName={image_tags[0]}", file=_output)
print(f"cacheTo={cache_to}", file=_output)
print(f"imageBuildArgs={image_build_args}", file=_output)

View File

@ -1,18 +1,7 @@
#!/bin/bash -x
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
# Non-pushing PR
GITHUB_OUTPUT=/dev/stdout \
GITHUB_REF=ref \
GITHUB_SHA=sha \
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
GITHUB_REPOSITORY=goauthentik/authentik \
python $SCRIPT_DIR/push_vars.py
# Pushing PR/main
GITHUB_OUTPUT=/dev/stdout \
GITHUB_REF=ref \
GITHUB_SHA=sha \
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
GITHUB_REPOSITORY=goauthentik/authentik \
DOCKER_USERNAME=foo \
python $SCRIPT_DIR/push_vars.py

View File

@ -82,16 +82,6 @@ updates:
docusaurus:
patterns:
- "@docusaurus/*"
- package-ecosystem: npm
directory: "/lifecycle/aws"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "lifecycle/aws:"
labels:
- dependencies
- package-ecosystem: pip
directory: "/"
schedule:

View File

@ -1,95 +0,0 @@
# Re-usable workflow for a single-architecture build
name: Single-arch Container build
on:
workflow_call:
inputs:
image_name:
required: true
type: string
image_arch:
required: true
type: string
runs-on:
required: true
type: string
registry_dockerhub:
default: false
type: boolean
registry_ghcr:
default: false
type: boolean
release:
default: false
type: boolean
outputs:
image-digest:
value: ${{ jobs.build.outputs.image-digest }}
jobs:
build:
name: Build ${{ inputs.image_arch }}
runs-on: ${{ inputs.runs-on }}
outputs:
image-digest: ${{ steps.push.outputs.digest }}
permissions:
# Needed to upload container images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
steps:
- uses: actions/checkout@v4
- uses: docker/setup-qemu-action@v3.3.0
- uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
image-arch: ${{ inputs.image_arch }}
release: ${{ inputs.release }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: make empty clients
if: ${{ inputs.release }}
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: generate ts client
if: ${{ !inputs.release }}
run: make gen-client-ts
- name: Build Docker Image
uses: docker/build-push-action@v6
id: push
with:
context: .
push: true
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
build-args: |
${{ steps.ev.outputs.imageBuildArgs }}
tags: ${{ steps.ev.outputs.imageTags }}
platforms: linux/${{ inputs.image_arch }}
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
cache-to: ${{ steps.ev.outputs.cacheTo }}
- uses: actions/attest-build-provenance@v2
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true

View File

@ -1,102 +0,0 @@
# Re-usable workflow for a multi-architecture build
name: Multi-arch container build
on:
workflow_call:
inputs:
image_name:
required: true
type: string
registry_dockerhub:
default: false
type: boolean
registry_ghcr:
default: true
type: boolean
release:
default: false
type: boolean
outputs: {}
jobs:
build-server-amd64:
uses: ./.github/workflows/_reusable-docker-build-single.yaml
secrets: inherit
with:
image_name: ${{ inputs.image_name }}
image_arch: amd64
runs-on: ubuntu-latest
registry_dockerhub: ${{ inputs.registry_dockerhub }}
registry_ghcr: ${{ inputs.registry_ghcr }}
release: ${{ inputs.release }}
build-server-arm64:
uses: ./.github/workflows/_reusable-docker-build-single.yaml
secrets: inherit
with:
image_name: ${{ inputs.image_name }}
image_arch: arm64
runs-on: ubuntu-22.04-arm
registry_dockerhub: ${{ inputs.registry_dockerhub }}
registry_ghcr: ${{ inputs.registry_ghcr }}
release: ${{ inputs.release }}
get-tags:
runs-on: ubuntu-latest
needs:
- build-server-amd64
- build-server-arm64
outputs:
tags: ${{ steps.ev.outputs.imageTagsJSON }}
steps:
- uses: actions/checkout@v4
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
merge-server:
runs-on: ubuntu-latest
needs:
- get-tags
- build-server-amd64
- build-server-arm64
strategy:
fail-fast: false
matrix:
tag: ${{ fromJson(needs.get-tags.outputs.tags) }}
steps:
- uses: actions/checkout@v4
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: int128/docker-manifest-create-action@v2
id: build
with:
tags: ${{ matrix.tag }}
sources: |
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
- uses: actions/attest-build-provenance@v2
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.build.outputs.digest }}
push-to-registry: true

View File

@ -25,10 +25,10 @@ jobs:
uses: ./.github/actions/setup
- uses: actions/setup-node@v4
with:
node-version-file: lifecycle/aws/package.json
node-version-file: website/package.json
cache: "npm"
cache-dependency-path: lifecycle/aws/package-lock.json
- working-directory: lifecycle/aws/
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: |
npm ci
- name: Check changes have been applied

View File

@ -134,7 +134,7 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Create k8s Kind Cluster
uses: helm/kind-action@v1.12.0
uses: helm/kind-action@v1.11.0
- name: run integration
run: |
poetry run coverage run manage.py test tests/integration
@ -223,18 +223,68 @@ jobs:
with:
jobs: ${{ toJSON(needs) }}
build:
strategy:
fail-fast: false
matrix:
arch:
- amd64
- arm64
needs: ci-core-mark
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
needs: ci-core-mark
uses: ./.github/workflows/_reusable-docker-build.yaml
secrets: inherit
with:
image_name: ghcr.io/goauthentik/dev-server
release: false
timeout-minutes: 120
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/dev-server
image-arch: ${{ matrix.arch }}
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: generate ts client
run: make gen-client-ts
- name: Build Docker Image
uses: docker/build-push-action@v6
id: push
with:
context: .
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
tags: ${{ steps.ev.outputs.imageTags }}
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max' || '' }}
platforms: linux/${{ matrix.arch }}
- uses: actions/attest-build-provenance@v2
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
pr-comment:
needs:
- build

View File

@ -72,7 +72,7 @@ jobs:
- rac
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
@ -82,7 +82,7 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.3.0
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables

View File

@ -7,15 +7,64 @@ on:
jobs:
build-server:
uses: ./.github/workflows/_reusable-docker-build.yaml
secrets: inherit
with:
image_name: ghcr.io/goauthentik/server,beryju/authentik
release: true
runs-on: ubuntu-latest
permissions:
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/server,beryju/authentik
- name: Docker Login Registry
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: make empty clients
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: Build Docker Image
uses: docker/build-push-action@v6
id: push
with:
context: .
push: true
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
build-args: |
VERSION=${{ github.ref }}
tags: ${{ steps.ev.outputs.imageTags }}
platforms: linux/amd64,linux/arm64
- uses: actions/attest-build-provenance@v2
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
build-outpost:
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
@ -34,7 +83,7 @@ jobs:
with:
go-version-file: "go.mod"
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.3.0
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
@ -139,8 +188,8 @@ jobs:
aws-region: ${{ env.AWS_REGION }}
- name: Upload template
run: |
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml
aws s3 cp website/docs/install-config/install/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml
aws s3 cp website/docs/install-config/install/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml
test-release:
needs:
- build-server

View File

@ -29,6 +29,7 @@
"!Enumerate sequence",
"!Env scalar",
"!Find sequence",
"!FindObject sequence",
"!Format sequence",
"!If sequence",
"!Index scalar",
@ -51,7 +52,9 @@
"ignoreCase": false
}
],
"go.testFlags": ["-count=1"],
"go.testFlags": [
"-count=1"
],
"github-actions.workflows.pinned.workflows": [
".github/workflows/ci-main.yml"
]

View File

@ -15,7 +15,6 @@ go.mod @goauthentik/backend
go.sum @goauthentik/backend
# Infrastructure
.github/ @goauthentik/infrastructure
lifecycle/aws/ @goauthentik/infrastructure
Dockerfile @goauthentik/infrastructure
*Dockerfile @goauthentik/infrastructure
.dockerignore @goauthentik/infrastructure

View File

@ -94,7 +94,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
# Stage 5: Python dependencies
FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips AS python-deps
FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS python-deps
ARG TARGETARCH
ARG TARGETVARIANT
@ -116,30 +116,15 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
--mount=type=bind,target=./poetry.lock,src=./poetry.lock \
--mount=type=cache,target=/root/.cache/pip \
--mount=type=cache,target=/root/.cache/pypoetry \
pip install --no-cache cffi && \
apt-get update && \
apt-get install -y --no-install-recommends \
build-essential libffi-dev \
# Required for cryptography
curl pkg-config \
# Required for lxml
libxslt-dev zlib1g-dev \
# Required for xmlsec
libltdl-dev \
# Required for kadmin
sccache clang && \
curl https://sh.rustup.rs -sSf | sh -s -- -y && \
. "$HOME/.cargo/env" && \
python -m venv /ak-root/venv/ && \
bash -c "source ${VENV_PATH}/bin/activate && \
pip3 install --upgrade pip poetry && \
poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \
pip3 install --upgrade pip && \
pip3 install poetry && \
poetry install --only=main --no-ansi --no-interaction --no-root && \
pip uninstall cryptography -y && \
poetry install --only=main --no-ansi --no-interaction --no-root"
pip install --force-reinstall /wheels/*"
# Stage 6: Run
FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips AS final-image
FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS final-image
ARG VERSION
ARG GIT_BUILD_HASH
@ -155,12 +140,10 @@ WORKDIR /
# We cannot cache this layer otherwise we'll end up with a bigger image
RUN apt-get update && \
apt-get upgrade -y && \
# Required for runtime
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 libltdl7 libxslt1.1 && \
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 && \
# Required for bootstrap & healtcheck
apt-get install -y --no-install-recommends runit && \
pip3 install --no-cache-dir --upgrade pip && \
apt-get clean && \
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
@ -193,8 +176,9 @@ ENV TMPDIR=/dev/shm/ \
PYTHONUNBUFFERED=1 \
PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
VENV_PATH="/ak-root/venv" \
POETRY_VIRTUALENVS_CREATE=false \
GOFIPS=1
POETRY_VIRTUALENVS_CREATE=false
ENV GOFIPS=1
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]

View File

@ -5,7 +5,7 @@ PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.npm_version)
PY_SOURCES = authentik tests scripts lifecycle .github
PY_SOURCES = authentik tests scripts lifecycle .github website/docs/install-config/install/aws
DOCKER_IMAGE ?= "authentik:test"
GEN_API_TS = "gen-ts-api"
@ -78,9 +78,6 @@ migrate: ## Run the Authentik Django server's migrations
i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service
aws-cfn:
cd lifecycle/aws && npm run aws-cfn
core-i18n-extract:
ak makemessages \
--add-location file \
@ -152,7 +149,7 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/${GEN_API_TS} \
@ -255,6 +252,9 @@ website-build:
website-watch: ## Build and watch the documentation website, updating automatically
cd website && npm run watch
aws-cfn:
cd website && npm run aws-cfn
#########################
## Docker
#########################

View File

@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
| Version | Supported |
| --------- | --------- |
| 2024.8.x | ✅ |
| 2024.10.x | ✅ |
| 2024.12.x | ✅ |
## Reporting a Vulnerability

View File

@ -2,7 +2,7 @@
from os import environ
__version__ = "2024.12.2"
__version__ = "2024.10.5"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@ -16,5 +16,5 @@ def get_full_version() -> str:
"""Get full version, with build hash appended"""
version = __version__
if (build_hash := get_build_hash()) != "":
return f"{version}+{build_hash}"
version += "." + build_hash
return version

View File

@ -7,9 +7,7 @@ from sys import version as python_version
from typing import TypedDict
from cryptography.hazmat.backends.openssl.backend import backend
from django.conf import settings
from django.utils.timezone import now
from django.views.debug import SafeExceptionReporterFilter
from drf_spectacular.utils import extend_schema
from rest_framework.fields import SerializerMethodField
from rest_framework.request import Request
@ -54,16 +52,10 @@ class SystemInfoSerializer(PassiveSerializer):
def get_http_headers(self, request: Request) -> dict[str, str]:
"""Get HTTP Request headers"""
headers = {}
raw_session = request._request.COOKIES.get(settings.SESSION_COOKIE_NAME)
for key, value in request.META.items():
if not isinstance(value, str):
continue
actual_value = value
if raw_session in actual_value:
actual_value = actual_value.replace(
raw_session, SafeExceptionReporterFilter.cleansed_substitute
)
headers[key] = actual_value
headers[key] = value
return headers
def get_http_host(self, request: Request) -> str:

View File

@ -1,16 +1,12 @@
"""authentik administration overview"""
from socket import gethostname
from django.conf import settings
from drf_spectacular.utils import extend_schema, inline_serializer
from packaging.version import parse
from rest_framework.fields import BooleanField, CharField
from rest_framework.fields import IntegerField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik import get_full_version
from authentik.rbac.permissions import HasPermission
from authentik.root.celery import CELERY_APP
@ -20,38 +16,11 @@ class WorkerView(APIView):
permission_classes = [HasPermission("authentik_rbac.view_system_info")]
@extend_schema(
responses=inline_serializer(
"Worker",
fields={
"worker_id": CharField(),
"version": CharField(),
"version_matching": BooleanField(),
},
many=True,
)
)
@extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()}))
def get(self, request: Request) -> Response:
"""Get currently connected worker count."""
raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5)
our_version = parse(get_full_version())
response = []
for worker in raw:
key = list(worker.keys())[0]
version = worker[key].get("version")
version_matching = False
if version:
version_matching = parse(version) == our_version
response.append(
{"worker_id": key, "version": version, "version_matching": version_matching}
)
count = len(CELERY_APP.control.ping(timeout=0.5))
# In debug we run with `task_always_eager`, so tasks are ran on the main process
if settings.DEBUG: # pragma: no cover
response.append(
{
"worker_id": f"authentik-debug@{gethostname()}",
"version": get_full_version(),
"version_matching": True,
}
)
return Response(response)
count += 1
return Response({"count": count})

View File

@ -1,10 +1,11 @@
"""authentik admin app config"""
from prometheus_client import Info
from prometheus_client import Gauge, Info
from authentik.blueprints.apps import ManagedAppConfig
PROM_INFO = Info("authentik_version", "Currently running authentik version")
GAUGE_WORKERS = Gauge("authentik_admin_workers", "Currently connected workers")
class AuthentikAdminConfig(ManagedAppConfig):

View File

@ -1,35 +1,14 @@
"""admin signals"""
from django.dispatch import receiver
from packaging.version import parse
from prometheus_client import Gauge
from authentik import get_full_version
from authentik.admin.apps import GAUGE_WORKERS
from authentik.root.celery import CELERY_APP
from authentik.root.monitoring import monitoring_set
GAUGE_WORKERS = Gauge(
"authentik_admin_workers",
"Currently connected workers, their versions and if they are the same version as authentik",
["version", "version_matched"],
)
_version = parse(get_full_version())
@receiver(monitoring_set)
def monitoring_set_workers(sender, **kwargs):
"""Set worker gauge"""
raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5)
worker_version_count = {}
for worker in raw:
key = list(worker.keys())[0]
version = worker[key].get("version")
version_matching = False
if version:
version_matching = parse(version) == _version
worker_version_count.setdefault(version, {"count": 0, "matching": version_matching})
worker_version_count[version]["count"] += 1
for version, stats in worker_version_count.items():
GAUGE_WORKERS.labels(version, stats["matching"]).set(stats["count"])
count = len(CELERY_APP.control.ping(timeout=0.5))
GAUGE_WORKERS.set(count)

View File

@ -34,7 +34,7 @@ class TestAdminAPI(TestCase):
response = self.client.get(reverse("authentik_api:admin_workers"))
self.assertEqual(response.status_code, 200)
body = loads(response.content)
self.assertEqual(len(body), 0)
self.assertEqual(body["count"], 0)
def test_metrics(self):
"""Test metrics API"""

View File

@ -0,0 +1,67 @@
"""API Authorization"""
from django.conf import settings
from django.db.models import Model
from django.db.models.query import QuerySet
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.authentication import get_authorization_header
from rest_framework.filters import BaseFilterBackend
from rest_framework.permissions import BasePermission
from rest_framework.request import Request
from authentik.api.authentication import validate_auth
from authentik.rbac.filters import ObjectFilter
class OwnerFilter(BaseFilterBackend):
"""Filter objects by their owner"""
owner_key = "user"
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
if request.user.is_superuser:
return queryset
return queryset.filter(**{self.owner_key: request.user})
class SecretKeyFilter(DjangoFilterBackend):
"""Allow access to all objects when authenticated with secret key as token.
Replaces both DjangoFilterBackend and ObjectFilter"""
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
auth_header = get_authorization_header(request)
token = validate_auth(auth_header)
if token and token == settings.SECRET_KEY:
return queryset
queryset = ObjectFilter().filter_queryset(request, queryset, view)
return super().filter_queryset(request, queryset, view)
class OwnerPermissions(BasePermission):
"""Authorize requests by an object's owner matching the requesting user"""
owner_key = "user"
def has_permission(self, request: Request, view) -> bool:
"""If the user is authenticated, we allow all requests here. For listing, the
object-level permissions are done by the filter backend"""
return request.user.is_authenticated
def has_object_permission(self, request: Request, view, obj: Model) -> bool:
"""Check if the object's owner matches the currently logged in user"""
if not hasattr(obj, self.owner_key):
return False
owner = getattr(obj, self.owner_key)
if owner != request.user:
return False
return True
class OwnerSuperuserPermissions(OwnerPermissions):
"""Similar to OwnerPermissions, except always allow access for superusers"""
def has_object_permission(self, request: Request, view, obj: Model) -> bool:
if request.user.is_superuser:
return True
return super().has_object_permission(request, view, obj)

View File

@ -1,68 +0,0 @@
"""Test and debug Blueprints"""
import atexit
import readline
from pathlib import Path
from pprint import pformat
from sys import exit as sysexit
from textwrap import indent
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
from yaml import load
from authentik.blueprints.v1.common import BlueprintLoader, EntryInvalidError
from authentik.core.management.commands.shell import get_banner_text
from authentik.lib.utils.errors import exception_to_string
LOGGER = get_logger()
class Command(BaseCommand):
"""Test and debug Blueprints"""
lines = []
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
histfolder = Path("~").expanduser() / Path(".local/share/authentik")
histfolder.mkdir(parents=True, exist_ok=True)
histfile = histfolder / Path("blueprint_shell_history")
readline.parse_and_bind("tab: complete")
readline.parse_and_bind("set editing-mode vi")
try:
readline.read_history_file(str(histfile))
except FileNotFoundError:
pass
atexit.register(readline.write_history_file, str(histfile))
@no_translations
def handle(self, *args, **options):
"""Interactively debug blueprint files"""
self.stdout.write(get_banner_text("Blueprint shell"))
self.stdout.write("Type '.eval' to evaluate previously entered statement(s).")
def do_eval():
yaml_input = "\n".join([line for line in self.lines if line])
data = load(yaml_input, BlueprintLoader)
self.stdout.write(pformat(data))
self.lines = []
while True:
try:
line = input("> ")
if line == ".eval":
do_eval()
else:
self.lines.append(line)
except EntryInvalidError as exc:
self.stdout.write("Failed to evaluate expression:")
self.stdout.write(indent(exception_to_string(exc), prefix=" "))
except EOFError:
break
except KeyboardInterrupt:
self.stdout.write()
sysexit(0)
self.stdout.write()

View File

@ -126,7 +126,7 @@ class Command(BaseCommand):
def_name_perm = f"model_{model_path}_permissions"
def_path_perm = f"#/$defs/{def_name_perm}"
self.schema["$defs"][def_name_perm] = self.model_permissions(model)
template = {
return {
"type": "object",
"required": ["model", "identifiers"],
"properties": {
@ -143,11 +143,6 @@ class Command(BaseCommand):
"identifiers": {"$ref": def_path},
},
}
# Meta models don't require identifiers, as there's no matching database model to find
if issubclass(model, BaseMetaModel):
del template["properties"]["identifiers"]
template["required"].remove("identifiers")
return template
def field_to_jsonschema(self, field: Field) -> dict:
"""Convert a single field to json schema"""

View File

@ -150,6 +150,7 @@ entries:
at_index_sequence_default: !AtIndex [!Context sequence, 100, "non existent"]
at_index_mapping: !AtIndex [!Context mapping, "key2"]
at_index_mapping_default: !AtIndex [!Context mapping, "invalid", "non existent"]
find_object: !AtIndex [!FindObject [authentik_providers_oauth2.scopemapping, [scope_name, openid]], managed]
identifiers:
name: test
conditions:

View File

@ -4,6 +4,7 @@ from os import environ
from django.test import TransactionTestCase
from authentik.blueprints.tests import apply_blueprint
from authentik.blueprints.v1.exporter import FlowExporter
from authentik.blueprints.v1.importer import Importer, transaction_rollback
from authentik.core.models import Group
@ -126,6 +127,7 @@ class TestBlueprintsV1(TransactionTestCase):
self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)
@apply_blueprint("system/providers-oauth2.yaml")
def test_import_yaml_tags(self):
"""Test some yaml tags"""
ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
@ -136,91 +138,93 @@ class TestBlueprintsV1(TransactionTestCase):
self.assertTrue(importer.apply())
policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()
self.assertTrue(policy)
self.assertTrue(
Group.objects.filter(
attributes={
"policy_pk1": str(policy.pk) + "-suffix",
"policy_pk2": str(policy.pk) + "-suffix",
"boolAnd": True,
"boolNand": False,
"boolOr": True,
"boolNor": False,
"boolXor": True,
"boolXnor": False,
"boolComplex": True,
"if_true_complex": {
"dictionary": {
"with": {"keys": "and_values"},
"and_nested_custom_tags": "foo-bar",
}
group = Group.objects.filter(name="test").first()
self.assertIsNotNone(group)
self.assertEqual(
group.attributes,
{
"policy_pk1": str(policy.pk) + "-suffix",
"policy_pk2": str(policy.pk) + "-suffix",
"boolAnd": True,
"boolNand": False,
"boolOr": True,
"boolNor": False,
"boolXor": True,
"boolXnor": False,
"boolComplex": True,
"if_true_complex": {
"dictionary": {
"with": {"keys": "and_values"},
"and_nested_custom_tags": "foo-bar",
}
},
"if_false_complex": ["list", "with", "items", "foo-bar"],
"if_true_simple": True,
"if_short": True,
"if_false_simple": 2,
"enumerate_mapping_to_mapping": {
"prefix-key1": "other-prefix-value",
"prefix-key2": "other-prefix-2",
},
"enumerate_mapping_to_sequence": [
"prefixed-pair-key1-value",
"prefixed-pair-key2-2",
],
"enumerate_sequence_to_sequence": [
"prefixed-items-0-foo",
"prefixed-items-1-bar",
],
"enumerate_sequence_to_mapping": {"index: 0": "foo", "index: 1": "bar"},
"nested_complex_enumeration": {
"0": {
"key1": [
["prefixed-f", "prefixed-o", "prefixed-o"],
{
"outer_value": "foo",
"outer_index": 0,
"middle_value": "value",
"middle_index": "key1",
},
],
"key2": [
["prefixed-f", "prefixed-o", "prefixed-o"],
{
"outer_value": "foo",
"outer_index": 0,
"middle_value": 2,
"middle_index": "key2",
},
],
},
"if_false_complex": ["list", "with", "items", "foo-bar"],
"if_true_simple": True,
"if_short": True,
"if_false_simple": 2,
"enumerate_mapping_to_mapping": {
"prefix-key1": "other-prefix-value",
"prefix-key2": "other-prefix-2",
"1": {
"key1": [
["prefixed-b", "prefixed-a", "prefixed-r"],
{
"outer_value": "bar",
"outer_index": 1,
"middle_value": "value",
"middle_index": "key1",
},
],
"key2": [
["prefixed-b", "prefixed-a", "prefixed-r"],
{
"outer_value": "bar",
"outer_index": 1,
"middle_value": 2,
"middle_index": "key2",
},
],
},
"enumerate_mapping_to_sequence": [
"prefixed-pair-key1-value",
"prefixed-pair-key2-2",
],
"enumerate_sequence_to_sequence": [
"prefixed-items-0-foo",
"prefixed-items-1-bar",
],
"enumerate_sequence_to_mapping": {"index: 0": "foo", "index: 1": "bar"},
"nested_complex_enumeration": {
"0": {
"key1": [
["prefixed-f", "prefixed-o", "prefixed-o"],
{
"outer_value": "foo",
"outer_index": 0,
"middle_value": "value",
"middle_index": "key1",
},
],
"key2": [
["prefixed-f", "prefixed-o", "prefixed-o"],
{
"outer_value": "foo",
"outer_index": 0,
"middle_value": 2,
"middle_index": "key2",
},
],
},
"1": {
"key1": [
["prefixed-b", "prefixed-a", "prefixed-r"],
{
"outer_value": "bar",
"outer_index": 1,
"middle_value": "value",
"middle_index": "key1",
},
],
"key2": [
["prefixed-b", "prefixed-a", "prefixed-r"],
{
"outer_value": "bar",
"outer_index": 1,
"middle_value": 2,
"middle_index": "key2",
},
],
},
},
"nested_context": "context-nested-value",
"env_null": None,
"at_index_sequence": "foo",
"at_index_sequence_default": "non existent",
"at_index_mapping": 2,
"at_index_mapping_default": "non existent",
}
).exists()
},
"nested_context": "context-nested-value",
"env_null": None,
"at_index_sequence": "foo",
"at_index_sequence_default": "non existent",
"at_index_mapping": 2,
"at_index_mapping_default": "non existent",
"find_object": "goauthentik.io/providers/oauth2/scope-openid",
},
)
self.assertTrue(
OAuthSource.objects.filter(

View File

@ -202,9 +202,6 @@ class Blueprint:
class YAMLTag:
"""Base class for all YAML Tags"""
def __repr__(self) -> str:
return str(self.resolve(BlueprintEntry(""), Blueprint()))
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
"""Implement yaml tag logic"""
raise NotImplementedError
@ -314,7 +311,7 @@ class Format(YAMLTag):
class Find(YAMLTag):
"""Find any object"""
"""Find any object primary key"""
model_name: str | YAMLTag
conditions: list[list]
@ -329,7 +326,7 @@ class Find(YAMLTag):
values.append(loader.construct_object(node_values))
self.conditions.append(values)
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
def _get_instance(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
if isinstance(self.model_name, YAMLTag):
model_name = self.model_name.resolve(entry, blueprint)
else:
@ -351,12 +348,29 @@ class Find(YAMLTag):
else:
query_value = cond[1]
query &= Q(**{query_key: query_value})
instance = model_class.objects.filter(query).first()
return model_class.objects.filter(query).first()
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
instance = self._get_instance(entry, blueprint)
if instance:
return instance.pk
return None
class FindObject(Find):
"""Find any object"""
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
instance = self._get_instance(entry, blueprint)
if not instance:
return None
if not isinstance(instance, SerializerModel):
raise EntryInvalidError.from_entry(
f"Model {self.model_name} is not resolvable through FindObject", entry
)
return instance.serializer(instance=instance).data
class Condition(YAMLTag):
"""Convert all values to a single boolean"""
@ -652,6 +666,7 @@ class BlueprintLoader(SafeLoader):
super().__init__(*args, **kwargs)
self.add_constructor("!KeyOf", KeyOf)
self.add_constructor("!Find", Find)
self.add_constructor("!FindObject", FindObject)
self.add_constructor("!Context", Context)
self.add_constructor("!Format", Format)
self.add_constructor("!Condition", Condition)

View File

@ -14,10 +14,10 @@ from rest_framework.response import Response
from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet
from authentik.api.authorization import SecretKeyFilter
from authentik.brands.models import Brand
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.rbac.filters import SecretKeyFilter
from authentik.tenants.utils import get_current_tenant

View File

@ -1,16 +1,15 @@
"""Application Roles API Viewset"""
from django.http import HttpRequest
from django.utils.translation import gettext_lazy as _
from rest_framework.exceptions import ValidationError
from rest_framework.viewsets import ModelViewSet
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import (
Application,
ApplicationEntitlement,
User,
)
@ -19,10 +18,7 @@ class ApplicationEntitlementSerializer(ModelSerializer):
def validate_app(self, app: Application) -> Application:
"""Ensure user has permission to view"""
request: HttpRequest = self.context.get("request")
if not request and SERIALIZER_CONTEXT_BLUEPRINT in self.context:
return app
user = request.user
user: User = self._context["request"].user
if user.has_perm("view_application", app) or user.has_perm(
"authentik_core.view_application"
):

View File

@ -2,12 +2,16 @@
from typing import TypedDict
from django_filters.rest_framework import DjangoFilterBackend
from guardian.utils import get_anonymous_user
from rest_framework import mixins
from rest_framework.fields import SerializerMethodField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.request import Request
from rest_framework.viewsets import GenericViewSet
from ua_parser import user_agent_parser
from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import AuthenticatedSession
@ -106,4 +110,11 @@ class AuthenticatedSessionViewSet(
search_fields = ["user__username", "last_ip", "last_user_agent"]
filterset_fields = ["user__username", "last_ip", "last_user_agent"]
ordering = ["user__username"]
owner_field = "user"
permission_classes = [OwnerSuperuserPermissions]
filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
def get_queryset(self):
user = self.request.user if self.request else get_anonymous_user()
if user.is_superuser:
return super().get_queryset()
return super().get_queryset().filter(user=user.pk)

View File

@ -2,16 +2,19 @@
from collections.abc import Iterable
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework import mixins
from rest_framework.decorators import action
from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from structlog.stdlib import get_logger
from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.object_types import TypesMixin
from authentik.core.api.used_by import UsedByMixin
@ -186,10 +189,11 @@ class UserSourceConnectionViewSet(
queryset = UserSourceConnection.objects.all()
serializer_class = UserSourceConnectionSerializer
permission_classes = [OwnerSuperuserPermissions]
filterset_fields = ["user", "source__slug"]
search_fields = ["source__slug"]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
ordering = ["source__slug", "pk"]
owner_field = "user"
class GroupSourceConnectionSerializer(SourceSerializer):
@ -224,7 +228,8 @@ class GroupSourceConnectionViewSet(
queryset = GroupSourceConnection.objects.all()
serializer_class = GroupSourceConnectionSerializer
permission_classes = [OwnerSuperuserPermissions]
filterset_fields = ["group", "source__slug"]
search_fields = ["source__slug"]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
ordering = ["source__slug", "pk"]
owner_field = "user"

View File

@ -3,15 +3,18 @@
from typing import Any
from django.utils.timezone import now
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import OpenApiResponse, extend_schema, inline_serializer
from guardian.shortcuts import assign_perm, get_anonymous_user
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.blueprints.api import ManagedSerializer
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.used_by import UsedByMixin
@ -135,8 +138,8 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
"managed",
]
ordering = ["identifier", "expires"]
owner_field = "user"
rbac_allow_create_without_perm = True
permission_classes = [OwnerSuperuserPermissions]
filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
def get_queryset(self):
user = self.request.user if self.request else get_anonymous_user()

View File

@ -427,7 +427,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
queryset = User.objects.none()
ordering = ["username"]
serializer_class = UserSerializer
search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"]
search_fields = ["username", "name", "is_active", "email", "uuid"]
filterset_class = UsersFilter
def get_queryset(self):
@ -585,7 +585,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
"""Set password for user"""
user: User = self.get_object()
try:
user.set_password(request.data.get("password"), request=request)
user.set_password(request.data.get("password"))
user.save()
except (ValidationError, IntegrityError) as exc:
LOGGER.debug("Failed to set password", exc=exc)

View File

@ -44,12 +44,13 @@ class TokenBackend(InbuiltBackend):
self, request: HttpRequest, username: str | None, password: str | None, **kwargs: Any
) -> User | None:
try:
user = User._default_manager.get_by_natural_key(username)
except User.DoesNotExist:
# Run the default password hasher once to reduce the timing
# difference between an existing and a nonexistent user (#20760).
User().set_password(password, request=request)
User().set_password(password)
return None
tokens = Token.filter_not_expired(

View File

@ -58,7 +58,6 @@ class PropertyMappingEvaluator(BaseEvaluator):
self._context["user"] = user
if request:
req.http_request = request
self._context["http_request"] = request
req.context.update(**kwargs)
self._context["request"] = req
self._context.update(**kwargs)

View File

@ -17,9 +17,7 @@ from authentik.events.middleware import should_log_model
from authentik.events.models import Event, EventAction
from authentik.events.utils import model_to_dict
def get_banner_text(shell_type="shell") -> str:
return f"""### authentik {shell_type} ({get_full_version()})
BANNER_TEXT = f"""### authentik shell ({get_full_version()})
### Node {platform.node()} | Arch {platform.machine()} | Python {platform.python_version()} """
@ -116,4 +114,4 @@ class Command(BaseCommand):
readline.parse_and_bind("tab: complete")
# Run interactive shell
code.interact(banner=get_banner_text(), local=namespace)
code.interact(banner=BANNER_TEXT, local=namespace)

View File

@ -1,45 +0,0 @@
# Generated by Django 5.0.10 on 2025-01-13 18:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0041_applicationentitlement"),
]
operations = [
migrations.AddIndex(
model_name="authenticatedsession",
index=models.Index(fields=["expires"], name="authentik_c_expires_08251d_idx"),
),
migrations.AddIndex(
model_name="authenticatedsession",
index=models.Index(fields=["expiring"], name="authentik_c_expirin_9cd839_idx"),
),
migrations.AddIndex(
model_name="authenticatedsession",
index=models.Index(
fields=["expiring", "expires"], name="authentik_c_expirin_195a84_idx"
),
),
migrations.AddIndex(
model_name="authenticatedsession",
index=models.Index(fields=["session_key"], name="authentik_c_session_d0f005_idx"),
),
migrations.AddIndex(
model_name="token",
index=models.Index(fields=["expires"], name="authentik_c_expires_a62b4b_idx"),
),
migrations.AddIndex(
model_name="token",
index=models.Index(fields=["expiring"], name="authentik_c_expirin_a1b838_idx"),
),
migrations.AddIndex(
model_name="token",
index=models.Index(
fields=["expiring", "expires"], name="authentik_c_expirin_ba04d9_idx"
),
),
]

View File

@ -356,13 +356,13 @@ class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser):
"""superuser == staff user"""
return self.is_superuser # type: ignore
def set_password(self, raw_password, signal=True, sender=None, request=None):
def set_password(self, raw_password, signal=True, sender=None):
if self.pk and signal:
from authentik.core.signals import password_changed
if not sender:
sender = self
password_changed.send(sender=sender, user=self, password=raw_password, request=request)
password_changed.send(sender=sender, user=self, password=raw_password)
self.password_change_date = now()
return super().set_password(raw_password)
@ -846,11 +846,6 @@ class ExpiringModel(models.Model):
class Meta:
abstract = True
indexes = [
models.Index(fields=["expires"]),
models.Index(fields=["expiring"]),
models.Index(fields=["expiring", "expires"]),
]
def expire_action(self, *args, **kwargs):
"""Handler which is called when this object is expired. By
@ -906,7 +901,7 @@ class Token(SerializerModel, ManagedModel, ExpiringModel):
class Meta:
verbose_name = _("Token")
verbose_name_plural = _("Tokens")
indexes = ExpiringModel.Meta.indexes + [
indexes = [
models.Index(fields=["identifier"]),
models.Index(fields=["key"]),
]
@ -1006,9 +1001,6 @@ class AuthenticatedSession(ExpiringModel):
class Meta:
verbose_name = _("Authenticated Session")
verbose_name_plural = _("Authenticated Sessions")
indexes = ExpiringModel.Meta.indexes + [
models.Index(fields=["session_key"]),
]
def __str__(self) -> str:
return f"Authenticated Session {self.session_key[:10]}"

View File

@ -28,6 +28,7 @@ from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
from authentik.api.authorization import SecretKeyFilter
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.crypto.apps import MANAGED_KEY
@ -35,7 +36,7 @@ from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg
from authentik.crypto.models import CertificateKeyPair
from authentik.events.models import Event, EventAction
from authentik.rbac.decorators import permission_required
from authentik.rbac.filters import ObjectFilter, SecretKeyFilter
from authentik.rbac.filters import ObjectFilter
LOGGER = get_logger()

View File

@ -1,27 +0,0 @@
# Generated by Django 5.0.10 on 2025-01-13 18:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_enterprise", "0003_remove_licenseusage_within_limits_and_more"),
]
operations = [
migrations.AddIndex(
model_name="licenseusage",
index=models.Index(fields=["expires"], name="authentik_e_expires_3f2956_idx"),
),
migrations.AddIndex(
model_name="licenseusage",
index=models.Index(fields=["expiring"], name="authentik_e_expirin_11d3d7_idx"),
),
migrations.AddIndex(
model_name="licenseusage",
index=models.Index(
fields=["expiring", "expires"], name="authentik_e_expirin_4d558f_idx"
),
),
]

View File

@ -93,4 +93,3 @@ class LicenseUsage(ExpiringModel):
class Meta:
verbose_name = _("License Usage")
verbose_name_plural = _("License Usage Records")
indexes = ExpiringModel.Meta.indexes

View File

@ -1,8 +1,11 @@
"""RAC Provider API Views"""
from django_filters.rest_framework.backends import DjangoFilterBackend
from rest_framework import mixins
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.viewsets import GenericViewSet
from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
from authentik.core.api.groups import GroupMemberSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
@ -31,6 +34,12 @@ class ConnectionTokenSerializer(EnterpriseRequiredMixin, ModelSerializer):
]
class ConnectionTokenOwnerFilter(OwnerFilter):
"""Owner filter for connection tokens (checks session's user)"""
owner_key = "session__user"
class ConnectionTokenViewSet(
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
@ -46,4 +55,10 @@ class ConnectionTokenViewSet(
filterset_fields = ["endpoint", "session__user", "provider"]
search_fields = ["endpoint__name", "provider__name"]
ordering = ["endpoint__name", "provider__name"]
owner_field = "session__user"
permission_classes = [OwnerSuperuserPermissions]
filter_backends = [
ConnectionTokenOwnerFilter,
DjangoFilterBackend,
OrderingFilter,
SearchFilter,
]

View File

@ -1,28 +0,0 @@
# Generated by Django 5.0.10 on 2025-01-13 18:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"),
("authentik_providers_rac", "0005_alter_racpropertymapping_options"),
]
operations = [
migrations.AddIndex(
model_name="connectiontoken",
index=models.Index(fields=["expires"], name="authentik_p_expires_91f148_idx"),
),
migrations.AddIndex(
model_name="connectiontoken",
index=models.Index(fields=["expiring"], name="authentik_p_expirin_59a5a7_idx"),
),
migrations.AddIndex(
model_name="connectiontoken",
index=models.Index(
fields=["expiring", "expires"], name="authentik_p_expirin_aed3ca_idx"
),
),
]

View File

@ -159,9 +159,9 @@ class ConnectionToken(ExpiringModel):
default_settings["port"] = str(port)
else:
default_settings["hostname"] = self.endpoint.host
if self.endpoint.protocol == Protocols.RDP:
default_settings["resize-method"] = "display-update"
default_settings["client-name"] = f"authentik - {self.session.user}"
default_settings["client-name"] = "authentik"
# default_settings["enable-drive"] = "true"
# default_settings["drive-name"] = "authentik"
settings = {}
always_merger.merge(settings, default_settings)
always_merger.merge(settings, self.endpoint.provider.settings)
@ -211,4 +211,3 @@ class ConnectionToken(ExpiringModel):
class Meta:
verbose_name = _("RAC Connection token")
verbose_name_plural = _("RAC Connection tokens")
indexes = ExpiringModel.Meta.indexes

View File

@ -50,10 +50,9 @@ class TestModels(TransactionTestCase):
{
"hostname": self.endpoint.host.split(":")[0],
"port": "1324",
"client-name": f"authentik - {self.user}",
"client-name": "authentik",
"drive-path": path,
"create-drive-path": "true",
"resize-method": "display-update",
},
)
# Set settings in provider
@ -64,11 +63,10 @@ class TestModels(TransactionTestCase):
{
"hostname": self.endpoint.host.split(":")[0],
"port": "1324",
"client-name": f"authentik - {self.user}",
"client-name": "authentik",
"drive-path": path,
"create-drive-path": "true",
"level": "provider",
"resize-method": "display-update",
},
)
# Set settings in endpoint
@ -81,11 +79,10 @@ class TestModels(TransactionTestCase):
{
"hostname": self.endpoint.host.split(":")[0],
"port": "1324",
"client-name": f"authentik - {self.user}",
"client-name": "authentik",
"drive-path": path,
"create-drive-path": "true",
"level": "endpoint",
"resize-method": "display-update",
},
)
# Set settings in token
@ -98,11 +95,10 @@ class TestModels(TransactionTestCase):
{
"hostname": self.endpoint.host.split(":")[0],
"port": "1324",
"client-name": f"authentik - {self.user}",
"client-name": "authentik",
"drive-path": path,
"create-drive-path": "true",
"level": "token",
"resize-method": "display-update",
},
)
# Set settings in property mapping (provider)
@ -118,11 +114,10 @@ class TestModels(TransactionTestCase):
{
"hostname": self.endpoint.host.split(":")[0],
"port": "1324",
"client-name": f"authentik - {self.user}",
"client-name": "authentik",
"drive-path": path,
"create-drive-path": "true",
"level": "property_mapping_provider",
"resize-method": "display-update",
},
)
# Set settings in property mapping (endpoint)
@ -140,12 +135,11 @@ class TestModels(TransactionTestCase):
{
"hostname": self.endpoint.host.split(":")[0],
"port": "1324",
"client-name": f"authentik - {self.user}",
"client-name": "authentik",
"drive-path": path,
"create-drive-path": "true",
"level": "property_mapping_endpoint",
"foo": "true",
"bar": "6",
"resize-method": "display-update",
},
)

View File

@ -1,11 +1,14 @@
"""AuthenticatorEndpointGDTCStage API Views"""
from django_filters.rest_framework.backends import DjangoFilterBackend
from rest_framework import mixins
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.permissions import IsAdminUser
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import GenericViewSet, ModelViewSet
from structlog.stdlib import get_logger
from authentik.api.authorization import OwnerFilter, OwnerPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
@ -64,7 +67,8 @@ class EndpointDeviceViewSet(
search_fields = ["name"]
filterset_fields = ["name"]
ordering = ["name"]
owner_field = "user"
permission_classes = [OwnerPermissions]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
class EndpointAdminDeviceViewSet(ModelViewSet):

View File

@ -1,15 +1,17 @@
"""Notification API Views"""
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework import mixins
from rest_framework.decorators import action
from rest_framework.fields import ReadOnlyField
from rest_framework.permissions import IsAuthenticated
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from authentik.api.authorization import OwnerFilter, OwnerPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.events.api.events import EventSerializer
@ -55,7 +57,8 @@ class NotificationViewSet(
"seen",
"user",
]
owner_field = "user"
permission_classes = [OwnerPermissions]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
@extend_schema(
request=OpenApiTypes.NONE,
@ -63,7 +66,7 @@ class NotificationViewSet(
204: OpenApiResponse(description="Marked tasks as read successfully."),
},
)
@action(detail=False, methods=["post"], permission_classes=[IsAuthenticated])
@action(detail=False, methods=["post"])
def mark_all_seen(self, request: Request) -> Response:
"""Mark all the user's notifications as seen"""
Notification.objects.filter(user=request.user, seen=False).update(seen=True)


@ -1,41 +0,0 @@
# Generated by Django 5.0.10 on 2025-01-13 18:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_events", "0007_event_authentik_e_action_9a9dd9_idx_and_more"),
]
operations = [
migrations.AddIndex(
model_name="event",
index=models.Index(fields=["expires"], name="authentik_e_expires_8c73a8_idx"),
),
migrations.AddIndex(
model_name="event",
index=models.Index(fields=["expiring"], name="authentik_e_expirin_b5cb5e_idx"),
),
migrations.AddIndex(
model_name="event",
index=models.Index(
fields=["expiring", "expires"], name="authentik_e_expirin_e37180_idx"
),
),
migrations.AddIndex(
model_name="systemtask",
index=models.Index(fields=["expires"], name="authentik_e_expires_4d3985_idx"),
),
migrations.AddIndex(
model_name="systemtask",
index=models.Index(fields=["expiring"], name="authentik_e_expirin_81d649_idx"),
),
migrations.AddIndex(
model_name="systemtask",
index=models.Index(
fields=["expiring", "expires"], name="authentik_e_expirin_eb3598_idx"
),
),
]


@ -306,7 +306,7 @@ class Event(SerializerModel, ExpiringModel):
class Meta:
verbose_name = _("Event")
verbose_name_plural = _("Events")
indexes = ExpiringModel.Meta.indexes + [
indexes = [
models.Index(fields=["action"]),
models.Index(fields=["user"]),
models.Index(fields=["app"]),
@ -694,4 +694,3 @@ class SystemTask(SerializerModel, ExpiringModel):
permissions = [("run_task", _("Run task"))]
verbose_name = _("System Task")
verbose_name_plural = _("System Tasks")
indexes = ExpiringModel.Meta.indexes


@ -106,9 +106,9 @@ def on_invitation_used(sender, request: HttpRequest, invitation: Invitation, **_
@receiver(password_changed)
def on_password_changed(sender, user: User, password: str, request: HttpRequest | None, **_):
def on_password_changed(sender, user: User, password: str, **_):
"""Log password change"""
Event.new(EventAction.PASSWORD_SET).from_http(request, user=user)
Event.new(EventAction.PASSWORD_SET).from_http(None, user=user)
@receiver(post_save, sender=Event)


@ -138,6 +138,7 @@ def notification_cleanup(self: SystemTask):
"""Cleanup seen notifications and notifications whose event expired."""
notifications = Notification.objects.filter(Q(event=None) | Q(seen=True))
amount = notifications.count()
notifications.delete()
for notification in notifications:
notification.delete()
LOGGER.debug("Expired notifications", amount=amount)
self.set_status(TaskStatus.SUCCESSFUL, f"Expired {amount} Notifications")


@ -1,7 +1,5 @@
"""Flow Stage API Views"""
from uuid import uuid4
from django.urls.base import reverse
from drf_spectacular.utils import extend_schema
from rest_framework import mixins
@ -29,11 +27,6 @@ class StageSerializer(ModelSerializer, MetaNameSerializer):
component = SerializerMethodField()
flow_set = FlowSetSerializer(many=True, required=False)
def to_representation(self, instance: Stage):
if isinstance(instance, Stage) and instance.is_in_memory:
instance.stage_uuid = uuid4()
return super().to_representation(instance)
def get_component(self, obj: Stage) -> str:
"""Get object type so that we know how to edit the object"""
if obj.__class__ == Stage:


@ -88,8 +88,7 @@ class Migration(migrations.Migration):
model_name="flowstagebinding",
name="re_evaluate_policies",
field=models.BooleanField(
default=False,
help_text="Evaluate policies when the Stage is presented to the user.",
default=False, help_text="Evaluate policies when the Stage is present to the user."
),
),
migrations.AddField(


@ -20,7 +20,7 @@ class Migration(migrations.Migration):
model_name="flowstagebinding",
name="re_evaluate_policies",
field=models.BooleanField(
default=True, help_text="Evaluate policies when the Stage is presented to the user."
default=True, help_text="Evaluate policies when the Stage is present to the user."
),
),
]


@ -102,12 +102,8 @@ class Stage(SerializerModel):
user settings are available, or a challenge."""
return None
@property
def is_in_memory(self):
return hasattr(self, "__in_memory_type")
def __str__(self):
if self.is_in_memory:
if hasattr(self, "__in_memory_type"):
return f"In-memory Stage {getattr(self, '__in_memory_type')}"
return f"Stage {self.name}"
@ -231,7 +227,7 @@ class FlowStageBinding(SerializerModel, PolicyBindingModel):
)
re_evaluate_policies = models.BooleanField(
default=True,
help_text=_("Evaluate policies when the Stage is presented to the user."),
help_text=_("Evaluate policies when the Stage is present to the user."),
)
invalid_response_action = models.TextField(


@ -159,17 +159,9 @@ class FlowPlan:
stage = final_stage(request=request, executor=temp_exec)
return stage.dispatch(request)
get_qs = request.GET.copy()
if request.user.is_authenticated and (
# Object-scoped permission or global permission
request.user.has_perm("authentik_flows.inspect_flow", flow)
or request.user.has_perm("authentik_flows.inspect_flow")
):
get_qs["inspector"] = "available"
return redirect_with_qs(
"authentik_core:if-flow",
get_qs,
request.GET,
flow_slug=flow.slug,
)


@ -7,8 +7,8 @@ from django.http import HttpRequest, HttpResponse
from django.test.client import RequestFactory
from django.urls import reverse
from authentik.core.models import Group, User
from authentik.core.tests.utils import create_test_flow, create_test_user
from authentik.core.models import User
from authentik.core.tests.utils import create_test_flow
from authentik.flows.markers import ReevaluateMarker, StageMarker
from authentik.flows.models import (
FlowDeniedAction,
@ -255,11 +255,7 @@ class TestFlowExecutor(FlowTestCase):
)
binding = FlowStageBinding.objects.create(
target=flow,
stage=DummyStage.objects.create(name=generate_id()),
order=0,
evaluate_on_plan=True,
re_evaluate_policies=False,
target=flow, stage=DummyStage.objects.create(name=generate_id()), order=0
)
binding2 = FlowStageBinding.objects.create(
target=flow,
@ -282,8 +278,8 @@ class TestFlowExecutor(FlowTestCase):
self.assertEqual(plan.bindings[0], binding)
self.assertEqual(plan.bindings[1], binding2)
self.assertEqual(plan.markers[0].__class__, StageMarker)
self.assertEqual(plan.markers[1].__class__, ReevaluateMarker)
self.assertIsInstance(plan.markers[0], StageMarker)
self.assertIsInstance(plan.markers[1], ReevaluateMarker)
# Second request, this passes the first dummy stage
response = self.client.post(exec_url)
@ -305,11 +301,7 @@ class TestFlowExecutor(FlowTestCase):
)
binding = FlowStageBinding.objects.create(
target=flow,
stage=DummyStage.objects.create(name=generate_id()),
order=0,
evaluate_on_plan=True,
re_evaluate_policies=False,
target=flow, stage=DummyStage.objects.create(name=generate_id()), order=0
)
binding2 = FlowStageBinding.objects.create(
target=flow,
@ -318,11 +310,7 @@ class TestFlowExecutor(FlowTestCase):
re_evaluate_policies=True,
)
binding3 = FlowStageBinding.objects.create(
target=flow,
stage=DummyStage.objects.create(name=generate_id()),
order=2,
evaluate_on_plan=True,
re_evaluate_policies=False,
target=flow, stage=DummyStage.objects.create(name=generate_id()), order=2
)
PolicyBinding.objects.create(policy=false_policy, target=binding2, order=0)
@ -340,9 +328,9 @@ class TestFlowExecutor(FlowTestCase):
self.assertEqual(plan.bindings[1], binding2)
self.assertEqual(plan.bindings[2], binding3)
self.assertEqual(plan.markers[0].__class__, StageMarker)
self.assertEqual(plan.markers[1].__class__, ReevaluateMarker)
self.assertEqual(plan.markers[2].__class__, StageMarker)
self.assertIsInstance(plan.markers[0], StageMarker)
self.assertIsInstance(plan.markers[1], ReevaluateMarker)
self.assertIsInstance(plan.markers[2], StageMarker)
# Second request, this passes the first dummy stage
response = self.client.post(exec_url)
@ -353,8 +341,8 @@ class TestFlowExecutor(FlowTestCase):
self.assertEqual(plan.bindings[0], binding2)
self.assertEqual(plan.bindings[1], binding3)
self.assertEqual(plan.markers[0].__class__, ReevaluateMarker)
self.assertEqual(plan.markers[1].__class__, StageMarker)
self.assertIsInstance(plan.markers[0], StageMarker)
self.assertIsInstance(plan.markers[1], StageMarker)
# third request, this should trigger the re-evaluate
# We do this request without the patch, so the policy results in false
@ -372,11 +360,7 @@ class TestFlowExecutor(FlowTestCase):
)
binding = FlowStageBinding.objects.create(
target=flow,
stage=DummyStage.objects.create(name=generate_id()),
order=0,
evaluate_on_plan=True,
re_evaluate_policies=False,
target=flow, stage=DummyStage.objects.create(name=generate_id()), order=0
)
binding2 = FlowStageBinding.objects.create(
target=flow,
@ -385,11 +369,7 @@ class TestFlowExecutor(FlowTestCase):
re_evaluate_policies=True,
)
binding3 = FlowStageBinding.objects.create(
target=flow,
stage=DummyStage.objects.create(name=generate_id()),
order=2,
evaluate_on_plan=True,
re_evaluate_policies=False,
target=flow, stage=DummyStage.objects.create(name=generate_id()), order=2
)
PolicyBinding.objects.create(policy=true_policy, target=binding2, order=0)
@ -407,9 +387,9 @@ class TestFlowExecutor(FlowTestCase):
self.assertEqual(plan.bindings[1], binding2)
self.assertEqual(plan.bindings[2], binding3)
self.assertEqual(plan.markers[0].__class__, StageMarker)
self.assertEqual(plan.markers[1].__class__, ReevaluateMarker)
self.assertEqual(plan.markers[2].__class__, StageMarker)
self.assertIsInstance(plan.markers[0], StageMarker)
self.assertIsInstance(plan.markers[1], ReevaluateMarker)
self.assertIsInstance(plan.markers[2], StageMarker)
# Second request, this passes the first dummy stage
response = self.client.post(exec_url)
@ -420,8 +400,8 @@ class TestFlowExecutor(FlowTestCase):
self.assertEqual(plan.bindings[0], binding2)
self.assertEqual(plan.bindings[1], binding3)
self.assertEqual(plan.markers[0].__class__, ReevaluateMarker)
self.assertEqual(plan.markers[1].__class__, StageMarker)
self.assertIsInstance(plan.markers[0], StageMarker)
self.assertIsInstance(plan.markers[1], StageMarker)
# Third request, this passes the first dummy stage
response = self.client.post(exec_url)
@ -431,7 +411,7 @@ class TestFlowExecutor(FlowTestCase):
self.assertEqual(plan.bindings[0], binding3)
self.assertEqual(plan.markers[0].__class__, StageMarker)
self.assertIsInstance(plan.markers[0], StageMarker)
# third request, this should trigger the re-evaluate
# We do this request without the patch, so the policy results in false
@ -449,11 +429,7 @@ class TestFlowExecutor(FlowTestCase):
)
binding = FlowStageBinding.objects.create(
target=flow,
stage=DummyStage.objects.create(name=generate_id()),
order=0,
evaluate_on_plan=True,
re_evaluate_policies=False,
target=flow, stage=DummyStage.objects.create(name=generate_id()), order=0
)
binding2 = FlowStageBinding.objects.create(
target=flow,
@ -468,11 +444,7 @@ class TestFlowExecutor(FlowTestCase):
re_evaluate_policies=True,
)
binding4 = FlowStageBinding.objects.create(
target=flow,
stage=DummyStage.objects.create(name=generate_id()),
order=2,
evaluate_on_plan=True,
re_evaluate_policies=False,
target=flow, stage=DummyStage.objects.create(name=generate_id()), order=2
)
PolicyBinding.objects.create(policy=false_policy, target=binding2, order=0)
@ -493,10 +465,10 @@ class TestFlowExecutor(FlowTestCase):
self.assertEqual(plan.bindings[2], binding3)
self.assertEqual(plan.bindings[3], binding4)
self.assertEqual(plan.markers[0].__class__, StageMarker)
self.assertEqual(plan.markers[1].__class__, ReevaluateMarker)
self.assertEqual(plan.markers[2].__class__, ReevaluateMarker)
self.assertEqual(plan.markers[3].__class__, StageMarker)
self.assertIsInstance(plan.markers[0], StageMarker)
self.assertIsInstance(plan.markers[1], ReevaluateMarker)
self.assertIsInstance(plan.markers[2], ReevaluateMarker)
self.assertIsInstance(plan.markers[3], StageMarker)
# Second request, this passes the first dummy stage
response = self.client.post(exec_url)
@ -547,9 +519,9 @@ class TestFlowExecutor(FlowTestCase):
)
# Stage 0 is a deny stage that is added dynamically
# when the reputation policy says so
deny_stage = DenyStage.objects.create(name=generate_id())
deny_stage = DenyStage.objects.create(name="deny")
reputation_policy = ReputationPolicy.objects.create(
name=generate_id(), threshold=-1, check_ip=False
name="reputation", threshold=-1, check_ip=False
)
deny_binding = FlowStageBinding.objects.create(
target=flow,
@ -562,7 +534,7 @@ class TestFlowExecutor(FlowTestCase):
# Stage 1 is an identification stage
ident_stage = IdentificationStage.objects.create(
name=generate_id(),
name="ident",
user_fields=[UserFields.E_MAIL],
pretend_user_exists=False,
)
@ -587,64 +559,3 @@ class TestFlowExecutor(FlowTestCase):
)
response = self.client.post(exec_url, {"uid_field": "invalid-string"}, follow=True)
self.assertStageResponse(response, flow, component="ak-stage-access-denied")
def test_re_evaluate_group_binding(self):
"""Test re-evaluate stage binding that has a policy binding to a group"""
flow = create_test_flow()
user_group_membership = create_test_user()
user_direct_binding = create_test_user()
user_other = create_test_user()
group_a = Group.objects.create(name=generate_id())
user_group_membership.ak_groups.add(group_a)
# Stage 0 is an identification stage
ident_stage = IdentificationStage.objects.create(
name=generate_id(),
user_fields=[UserFields.USERNAME],
pretend_user_exists=False,
)
FlowStageBinding.objects.create(
target=flow,
stage=ident_stage,
order=0,
)
# Stage 1 is a dummy stage that is only shown for users in group_a
dummy_stage = DummyStage.objects.create(name=generate_id())
dummy_binding = FlowStageBinding.objects.create(target=flow, stage=dummy_stage, order=1)
PolicyBinding.objects.create(group=group_a, target=dummy_binding, order=0)
PolicyBinding.objects.create(user=user_direct_binding, target=dummy_binding, order=0)
# Stage 2 is a deny stage that (in this case) only user_other will see
deny_stage = DenyStage.objects.create(name=generate_id())
FlowStageBinding.objects.create(target=flow, stage=deny_stage, order=2)
exec_url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug})
with self.subTest(f"Test user access through group: {user_group_membership}"):
self.client.logout()
# First request, run the planner
response = self.client.get(exec_url)
self.assertStageResponse(response, flow, component="ak-stage-identification")
response = self.client.post(
exec_url, {"uid_field": user_group_membership.username}, follow=True
)
self.assertStageResponse(response, flow, component="ak-stage-dummy")
with self.subTest(f"Test user access through user: {user_direct_binding}"):
self.client.logout()
# First request, run the planner
response = self.client.get(exec_url)
self.assertStageResponse(response, flow, component="ak-stage-identification")
response = self.client.post(
exec_url, {"uid_field": user_direct_binding.username}, follow=True
)
self.assertStageResponse(response, flow, component="ak-stage-dummy")
with self.subTest(f"Test user has no access: {user_other}"):
self.client.logout()
# First request, run the planner
response = self.client.get(exec_url)
self.assertStageResponse(response, flow, component="ak-stage-identification")
response = self.client.post(exec_url, {"uid_field": user_other.username}, follow=True)
self.assertStageResponse(response, flow, component="ak-stage-access-denied")


@ -8,7 +8,6 @@ from rest_framework.test import APITestCase
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.flows.models import FlowDesignation, FlowStageBinding, InvalidResponseAction
from authentik.lib.generators import generate_id
from authentik.stages.dummy.models import DummyStage
from authentik.stages.identification.models import IdentificationStage, UserFields
@ -27,7 +26,7 @@ class TestFlowInspector(APITestCase):
# Stage 1 is an identification stage
ident_stage = IdentificationStage.objects.create(
name=generate_id(),
name="ident",
user_fields=[UserFields.USERNAME],
)
FlowStageBinding.objects.create(
@ -36,8 +35,9 @@ class TestFlowInspector(APITestCase):
order=1,
invalid_response_action=InvalidResponseAction.RESTART_WITH_CONTEXT,
)
dummy_stage = DummyStage.objects.create(name=generate_id())
FlowStageBinding.objects.create(target=flow, stage=dummy_stage, order=1)
FlowStageBinding.objects.create(
target=flow, stage=DummyStage.objects.create(name="dummy2"), order=1
)
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
@ -68,11 +68,9 @@ class TestFlowInspector(APITestCase):
)
content = loads(ins.content)
self.assertEqual(content["is_completed"], False)
self.assertEqual(content["current_plan"]["current_stage"]["stage_obj"]["name"], "ident")
self.assertEqual(
content["current_plan"]["current_stage"]["stage_obj"]["name"], ident_stage.name
)
self.assertEqual(
content["current_plan"]["next_planned_stage"]["stage_obj"]["name"], dummy_stage.name
content["current_plan"]["next_planned_stage"]["stage_obj"]["name"], "dummy2"
)
self.client.post(
@ -86,12 +84,8 @@ class TestFlowInspector(APITestCase):
)
content = loads(ins.content)
self.assertEqual(content["is_completed"], False)
self.assertEqual(
content["plans"][0]["current_stage"]["stage_obj"]["name"], ident_stage.name
)
self.assertEqual(
content["current_plan"]["current_stage"]["stage_obj"]["name"], dummy_stage.name
)
self.assertEqual(content["plans"][0]["current_stage"]["stage_obj"]["name"], "ident")
self.assertEqual(content["current_plan"]["current_stage"]["stage_obj"]["name"], "dummy2")
self.assertEqual(
content["current_plan"]["plan_context"]["pending_user"]["username"], self.admin.username
)


@ -29,7 +29,6 @@ from authentik.flows.planner import (
cache_key,
)
from authentik.flows.stage import StageView
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import dummy_get_response
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost
@ -154,7 +153,7 @@ class TestFlowPlanner(TestCase):
"""Test planner cache"""
flow = create_test_flow(FlowDesignation.AUTHENTICATION)
FlowStageBinding.objects.create(
target=flow, stage=DummyStage.objects.create(name=generate_id()), order=0
target=flow, stage=DummyStage.objects.create(name="dummy"), order=0
)
request = self.request_factory.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
@ -173,7 +172,7 @@ class TestFlowPlanner(TestCase):
"""Test planner with default_context"""
flow = create_test_flow()
FlowStageBinding.objects.create(
target=flow, stage=DummyStage.objects.create(name=generate_id()), order=0
target=flow, stage=DummyStage.objects.create(name="dummy"), order=0
)
user = User.objects.create(username="test-user")
@ -192,7 +191,7 @@ class TestFlowPlanner(TestCase):
FlowStageBinding.objects.create(
target=flow,
stage=DummyStage.objects.create(name=generate_id()),
stage=DummyStage.objects.create(name="dummy1"),
order=0,
re_evaluate_policies=True,
)
@ -205,7 +204,7 @@ class TestFlowPlanner(TestCase):
planner = FlowPlanner(flow)
plan = planner.plan(request)
self.assertEqual(plan.markers[0].__class__, ReevaluateMarker)
self.assertIsInstance(plan.markers[0], ReevaluateMarker)
def test_planner_reevaluate_actual(self):
"""Test planner with re-evaluate"""
@ -213,14 +212,11 @@ class TestFlowPlanner(TestCase):
false_policy = DummyPolicy.objects.create(result=False, wait_min=1, wait_max=2)
binding = FlowStageBinding.objects.create(
target=flow,
stage=DummyStage.objects.create(name=generate_id()),
order=0,
re_evaluate_policies=False,
target=flow, stage=DummyStage.objects.create(name="dummy1"), order=0
)
binding2 = FlowStageBinding.objects.create(
target=flow,
stage=DummyStage.objects.create(name=generate_id()),
stage=DummyStage.objects.create(name="dummy2"),
order=1,
re_evaluate_policies=True,
)
@ -244,8 +240,6 @@ class TestFlowPlanner(TestCase):
self.assertEqual(plan.bindings[0], binding)
self.assertEqual(plan.bindings[1], binding2)
self.assertEqual(plan.markers[0].__class__, StageMarker)
self.assertEqual(plan.markers[1].__class__, ReevaluateMarker)
self.assertIsInstance(plan.markers[0], StageMarker)
self.assertIsInstance(plan.markers[1], ReevaluateMarker)


@ -78,9 +78,7 @@ class FlowInspectorView(APIView):
self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug)
if settings.DEBUG:
return
if request.user.has_perm(
"authentik_flows.inspect_flow", self.flow
) or request.user.has_perm("authentik_flows.inspect_flow"):
if request.user.has_perm("authentik_flow.inspect_flow", self.flow):
return
raise Http404
@ -96,9 +94,6 @@ class FlowInspectorView(APIView):
"""Get current flow state and record it"""
plans = []
for plan in request.session.get(SESSION_KEY_HISTORY, []):
plan: FlowPlan
if plan.flow_pk != self.flow.pk.hex:
continue
plan_serializer = FlowInspectorPlanSerializer(
instance=plan, context={"request": request}
)


@ -280,24 +280,9 @@ class ConfigLoader:
self.log("warning", "Failed to parse config as int", path=path, exc=str(exc))
return default
def get_optional_int(self, path: str, default=None) -> int | None:
"""Wrapper for get that converts value into int or None if set"""
value = self.get(path, default)
try:
return int(value)
except (ValueError, TypeError) as exc:
if value is None or (isinstance(value, str) and value.lower() == "null"):
return None
self.log("warning", "Failed to parse config as int", path=path, exc=str(exc))
return default
def get_bool(self, path: str, default=False) -> bool:
"""Wrapper for get that converts value into boolean"""
value = self.get(path, UNSET)
if value is UNSET:
return default
return str(self.get(path)).lower() == "true"
return str(self.get(path, default)).lower() == "true"
def get_keys(self, path: str, sep=".") -> list[str]:
"""List attribute keys by using yaml path"""
@ -369,33 +354,20 @@ def django_db_config(config: ConfigLoader | None = None) -> dict:
"sslcert": config.get("postgresql.sslcert"),
"sslkey": config.get("postgresql.sslkey"),
},
"CONN_MAX_AGE": CONFIG.get_optional_int("postgresql.conn_max_age", 0),
"CONN_HEALTH_CHECKS": CONFIG.get_bool("postgresql.conn_health_checks", False),
"DISABLE_SERVER_SIDE_CURSORS": CONFIG.get_bool(
"postgresql.disable_server_side_cursors", False
),
"TEST": {
"NAME": config.get("postgresql.test.name"),
},
}
}
conn_max_age = CONFIG.get_optional_int("postgresql.conn_max_age", UNSET)
disable_server_side_cursors = CONFIG.get_bool("postgresql.disable_server_side_cursors", UNSET)
if config.get_bool("postgresql.use_pgpool", False):
db["default"]["DISABLE_SERVER_SIDE_CURSORS"] = True
if disable_server_side_cursors is not UNSET:
db["default"]["DISABLE_SERVER_SIDE_CURSORS"] = disable_server_side_cursors
if config.get_bool("postgresql.use_pgbouncer", False):
# https://docs.djangoproject.com/en/4.0/ref/databases/#transaction-pooling-server-side-cursors
db["default"]["DISABLE_SERVER_SIDE_CURSORS"] = True
# https://docs.djangoproject.com/en/4.0/ref/databases/#persistent-connections
db["default"]["CONN_MAX_AGE"] = None # persistent
if disable_server_side_cursors is not UNSET:
db["default"]["DISABLE_SERVER_SIDE_CURSORS"] = disable_server_side_cursors
if conn_max_age is not UNSET:
db["default"]["CONN_MAX_AGE"] = conn_max_age
for replica in config.get_keys("postgresql.read_replicas"):
_database = deepcopy(db["default"])


@ -6,6 +6,8 @@ postgresql:
user: authentik
port: 5432
password: "env://POSTGRES_PASSWORD"
use_pgbouncer: false
use_pgpool: false
test:
name: test_authentik
read_replicas: {}


@ -9,25 +9,20 @@ from typing import Any
from cachetools import TLRUCache, cached
from django.core.exceptions import FieldError
from django.http import HttpRequest
from django.utils.text import slugify
from django.utils.timezone import now
from guardian.shortcuts import get_anonymous_user
from rest_framework.serializers import ValidationError
from sentry_sdk import start_span
from sentry_sdk.tracing import Span
from structlog.stdlib import get_logger
from authentik.core.models import AuthenticatedSession, User
from authentik.core.models import User
from authentik.events.models import Event
from authentik.lib.expression.exceptions import ControlFlowException
from authentik.lib.utils.http import get_http_session
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.models import Policy, PolicyBinding
from authentik.policies.process import PolicyProcess
from authentik.policies.types import PolicyRequest, PolicyResult
from authentik.providers.oauth2.id_token import IDToken
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
from authentik.stages.authenticator import devices_for_user
LOGGER = get_logger()
@ -61,7 +56,6 @@ class BaseEvaluator:
"ak_logger": get_logger(self._filename).bind(),
"ak_user_by": BaseEvaluator.expr_user_by,
"ak_user_has_authenticator": BaseEvaluator.expr_func_user_has_authenticator,
"ak_create_jwt": self.expr_create_jwt,
"ip_address": ip_address,
"ip_network": ip_network,
"list_flatten": BaseEvaluator.expr_flatten,
@ -188,36 +182,6 @@ class BaseEvaluator:
proc = PolicyProcess(PolicyBinding(policy=policy), request=req, connection=None)
return proc.profiling_wrapper()
def expr_create_jwt(
self,
user: User,
provider: OAuth2Provider | str,
scopes: list[str],
validity: str = "seconds=60",
) -> str | None:
"""Issue a JWT for a given provider"""
request: HttpRequest = self._context.get("http_request")
if not request:
return None
if not isinstance(provider, OAuth2Provider):
provider = OAuth2Provider.objects.get(name=provider)
session = None
if hasattr(request, "session") and request.session.session_key:
session = AuthenticatedSession.objects.filter(
session_key=request.session.session_key
).first()
access_token = AccessToken(
provider=provider,
user=user,
expires=now() + timedelta_from_string(validity),
scope=scopes,
auth_time=now(),
session=session,
)
access_token.id_token = IDToken.new(provider, access_token, request)
access_token.save()
return access_token.token
def wrap_expression(self, expression: str) -> str:
"""Wrap expression in a function, call it, and save the result as `result`"""
handler_signature = ",".join(sanitize_arg(x) for x in self._context.keys())


@ -214,9 +214,6 @@ class TestConfig(TestCase):
"PORT": "foo",
"TEST": {"NAME": "foo"},
"USER": "foo",
"CONN_MAX_AGE": 0,
"CONN_HEALTH_CHECKS": False,
"DISABLE_SERVER_SIDE_CURSORS": False,
}
},
)
@ -254,9 +251,6 @@ class TestConfig(TestCase):
"PORT": "foo",
"TEST": {"NAME": "foo"},
"USER": "foo",
"CONN_MAX_AGE": 0,
"CONN_HEALTH_CHECKS": False,
"DISABLE_SERVER_SIDE_CURSORS": False,
},
"replica_0": {
"ENGINE": "authentik.root.db",
@ -272,72 +266,6 @@ class TestConfig(TestCase):
"PORT": "foo",
"TEST": {"NAME": "foo"},
"USER": "foo",
"CONN_MAX_AGE": 0,
"CONN_HEALTH_CHECKS": False,
"DISABLE_SERVER_SIDE_CURSORS": False,
},
},
)
def test_db_read_replicas_pgbouncer(self):
"""Test read replicas"""
config = ConfigLoader()
config.set("postgresql.host", "foo")
config.set("postgresql.name", "foo")
config.set("postgresql.user", "foo")
config.set("postgresql.password", "foo")
config.set("postgresql.port", "foo")
config.set("postgresql.sslmode", "foo")
config.set("postgresql.sslrootcert", "foo")
config.set("postgresql.sslcert", "foo")
config.set("postgresql.sslkey", "foo")
config.set("postgresql.test.name", "foo")
config.set("postgresql.use_pgbouncer", True)
# Read replica
config.set("postgresql.read_replicas.0.host", "bar")
# Override conn_max_age
config.set("postgresql.read_replicas.0.conn_max_age", 10)
# This isn't supported
config.set("postgresql.read_replicas.0.use_pgbouncer", False)
conf = django_db_config(config)
self.assertEqual(
conf,
{
"default": {
"DISABLE_SERVER_SIDE_CURSORS": True,
"CONN_MAX_AGE": None,
"CONN_HEALTH_CHECKS": False,
"ENGINE": "authentik.root.db",
"HOST": "foo",
"NAME": "foo",
"OPTIONS": {
"sslcert": "foo",
"sslkey": "foo",
"sslmode": "foo",
"sslrootcert": "foo",
},
"PASSWORD": "foo",
"PORT": "foo",
"TEST": {"NAME": "foo"},
"USER": "foo",
},
"replica_0": {
"DISABLE_SERVER_SIDE_CURSORS": True,
"CONN_MAX_AGE": 10,
"CONN_HEALTH_CHECKS": False,
"ENGINE": "authentik.root.db",
"HOST": "bar",
"NAME": "foo",
"OPTIONS": {
"sslcert": "foo",
"sslkey": "foo",
"sslmode": "foo",
"sslrootcert": "foo",
},
"PASSWORD": "foo",
"PORT": "foo",
"TEST": {"NAME": "foo"},
"USER": "foo",
},
},
)
@ -366,8 +294,6 @@ class TestConfig(TestCase):
{
"default": {
"DISABLE_SERVER_SIDE_CURSORS": True,
"CONN_MAX_AGE": 0,
"CONN_HEALTH_CHECKS": False,
"ENGINE": "authentik.root.db",
"HOST": "foo",
"NAME": "foo",
@ -384,8 +310,6 @@ class TestConfig(TestCase):
},
"replica_0": {
"DISABLE_SERVER_SIDE_CURSORS": True,
"CONN_MAX_AGE": 0,
"CONN_HEALTH_CHECKS": False,
"ENGINE": "authentik.root.db",
"HOST": "bar",
"NAME": "foo",
@ -438,9 +362,6 @@ class TestConfig(TestCase):
"PORT": "foo",
"TEST": {"NAME": "foo"},
"USER": "foo",
"DISABLE_SERVER_SIDE_CURSORS": False,
"CONN_MAX_AGE": 0,
"CONN_HEALTH_CHECKS": False,
},
"replica_0": {
"ENGINE": "authentik.root.db",
@ -456,9 +377,6 @@ class TestConfig(TestCase):
"PORT": "foo",
"TEST": {"NAME": "foo"},
"USER": "foo",
"DISABLE_SERVER_SIDE_CURSORS": False,
"CONN_MAX_AGE": 0,
"CONN_HEALTH_CHECKS": False,
},
},
)


@ -1,15 +1,11 @@
"""Test Evaluator base functions"""
from django.test import RequestFactory, TestCase
from django.urls import reverse
from jwt import decode
from django.test import TestCase
from authentik.blueprints.tests import apply_blueprint
from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_user
from authentik.core.tests.utils import create_test_admin_user
from authentik.events.models import Event
from authentik.lib.expression.evaluator import BaseEvaluator
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.models import OAuth2Provider, ScopeMapping
class TestEvaluator(TestCase):
@ -45,35 +41,3 @@ class TestEvaluator(TestCase):
event = Event.objects.filter(action="custom_foo").first()
self.assertIsNotNone(event)
self.assertEqual(event.context, {"bar": "baz", "foo": "bar"})
@apply_blueprint("system/providers-oauth2.yaml")
def test_expr_create_jwt(self):
"""Test expr_create_jwt"""
rf = RequestFactory()
user = create_test_user()
provider = OAuth2Provider.objects.create(
name=generate_id(),
authorization_flow=create_test_flow(),
)
provider.property_mappings.set(
ScopeMapping.objects.filter(
managed__in=[
"goauthentik.io/providers/oauth2/scope-openid",
"goauthentik.io/providers/oauth2/scope-email",
"goauthentik.io/providers/oauth2/scope-profile",
]
)
)
evaluator = BaseEvaluator(generate_id())
evaluator._context = {
"http_request": rf.get(reverse("authentik_core:root-redirect")),
"user": user,
"provider": provider.name,
}
jwt = evaluator.evaluate(
"return ak_create_jwt(user, provider, ['openid', 'email', 'profile'])"
)
decoded = decode(
jwt, provider.client_secret, algorithms=["HS256"], audience=provider.client_id
)
self.assertEqual(decoded["preferred_username"], user.username)


@ -207,7 +207,7 @@ class KubernetesObjectReconciler(Generic[T]):
"app.kubernetes.io/instance": slugify(self.controller.outpost.name),
"app.kubernetes.io/managed-by": "goauthentik.io",
"app.kubernetes.io/name": f"authentik-{self.controller.outpost.type.lower()}",
"app.kubernetes.io/version": get_version().replace("+", "-"),
"app.kubernetes.io/version": get_version(),
"goauthentik.io/outpost-name": slugify(self.controller.outpost.name),
"goauthentik.io/outpost-type": str(self.controller.outpost.type),
"goauthentik.io/outpost-uuid": self.controller.outpost.uuid.hex,


@ -94,7 +94,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
meta = self.get_object_meta(name=self.name)
image_name = self.controller.get_container_image()
image_pull_secrets = self.outpost.config.kubernetes_image_pull_secrets
version = get_full_version().replace("+", "-")
version = get_full_version()
return V1Deployment(
metadata=meta,
spec=V1DeploymentSpec(


@ -13,7 +13,7 @@ if TYPE_CHECKING:
from authentik.outposts.controllers.kubernetes import KubernetesController
@dataclass(slots=True)
@dataclass
class PrometheusServiceMonitorSpecEndpoint:
"""Prometheus ServiceMonitor endpoint spec"""
@ -21,14 +21,14 @@ class PrometheusServiceMonitorSpecEndpoint:
path: str = field(default="/metrics")
@dataclass(slots=True)
@dataclass
class PrometheusServiceMonitorSpecSelector:
"""Prometheus ServiceMonitor selector spec"""
matchLabels: dict
@dataclass(slots=True)
@dataclass
class PrometheusServiceMonitorSpec:
"""Prometheus ServiceMonitor spec"""
@ -37,7 +37,7 @@ class PrometheusServiceMonitorSpec:
selector: PrometheusServiceMonitorSpecSelector
@dataclass(slots=True)
@dataclass
class PrometheusServiceMonitorMetadata:
"""Prometheus ServiceMonitor metadata"""
@ -46,7 +46,7 @@ class PrometheusServiceMonitorMetadata:
labels: dict = field(default_factory=dict)
@dataclass(slots=True)
@dataclass
class PrometheusServiceMonitor:
"""Prometheus ServiceMonitor"""


@ -1,30 +0,0 @@
# Generated by Django 5.0.10 on 2025-01-13 18:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"authentik_policies_reputation",
"0007_reputation_authentik_p_identif_9434d7_idx_and_more",
),
]
operations = [
migrations.AddIndex(
model_name="reputation",
index=models.Index(fields=["expires"], name="authentik_p_expires_da493f_idx"),
),
migrations.AddIndex(
model_name="reputation",
index=models.Index(fields=["expiring"], name="authentik_p_expirin_2ab34f_idx"),
),
migrations.AddIndex(
model_name="reputation",
index=models.Index(
fields=["expiring", "expires"], name="authentik_p_expirin_2a8ec7_idx"
),
),
]


@ -96,7 +96,7 @@ class Reputation(ExpiringModel, SerializerModel):
verbose_name = _("Reputation Score")
verbose_name_plural = _("Reputation Scores")
unique_together = ("identifier", "ip")
indexes = ExpiringModel.Meta.indexes + [
indexes = [
models.Index(fields=["identifier"]),
models.Index(fields=["ip"]),
models.Index(fields=["ip", "identifier"]),


@ -1,72 +0,0 @@
# Generated by Django 5.0.10 on 2025-01-13 18:05
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"),
("authentik_providers_oauth2", "0026_alter_accesstoken_session_and_more"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.AddIndex(
model_name="accesstoken",
index=models.Index(fields=["expires"], name="authentik_p_expires_9f24a5_idx"),
),
migrations.AddIndex(
model_name="accesstoken",
index=models.Index(fields=["expiring"], name="authentik_p_expirin_2d9205_idx"),
),
migrations.AddIndex(
model_name="accesstoken",
index=models.Index(
fields=["expiring", "expires"], name="authentik_p_expirin_c74005_idx"
),
),
migrations.AddIndex(
model_name="authorizationcode",
index=models.Index(fields=["expires"], name="authentik_p_expires_f594b2_idx"),
),
migrations.AddIndex(
model_name="authorizationcode",
index=models.Index(fields=["expiring"], name="authentik_p_expirin_6a5e2c_idx"),
),
migrations.AddIndex(
model_name="authorizationcode",
index=models.Index(
fields=["expiring", "expires"], name="authentik_p_expirin_c0f353_idx"
),
),
migrations.AddIndex(
model_name="devicetoken",
index=models.Index(fields=["expires"], name="authentik_p_expires_961437_idx"),
),
migrations.AddIndex(
model_name="devicetoken",
index=models.Index(fields=["expiring"], name="authentik_p_expirin_4fd278_idx"),
),
migrations.AddIndex(
model_name="devicetoken",
index=models.Index(
fields=["expiring", "expires"], name="authentik_p_expirin_cd6b1c_idx"
),
),
migrations.AddIndex(
model_name="refreshtoken",
index=models.Index(fields=["expires"], name="authentik_p_expires_c479a7_idx"),
),
migrations.AddIndex(
model_name="refreshtoken",
index=models.Index(fields=["expiring"], name="authentik_p_expirin_d4d17f_idx"),
),
migrations.AddIndex(
model_name="refreshtoken",
index=models.Index(
fields=["expiring", "expires"], name="authentik_p_expirin_acb4a5_idx"
),
),
]


@ -425,7 +425,6 @@ class AuthorizationCode(SerializerModel, ExpiringModel, BaseGrantModel):
class Meta:
verbose_name = _("Authorization Code")
verbose_name_plural = _("Authorization Codes")
indexes = ExpiringModel.Meta.indexes
def __str__(self):
return f"Authorization code for {self.provider_id} for user {self.user_id}"
@ -454,7 +453,7 @@ class AccessToken(SerializerModel, ExpiringModel, BaseGrantModel):
_id_token = models.TextField()
class Meta:
indexes = ExpiringModel.Meta.indexes + [
indexes = [
HashIndex(fields=["token"]),
]
verbose_name = _("OAuth2 Access Token")
@ -505,7 +504,7 @@ class RefreshToken(SerializerModel, ExpiringModel, BaseGrantModel):
)
class Meta:
indexes = ExpiringModel.Meta.indexes + [
indexes = [
HashIndex(fields=["token"]),
]
verbose_name = _("OAuth2 Refresh Token")
@ -557,7 +556,6 @@ class DeviceToken(ExpiringModel):
class Meta:
verbose_name = _("Device Token")
verbose_name_plural = _("Device Tokens")
indexes = ExpiringModel.Meta.indexes
def __str__(self):
return f"Device Token for {self.provider_id}"


@ -49,9 +49,7 @@ class TesOAuth2DeviceInit(OAuthTestCase):
kwargs={
"flow_slug": self.device_flow.slug,
},
)
+ "?"
+ urlencode({"inspector": "available"}),
),
)
def test_device_init_post(self):
@ -65,9 +63,7 @@ class TesOAuth2DeviceInit(OAuthTestCase):
kwargs={
"flow_slug": self.device_flow.slug,
},
)
+ "?"
+ urlencode({"inspector": "available"}),
),
)
res = self.api_client.get(
reverse(
@ -122,9 +118,7 @@ class TesOAuth2DeviceInit(OAuthTestCase):
kwargs={
"flow_slug": provider.authorization_flow.slug,
},
)
+ "?"
+ urlencode({"inspector": "available"}),
),
},
)
@ -156,7 +150,7 @@ class TesOAuth2DeviceInit(OAuthTestCase):
},
)
+ "?"
+ urlencode({QS_KEY_CODE: token.user_code, "inspector": "available"}),
+ urlencode({QS_KEY_CODE: token.user_code}),
)
def test_device_init_denied(self):


@ -12,7 +12,6 @@ from authentik.core.tests.utils import create_test_admin_user, create_test_cert,
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.models import (
AccessToken,
ClientTypes,
IDToken,
OAuth2Provider,
RedirectURI,
@ -109,29 +108,3 @@ class TesOAuth2Revoke(OAuthTestCase):
},
)
self.assertEqual(res.status_code, 401)
def test_revoke_public(self):
"""Test revoke public client"""
self.provider.client_type = ClientTypes.PUBLIC
self.provider.save()
token: AccessToken = AccessToken.objects.create(
provider=self.provider,
user=self.user,
token=generate_id(),
auth_time=timezone.now(),
_scope="openid user profile",
_id_token=json.dumps(
asdict(
IDToken("foo", "bar"),
)
),
)
auth_public = b64encode(f"{self.provider.client_id}:{generate_id()}".encode()).decode()
res = self.client.post(
reverse("authentik_providers_oauth2:token-revoke"),
HTTP_AUTHORIZATION=f"Basic {auth_public}",
data={
"token": token.token,
},
)
self.assertEqual(res.status_code, 200)


@ -178,18 +178,12 @@ def protected_resource_view(scopes: list[str]):
return wrapper
def provider_from_request(request: HttpRequest) -> tuple[OAuth2Provider | None, str, str]:
"""Get provider from Basic auth of client_id:client_secret. Does not perform authentication"""
client_id, client_secret = extract_client_auth(request)
if client_id == client_secret == "":
return None, "", ""
provider: OAuth2Provider | None = OAuth2Provider.objects.filter(client_id=client_id).first()
return provider, client_id, client_secret
def authenticate_provider(request: HttpRequest) -> OAuth2Provider | None:
"""Attempt to authenticate via Basic auth of client_id:client_secret"""
provider, client_id, client_secret = provider_from_request(request)
client_id, client_secret = extract_client_auth(request)
if client_id == client_secret == "":
return None
provider: OAuth2Provider | None = OAuth2Provider.objects.filter(client_id=client_id).first()
if not provider:
return None
if client_id != provider.client_id or client_secret != provider.client_secret:


@ -9,12 +9,8 @@ from django.views.decorators.csrf import csrf_exempt
from structlog.stdlib import get_logger
from authentik.providers.oauth2.errors import TokenRevocationError
from authentik.providers.oauth2.models import AccessToken, ClientTypes, OAuth2Provider, RefreshToken
from authentik.providers.oauth2.utils import (
TokenResponse,
authenticate_provider,
provider_from_request,
)
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider, RefreshToken
from authentik.providers.oauth2.utils import TokenResponse, authenticate_provider
LOGGER = get_logger()
@ -31,9 +27,7 @@ class TokenRevocationParams:
"""Extract required Parameters from HTTP Request"""
raw_token = request.POST.get("token")
provider, _, _ = provider_from_request(request)
if provider and provider.client_type == ClientTypes.CONFIDENTIAL:
provider = authenticate_provider(request)
provider = authenticate_provider(request)
if not provider:
raise TokenRevocationError("invalid_client")


@ -15,7 +15,7 @@ if TYPE_CHECKING:
from authentik.outposts.controllers.kubernetes import KubernetesController
@dataclass(slots=True)
@dataclass
class TraefikMiddlewareSpecForwardAuth:
"""traefik middleware forwardAuth spec"""
@ -28,14 +28,14 @@ class TraefikMiddlewareSpecForwardAuth:
trustForwardHeader: bool = field(default=True)
@dataclass(slots=True)
@dataclass
class TraefikMiddlewareSpec:
"""Traefik middleware spec"""
forwardAuth: TraefikMiddlewareSpecForwardAuth
@dataclass(slots=True)
@dataclass
class TraefikMiddlewareMetadata:
"""Traefik Middleware metadata"""
@ -44,7 +44,7 @@ class TraefikMiddlewareMetadata:
labels: dict = field(default_factory=dict)
@dataclass(slots=True)
@dataclass
class TraefikMiddleware:
"""Traefik Middleware"""


@ -16,7 +16,6 @@ from rest_framework.decorators import action
from rest_framework.fields import CharField, FileField, SerializerMethodField
from rest_framework.parsers import MultiPartParser
from rest_framework.permissions import AllowAny
from rest_framework.renderers import BaseRenderer, JSONRenderer
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import PrimaryKeyRelatedField, ValidationError
@ -39,16 +38,6 @@ from authentik.sources.saml.processors.constants import SAML_BINDING_POST, SAML_
LOGGER = get_logger()
class RawXMLDataRenderer(BaseRenderer):
"""Renderer to allow application/xml as value for 'Accept' in the metadata endpoint."""
media_type = "application/xml"
format = "xml"
def render(self, data, accepted_media_type=None, renderer_context=None):
return data
class SAMLProviderSerializer(ProviderSerializer):
"""SAMLProvider Serializer"""
@ -65,23 +54,9 @@ class SAMLProviderSerializer(ProviderSerializer):
if "request" not in self._context:
return ""
request: HttpRequest = self._context["request"]._request
try:
return request.build_absolute_uri(
reverse(
"authentik_providers_saml:metadata-download",
kwargs={"application_slug": instance.application.slug},
)
)
except Provider.application.RelatedObjectDoesNotExist:
return request.build_absolute_uri(
reverse(
"authentik_api:samlprovider-metadata",
kwargs={
"pk": instance.pk,
},
)
+ "?download"
)
return request.build_absolute_uri(
reverse("authentik_api:samlprovider-metadata", kwargs={"pk": instance.pk}) + "?download"
)
def get_url_sso_post(self, instance: SAMLProvider) -> str:
"""Get SSO Post URL"""
@ -249,21 +224,9 @@ class SAMLProviderViewSet(UsedByMixin, ModelViewSet):
],
description="Optionally force the metadata to only include one binding.",
),
# Explicitly excluded, because otherwise spectacular automatically
# adds it when using multiple renderer_classes
OpenApiParameter(
name="format",
exclude=True,
required=False,
),
],
)
@action(
methods=["GET"],
detail=True,
permission_classes=[AllowAny],
renderer_classes=[JSONRenderer, RawXMLDataRenderer],
)
@action(methods=["GET"], detail=True, permission_classes=[AllowAny])
def metadata(self, request: Request, pk: int) -> Response:
"""Return metadata as XML string"""
# We don't use self.get_object() on purpose as this view is un-authenticated
@ -281,9 +244,9 @@ class SAMLProviderViewSet(UsedByMixin, ModelViewSet):
f'attachment; filename="{provider.name}_authentik_meta.xml"'
)
return response
return Response({"metadata": metadata}, content_type="application/json")
return Response({"metadata": metadata})
except Provider.application.RelatedObjectDoesNotExist:
return Response({"metadata": ""}, content_type="application/json")
return Response({"metadata": ""})
@permission_required(
None,


@ -256,7 +256,7 @@ class AssertionProcessor:
assertion.attrib["IssueInstant"] = self._issue_instant
assertion.append(self.get_issuer())
if self.provider.signing_kp and self.provider.sign_assertion:
if self.provider.signing_kp:
sign_algorithm_transform = SIGN_ALGORITHM_TRANSFORM_MAP.get(
self.provider.signature_algorithm, xmlsec.constants.TransformRsaSha1
)
@ -295,18 +295,6 @@ class AssertionProcessor:
response.append(self.get_issuer())
if self.provider.signing_kp and self.provider.sign_response:
sign_algorithm_transform = SIGN_ALGORITHM_TRANSFORM_MAP.get(
self.provider.signature_algorithm, xmlsec.constants.TransformRsaSha1
)
signature = xmlsec.template.create(
response,
xmlsec.constants.TransformExclC14N,
sign_algorithm_transform,
ns=xmlsec.constants.DSigNs,
)
response.append(signature)
status = SubElement(response, f"{{{NS_SAML_PROTOCOL}}}Status")
status_code = SubElement(status, f"{{{NS_SAML_PROTOCOL}}}StatusCode")
status_code.attrib["Value"] = "urn:oasis:names:tc:SAML:2.0:status:Success"


@ -104,22 +104,6 @@ class TestSAMLProviderAPI(APITestCase):
)
self.assertEqual(200, response.status_code)
self.assertIn("Content-Disposition", response)
# Test download with Accept: application/xml
response = self.client.get(
reverse("authentik_api:samlprovider-metadata", kwargs={"pk": provider.pk})
+ "?download",
HTTP_ACCEPT="application/xml",
)
self.assertEqual(200, response.status_code)
self.assertIn("Content-Disposition", response)
response = self.client.get(
reverse("authentik_api:samlprovider-metadata", kwargs={"pk": provider.pk})
+ "?download",
HTTP_ACCEPT="application/xml;charset=UTF-8",
)
self.assertEqual(200, response.status_code)
self.assertIn("Content-Disposition", response)
def test_metadata_invalid(self):
"""Test metadata export (invalid)"""
@ -137,11 +121,6 @@ class TestSAMLProviderAPI(APITestCase):
reverse("authentik_api:samlprovider-metadata", kwargs={"pk": "abc"}),
)
self.assertEqual(404, response.status_code)
response = self.client.get(
reverse("authentik_api:samlprovider-metadata", kwargs={"pk": provider.pk}),
HTTP_ACCEPT="application/invalid-mime-type",
)
self.assertEqual(406, response.status_code)
def test_import_success(self):
"""Test metadata import (success case)"""


@ -2,10 +2,8 @@
from base64 import b64encode
from defusedxml.lxml import fromstring
from django.http.request import QueryDict
from django.test import TestCase
from lxml import etree # nosec
from authentik.blueprints.tests import apply_blueprint
from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
@ -13,14 +11,12 @@ from authentik.crypto.models import CertificateKeyPair
from authentik.events.models import Event, EventAction
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import get_request
from authentik.lib.xml import lxml_from_string
from authentik.providers.saml.models import SAMLPropertyMapping, SAMLProvider
from authentik.providers.saml.processors.assertion import AssertionProcessor
from authentik.providers.saml.processors.authn_request_parser import AuthNRequestParser
from authentik.sources.saml.exceptions import MismatchedRequestID
from authentik.sources.saml.models import SAMLSource
from authentik.sources.saml.processors.constants import (
NS_MAP,
SAML_BINDING_REDIRECT,
SAML_NAME_ID_FORMAT_EMAIL,
SAML_NAME_ID_FORMAT_UNSPECIFIED,
@ -189,19 +185,6 @@ class TestAuthNRequest(TestCase):
self.assertEqual(response.count(response_proc._assertion_id), 2)
self.assertEqual(response.count(response_proc._response_id), 2)
schema = etree.XMLSchema(
etree.parse("schemas/saml-schema-protocol-2.0.xsd", parser=etree.XMLParser()) # nosec
)
self.assertTrue(schema.validate(lxml_from_string(response)))
response_xml = fromstring(response)
self.assertEqual(
len(response_xml.xpath("//saml:Assertion/ds:Signature", namespaces=NS_MAP)), 1
)
self.assertEqual(
len(response_xml.xpath("//samlp:Response/ds:Signature", namespaces=NS_MAP)), 1
)
# Now parse the response (source)
http_request.POST = QueryDict(mutable=True)
http_request.POST["SAMLResponse"] = b64encode(response.encode()).decode()


@ -5,7 +5,6 @@ from django.contrib.auth.models import Permission
from django.db.models import QuerySet
from django_filters.filters import ModelChoiceFilter
from django_filters.filterset import FilterSet
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.exceptions import ValidationError
from rest_framework.fields import (
CharField,
@ -14,8 +13,6 @@ from rest_framework.fields import (
ReadOnlyField,
SerializerMethodField,
)
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.permissions import IsAuthenticated
from rest_framework.viewsets import ReadOnlyModelViewSet
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
@ -95,9 +92,7 @@ class RBACPermissionViewSet(ReadOnlyModelViewSet):
queryset = Permission.objects.none()
serializer_class = PermissionSerializer
ordering = ["name"]
filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
filterset_class = PermissionFilter
permission_classes = [IsAuthenticated]
search_fields = [
"codename",
"content_type__model",


@ -1,15 +1,10 @@
"""RBAC API Filter"""
from django.conf import settings
from django.db.models import QuerySet
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.authentication import get_authorization_header
from rest_framework.exceptions import PermissionDenied
from rest_framework.request import Request
from rest_framework.views import APIView
from rest_framework_guardian.filters import ObjectPermissionsFilter
from authentik.api.authentication import validate_auth
from authentik.core.models import UserTypes
@ -17,7 +12,7 @@ class ObjectFilter(ObjectPermissionsFilter):
"""Object permission filter that grants global permission higher priority than
per-object permissions"""
def filter_queryset(self, request: Request, queryset: QuerySet, view: APIView) -> QuerySet:
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
permission = self.perm_format % {
"app_label": queryset.model._meta.app_label,
"model_name": queryset.model._meta.model_name,
@ -26,9 +21,6 @@ class ObjectFilter(ObjectPermissionsFilter):
# per-object permissions
if request.user.has_perm(permission):
return queryset
# User does not have permissions, but we have an owner field defined, so filter by that
if owner_field := getattr(view, "owner_field", None):
return queryset.filter(**{owner_field: request.user})
queryset = super().filter_queryset(request, queryset, view)
# Outposts (which are the only objects using internal service accounts)
# expect requests to return an empty list when they have no objects
@ -40,17 +32,3 @@ class ObjectFilter(ObjectPermissionsFilter):
# and also no object permissions assigned (directly or via role)
raise PermissionDenied()
return queryset
class SecretKeyFilter(DjangoFilterBackend):
"""Allow access to all objects when authenticated with secret key as token.
Replaces both DjangoFilterBackend and ObjectFilter"""
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
auth_header = get_authorization_header(request)
token = validate_auth(auth_header)
if token and token == settings.SECRET_KEY:
return queryset
queryset = ObjectFilter().filter_queryset(request, queryset, view)
return super().filter_queryset(request, queryset, view)


@ -15,17 +15,6 @@ class ObjectPermissions(DjangoObjectPermissions):
lookup = getattr(view, "lookup_url_kwarg", None) or getattr(view, "lookup_field", None)
if lookup and lookup in view.kwargs:
return True
# Legacy behaviour:
# Allow creation of objects even without explicit permission
queryset = self._queryset(view)
required_perms = self.get_required_permissions(request.method, queryset.model)
if (
len(required_perms) == 1
and f"{queryset.model._meta.app_label}.add_{queryset.model._meta.model_name}"
in required_perms
and getattr(view, "rbac_allow_create_without_perm", False)
):
return True
return super().has_permission(request, view)
def has_object_permission(self, request: Request, view, obj: Model) -> bool:
@ -35,10 +24,6 @@ class ObjectPermissions(DjangoObjectPermissions):
# Rank global permissions higher than per-object permissions
if request.user.has_perms(perms):
return True
# Allow access for owners if configured
if owner_field := getattr(view, "owner_field", None):
if getattr(obj, owner_field) == request.user:
return True
return super().has_object_permission(request, view, obj)


@ -18,7 +18,6 @@ from celery.signals import (
task_prerun,
worker_ready,
)
from celery.worker.control import inspect_command
from django.conf import settings
from django.db import ProgrammingError
from django_tenants.utils import get_public_schema_name
@ -26,7 +25,6 @@ from structlog.contextvars import STRUCTLOG_KEY_PREFIX
from structlog.stdlib import get_logger
from tenant_schemas_celery.app import CeleryApp as TenantAwareCeleryApp
from authentik import get_full_version
from authentik.lib.sentry import before_send
from authentik.lib.utils.errors import exception_to_string
@ -161,12 +159,6 @@ class LivenessProbe(bootsteps.StartStopStep):
HEARTBEAT_FILE.touch()
@inspect_command(default_timeout=0.2)
def ping(state, **kwargs):
"""Ping worker(s)."""
return {"ok": "pong", "version": get_full_version()}
CELERY_APP.config_from_object(settings.CELERY)
# Load task modules from all registered Django app configs.


@ -1,7 +1,10 @@
"""Kerberos Source Serializer"""
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.viewsets import ModelViewSet
from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
from authentik.core.api.sources import (
GroupSourceConnectionSerializer,
GroupSourceConnectionViewSet,
@ -29,8 +32,9 @@ class UserKerberosSourceConnectionViewSet(UsedByMixin, ModelViewSet):
serializer_class = UserKerberosSourceConnectionSerializer
filterset_fields = ["source__slug"]
search_fields = ["source__slug"]
permission_classes = [OwnerSuperuserPermissions]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
ordering = ["source__slug"]
owner_field = "user"
class GroupKerberosSourceConnectionSerializer(GroupSourceConnectionSerializer):


@ -28,19 +28,17 @@ class KerberosBackend(InbuiltBackend):
if "@" in username:
username, realm = username.rsplit("@", 1)
user, source = self.auth_user(request, username, realm, **kwargs)
user, source = self.auth_user(username, realm, **kwargs)
if user:
self.set_method("kerberos", request, source=source)
return user
return None
def auth_user(
self, request: HttpRequest, username: str, realm: str | None, password: str, **filters
self, username: str, realm: str | None, password: str, **filters
) -> tuple[User | None, KerberosSource | None]:
sources = KerberosSource.objects.filter(enabled=True)
user = User.objects.filter(
usersourceconnection__source__in=sources, username=username, **filters
).first()
user = User.objects.filter(usersourceconnection__source__in=sources, **filters).first()
if user is not None:
# User found, let's get its connections for the sources that are available
@ -76,10 +74,10 @@ class KerberosBackend(InbuiltBackend):
user=user_source_connection.user,
)
user_source_connection.user.set_password(
password, sender=user_source_connection.source, request=request
password, sender=user_source_connection.source
)
user_source_connection.user.save()
return user_source_connection.user, user_source_connection.source
return user, user_source_connection.source
# Password doesn't match, onto next source
LOGGER.debug(
"failed to kinit, password invalid",

View File

@ -12,7 +12,6 @@ from django.db.models.fields import b64decode
from django.http import HttpRequest
from django.shortcuts import reverse
from django.templatetags.static import static
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from kadmin import KAdmin, KAdminApiVersion
from kadmin.exceptions import PyKAdminException
@ -174,18 +173,12 @@ class KerberosSource(Source):
def get_base_user_properties(self, principal: str, **kwargs):
localpart, _ = principal.rsplit("@", 1)
properties = {
return {
"username": localpart,
"type": UserTypes.INTERNAL,
"path": self.get_user_path(),
}
if "principal_obj" in kwargs:
princ_expiry = kwargs["principal_obj"].expire_time
properties["is_active"] = princ_expiry is None or princ_expiry > now()
return properties
def get_base_group_properties(self, group_id: str, **kwargs):
return {
"name": group_id,

View File

@ -20,15 +20,13 @@ class LDAPBackend(InbuiltBackend):
return None
for source in LDAPSource.objects.filter(enabled=True):
LOGGER.debug("LDAP Auth attempt", source=source)
user = self.auth_user(request, source, **kwargs)
user = self.auth_user(source, **kwargs)
if user:
self.set_method("ldap", request, source=source)
return user
return None
def auth_user(
self, request: HttpRequest, source: LDAPSource, password: str, **filters: str
) -> User | None:
def auth_user(self, source: LDAPSource, password: str, **filters: str) -> User | None:
"""Try to bind as either user_dn or mail with password.
Returns True on success, otherwise False"""
users = User.objects.filter(**filters)
@ -45,7 +43,7 @@ class LDAPBackend(InbuiltBackend):
if source.password_login_update_internal_password:
# Password given successfully binds to LDAP, so we save it in our Database
LOGGER.debug("Updating user's password in DB", user=user)
user.set_password(password, sender=source, request=request)
user.set_password(password, sender=source)
user.save()
return user
# Password doesn't match

View File

@ -81,12 +81,7 @@ class OAuth2Client(BaseOAuthClient):
if self.source.source_type.urls_customizable and self.source.access_token_url:
access_token_url = self.source.access_token_url
response = self.do_request(
"post",
access_token_url,
auth=(self.get_client_id(), self.get_client_secret()),
data=args,
headers=self._default_headers,
**request_kwargs,
"post", access_token_url, data=args, headers=self._default_headers, **request_kwargs
)
response.raise_for_status()
except RequestException as exc:
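For reference, the request assembled above is a standard OAuth2 authorization-code token exchange with HTTP Basic client authentication; a standalone sketch using `requests`, with placeholder endpoint, credentials, and code:

```python
# Sketch of the token exchange: POST the authorization code to the token
# endpoint, authenticating the client via HTTP Basic (client_secret_basic).
# URL, credentials, and values below are placeholders.
import requests

response = requests.post(
    "https://provider.example.com/oauth/token",
    auth=("my-client-id", "my-client-secret"),
    data={
        "grant_type": "authorization_code",
        "code": "received-authorization-code",
        "redirect_uri": "https://authentik.example.com/callback",
    },
    timeout=10,
)
response.raise_for_status()
tokens = response.json()  # typically access_token, token_type, expires_in, ...
```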

View File

@ -88,55 +88,6 @@ class TestSCIMUsers(APITestCase):
).exists()
)
def test_user_create_duplicate_by_username(self):
"""Test user create"""
user = create_test_user()
username = generate_id()
obj1 = {
"userName": username,
"externalId": generate_id(),
"emails": [
{
"primary": True,
"value": user.email,
}
],
}
obj2 = obj1.copy()
obj2.update({"externalId": generate_id()})
response = self.client.post(
reverse(
"authentik_sources_scim:v2-users",
kwargs={
"source_slug": self.source.slug,
},
),
data=dumps(obj1),
content_type=SCIM_CONTENT_TYPE,
HTTP_AUTHORIZATION=f"Bearer {self.source.token.key}",
)
self.assertEqual(response.status_code, 201)
self.assertTrue(
SCIMSourceUser.objects.filter(source=self.source, user__username=username).exists()
)
self.assertTrue(
Event.objects.filter(
action=EventAction.MODEL_CREATED, user__username=self.source.token.user.username
).exists()
)
response = self.client.post(
reverse(
"authentik_sources_scim:v2-users",
kwargs={
"source_slug": self.source.slug,
},
),
data=dumps(obj2),
content_type=SCIM_CONTENT_TYPE,
HTTP_AUTHORIZATION=f"Bearer {self.source.token.key}",
)
self.assertEqual(response.status_code, 409)
def test_user_property_mappings(self):
"""Test user property_mappings"""
self.source.user_property_mappings.set(

View File

@ -2,7 +2,6 @@
from uuid import uuid4
from django.db.models import Q
from django.db.transaction import atomic
from django.http import Http404, QueryDict
from django.urls import reverse
@ -114,11 +113,8 @@ class UsersView(SCIMObjectView):
def post(self, request: Request, **kwargs) -> Response:
"""Create user handler"""
connection = SCIMSourceUser.objects.filter(
Q(
Q(user__uuid=request.data.get("id"))
| Q(user__username=request.data.get("userName"))
),
source=self.source,
user__uuid=request.data.get("id"),
).first()
if connection:
self.logger.debug("Found existing user")
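The lookup above relies on Django Q objects to match either identifier; a short sketch of the same pattern in isolation, where `payload` and `source` are placeholders standing in for the request data and the SCIM source:

```python
# Sketch: find an existing connection whose user matches either the SCIM
# "id" (user uuid) or the "userName" from the incoming payload.
from django.db.models import Q

connection = SCIMSourceUser.objects.filter(
    Q(user__uuid=payload.get("id")) | Q(user__username=payload.get("userName")),
    source=source,
).first()
if connection:
    print("Found existing user")
```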

View File

@ -1,18 +1,20 @@
"""AuthenticatorDuoStage API Views"""
from django.http import Http404
from django_filters.rest_framework.backends import DjangoFilterBackend
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiResponse, extend_schema, inline_serializer
from guardian.shortcuts import get_objects_for_user
from rest_framework import mixins
from rest_framework.decorators import action
from rest_framework.fields import CharField, ChoiceField, IntegerField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet, ModelViewSet
from structlog.stdlib import get_logger
from authentik.core.api.groups import GroupMemberSerializer
from authentik.api.authorization import OwnerFilter, OwnerPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.flows.api.stages import StageSerializer
@ -166,11 +168,9 @@ class AuthenticatorDuoStageViewSet(UsedByMixin, ModelViewSet):
class DuoDeviceSerializer(ModelSerializer):
"""Serializer for Duo authenticator devices"""
user = GroupMemberSerializer(read_only=True)
class Meta:
model = DuoDevice
fields = ["pk", "name", "user"]
fields = ["pk", "name"]
depth = 2
@ -189,7 +189,8 @@ class DuoDeviceViewSet(
search_fields = ["name"]
filterset_fields = ["name"]
ordering = ["name"]
owner_field = "user"
permission_classes = [OwnerPermissions]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
class DuoAdminDeviceViewSet(ModelViewSet):

View File

@ -1,9 +1,11 @@
"""AuthenticatorSMSStage API Views"""
from django_filters.rest_framework.backends import DjangoFilterBackend
from rest_framework import mixins
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.viewsets import GenericViewSet, ModelViewSet
from authentik.core.api.groups import GroupMemberSerializer
from authentik.api.authorization import OwnerFilter, OwnerPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.flows.api.stages import StageSerializer
@ -42,11 +44,9 @@ class AuthenticatorSMSStageViewSet(UsedByMixin, ModelViewSet):
class SMSDeviceSerializer(ModelSerializer):
"""Serializer for sms authenticator devices"""
user = GroupMemberSerializer(read_only=True)
class Meta:
model = SMSDevice
fields = ["name", "pk", "phone_number", "user"]
fields = ["name", "pk", "phone_number"]
depth = 2
extra_kwargs = {
"phone_number": {"read_only": True},
@ -65,10 +65,11 @@ class SMSDeviceViewSet(
queryset = SMSDevice.objects.all()
serializer_class = SMSDeviceSerializer
permission_classes = [OwnerPermissions]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
search_fields = ["name"]
filterset_fields = ["name"]
ordering = ["name"]
owner_field = "user"
class SMSAdminDeviceViewSet(ModelViewSet):

View File

@ -1,9 +1,11 @@
"""AuthenticatorStaticStage API Views"""
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import mixins
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.viewsets import GenericViewSet, ModelViewSet
from authentik.core.api.groups import GroupMemberSerializer
from authentik.api.authorization import OwnerFilter, OwnerPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.flows.api.stages import StageSerializer
@ -49,11 +51,10 @@ class StaticDeviceSerializer(ModelSerializer):
"""Serializer for static authenticator devices"""
token_set = StaticDeviceTokenSerializer(many=True, read_only=True)
user = GroupMemberSerializer(read_only=True)
class Meta:
model = StaticDevice
fields = ["name", "token_set", "pk", "user"]
fields = ["name", "token_set", "pk"]
class StaticDeviceViewSet(
@ -68,10 +69,11 @@ class StaticDeviceViewSet(
queryset = StaticDevice.objects.filter(confirmed=True)
serializer_class = StaticDeviceSerializer
permission_classes = [OwnerPermissions]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
search_fields = ["name"]
filterset_fields = ["name"]
ordering = ["name"]
owner_field = "user"
class StaticAdminDeviceViewSet(ModelViewSet):

View File

@ -1,10 +1,12 @@
"""AuthenticatorTOTPStage API Views"""
from django_filters.rest_framework.backends import DjangoFilterBackend
from rest_framework import mixins
from rest_framework.fields import ChoiceField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.viewsets import GenericViewSet, ModelViewSet
from authentik.core.api.groups import GroupMemberSerializer
from authentik.api.authorization import OwnerFilter, OwnerPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.flows.api.stages import StageSerializer
@ -38,14 +40,11 @@ class AuthenticatorTOTPStageViewSet(UsedByMixin, ModelViewSet):
class TOTPDeviceSerializer(ModelSerializer):
"""Serializer for totp authenticator devices"""
user = GroupMemberSerializer(read_only=True)
class Meta:
model = TOTPDevice
fields = [
"name",
"pk",
"user",
]
depth = 2
@ -62,10 +61,11 @@ class TOTPDeviceViewSet(
queryset = TOTPDevice.objects.filter(confirmed=True)
serializer_class = TOTPDeviceSerializer
permission_classes = [OwnerPermissions]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
search_fields = ["name"]
filterset_fields = ["name"]
ordering = ["name"]
owner_field = "user"
class TOTPAdminDeviceViewSet(ModelViewSet):

View File

@ -1,9 +1,11 @@
"""AuthenticatorWebAuthnStage API Views"""
from django_filters.rest_framework.backends import DjangoFilterBackend
from rest_framework import mixins
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.viewsets import GenericViewSet, ModelViewSet
from authentik.core.api.groups import GroupMemberSerializer
from authentik.api.authorization import OwnerFilter, OwnerPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.stages.authenticator_webauthn.api.device_types import WebAuthnDeviceTypeSerializer
@ -14,11 +16,10 @@ class WebAuthnDeviceSerializer(ModelSerializer):
"""Serializer for WebAuthn authenticator devices"""
device_type = WebAuthnDeviceTypeSerializer(read_only=True, allow_null=True)
user = GroupMemberSerializer(read_only=True)
class Meta:
model = WebAuthnDevice
fields = ["pk", "name", "created_on", "device_type", "aaguid", "user"]
fields = ["pk", "name", "created_on", "device_type", "aaguid"]
extra_kwargs = {
"aaguid": {"read_only": True},
}
@ -39,7 +40,8 @@ class WebAuthnDeviceViewSet(
search_fields = ["name"]
filterset_fields = ["name"]
ordering = ["name"]
owner_field = "user"
permission_classes = [OwnerPermissions]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
class WebAuthnAdminDeviceViewSet(ModelViewSet):

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff.