Compare commits


2 Commits

SHA1        Message         Date
7a6d44d0df  first pass      2024-10-22 15:10:13 -05:00
63c3da169a  first rewrites  2024-09-30 18:25:34 +02:00
2320 changed files with 557516 additions and 140027 deletions


@ -1,16 +1,16 @@
[bumpversion]
current_version = 2024.12.3
current_version = 2024.8.3
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
serialize =
serialize =
{major}.{minor}.{patch}-{rc_t}{rc_n}
{major}.{minor}.{patch}
message = release: {new_version}
tag_name = version/{new_version}
[bumpversion:part:rc_t]
values =
values =
rc
final
optional_value = final
@ -30,5 +30,3 @@ optional_value = final
[bumpversion:file:internal/constants/constants.go]
[bumpversion:file:web/src/common/constants.ts]
[bumpversion:file:lifecycle/aws/template.yaml]


@ -35,6 +35,14 @@ runs:
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
For arm64, use these values:
```shell
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
AUTHENTIK_TAG=${{ inputs.tag }}-arm64
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
<details>
@ -52,6 +60,18 @@ runs:
tag: ${{ inputs.tag }}
```
For arm64, use these values:
```yaml
authentik:
outposts:
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
global:
image:
repository: ghcr.io/goauthentik/dev-server
tag: ${{ inputs.tag }}-arm64
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
edit-mode: replace


@ -9,14 +9,11 @@ inputs:
image-arch:
required: false
description: "Docker image arch"
release:
required: true
description: "True if this is a release build, false if this is a dev/PR build"
outputs:
shouldPush:
description: "Whether to push the image or not"
value: ${{ steps.ev.outputs.shouldPush }}
shouldBuild:
description: "Whether to build image or not"
value: ${{ steps.ev.outputs.shouldBuild }}
sha:
description: "sha"
@ -32,24 +29,15 @@ outputs:
imageTags:
description: "Docker image tags"
value: ${{ steps.ev.outputs.imageTags }}
imageTagsJSON:
description: "Docker image tags, as a JSON array"
value: ${{ steps.ev.outputs.imageTagsJSON }}
attestImageNames:
description: "Docker image names used for attestation"
value: ${{ steps.ev.outputs.attestImageNames }}
cacheTo:
description: "cache-to value for the docker build step"
value: ${{ steps.ev.outputs.cacheTo }}
imageMainTag:
description: "Docker image main tag"
value: ${{ steps.ev.outputs.imageMainTag }}
imageMainName:
description: "Docker image main name"
value: ${{ steps.ev.outputs.imageMainName }}
imageBuildArgs:
description: "Docker image build args"
value: ${{ steps.ev.outputs.imageBuildArgs }}
runs:
using: "composite"
@ -60,8 +48,6 @@ runs:
env:
IMAGE_NAME: ${{ inputs.image-name }}
IMAGE_ARCH: ${{ inputs.image-arch }}
RELEASE: ${{ inputs.release }}
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
REF: ${{ github.ref }}
run: |
python3 ${{ github.action_path }}/push_vars.py


@ -2,20 +2,12 @@
import configparser
import os
from json import dumps
from time import time
parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
# Decide if we should push the image or not
should_push = True
if len(os.environ.get("DOCKER_USERNAME", "")) < 1:
# Don't push if we don't have DOCKER_USERNAME, i.e. no secrets are available
should_push = False
if os.environ.get("GITHUB_REPOSITORY").lower() == "goauthentik/authentik-internal":
# Don't push on the internal repo
should_push = False
should_build = str(len(os.environ.get("DOCKER_USERNAME", "")) > 0).lower()
branch_name = os.environ["GITHUB_REF"]
if os.environ.get("GITHUB_HEAD_REF", "") != "":
@ -49,7 +41,7 @@ if is_release:
]
else:
suffix = ""
if image_arch:
if image_arch and image_arch != "amd64":
suffix = f"-{image_arch}"
for name in image_names:
image_tags += [
@ -71,31 +63,12 @@ def get_attest_image_names(image_with_tags: list[str]):
return ",".join(set(image_tags))
# Generate `cache-to` param
cache_to = ""
if should_push:
_cache_tag = "buildcache"
if image_arch:
_cache_tag += f"-{image_arch}"
cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max"
image_build_args = []
if os.getenv("RELEASE", "false").lower() == "true":
image_build_args = [f"VERSION={os.getenv('REF')}"]
else:
image_build_args = [f"GIT_BUILD_HASH={sha}"]
image_build_args = "\n".join(image_build_args)
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print(f"shouldPush={str(should_push).lower()}", file=_output)
print(f"shouldBuild={should_build}", file=_output)
print(f"sha={sha}", file=_output)
print(f"version={version}", file=_output)
print(f"prerelease={prerelease}", file=_output)
print(f"imageTags={','.join(image_tags)}", file=_output)
print(f"imageTagsJSON={dumps(image_tags)}", file=_output)
print(f"attestImageNames={get_attest_image_names(image_tags)}", file=_output)
print(f"imageMainTag={image_main_tag}", file=_output)
print(f"imageMainName={image_tags[0]}", file=_output)
print(f"cacheTo={cache_to}", file=_output)
print(f"imageBuildArgs={image_build_args}", file=_output)


@ -1,18 +1,7 @@
#!/bin/bash -x
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
# Non-pushing PR
GITHUB_OUTPUT=/dev/stdout \
GITHUB_REF=ref \
GITHUB_SHA=sha \
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
GITHUB_REPOSITORY=goauthentik/authentik \
python $SCRIPT_DIR/push_vars.py
# Pushing PR/main
GITHUB_OUTPUT=/dev/stdout \
GITHUB_REF=ref \
GITHUB_SHA=sha \
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
GITHUB_REPOSITORY=goauthentik/authentik \
DOCKER_USERNAME=foo \
python $SCRIPT_DIR/push_vars.py


@ -14,7 +14,7 @@ runs:
run: |
pipx install poetry || true
sudo apt-get update
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry
uses: actions/setup-python@v5
with:
@ -35,7 +35,7 @@ runs:
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/docker-compose.yml up -d
poetry install --sync
poetry install
cd web && npm ci
- name: Generate config
shell: poetry run python {0}


@ -23,6 +23,7 @@ updates:
- package-ecosystem: npm
directories:
- "/web"
- "/tests/wdio"
- "/web/sfe"
schedule:
interval: daily
@ -82,16 +83,6 @@ updates:
docusaurus:
patterns:
- "@docusaurus/*"
- package-ecosystem: npm
directory: "/lifecycle/aws"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "lifecycle/aws:"
labels:
- dependencies
- package-ecosystem: pip
directory: "/"
schedule:


@ -1,7 +1,7 @@
<!--
👋 Hi there! Welcome.
Please check the Contributing guidelines: https://docs.goauthentik.io/docs/developer-docs/#how-can-i-contribute
Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute
-->
## Details


@ -1,96 +0,0 @@
# Re-usable workflow for a single-architecture build
name: Single-arch Container build
on:
workflow_call:
inputs:
image_name:
required: true
type: string
image_arch:
required: true
type: string
runs-on:
required: true
type: string
registry_dockerhub:
default: false
type: boolean
registry_ghcr:
default: false
type: boolean
release:
default: false
type: boolean
outputs:
image-digest:
value: ${{ jobs.build.outputs.image-digest }}
jobs:
build:
name: Build ${{ inputs.image_arch }}
runs-on: ${{ inputs.runs-on }}
outputs:
image-digest: ${{ steps.push.outputs.digest }}
permissions:
# Needed to upload container images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
steps:
- uses: actions/checkout@v4
- uses: docker/setup-qemu-action@v3.4.0
- uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
image-arch: ${{ inputs.image_arch }}
release: ${{ inputs.release }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: make empty clients
if: ${{ inputs.release }}
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: generate ts client
if: ${{ !inputs.release }}
run: make gen-client-ts
- name: Build Docker Image
uses: docker/build-push-action@v6
id: push
with:
context: .
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
build-args: |
${{ steps.ev.outputs.imageBuildArgs }}
tags: ${{ steps.ev.outputs.imageTags }}
platforms: linux/${{ inputs.image_arch }}
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
cache-to: ${{ steps.ev.outputs.cacheTo }}
- uses: actions/attest-build-provenance@v2
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true


@ -1,104 +0,0 @@
# Re-usable workflow for a multi-architecture build
name: Multi-arch container build
on:
workflow_call:
inputs:
image_name:
required: true
type: string
registry_dockerhub:
default: false
type: boolean
registry_ghcr:
default: true
type: boolean
release:
default: false
type: boolean
outputs: {}
jobs:
build-server-amd64:
uses: ./.github/workflows/_reusable-docker-build-single.yaml
secrets: inherit
with:
image_name: ${{ inputs.image_name }}
image_arch: amd64
runs-on: ubuntu-latest
registry_dockerhub: ${{ inputs.registry_dockerhub }}
registry_ghcr: ${{ inputs.registry_ghcr }}
release: ${{ inputs.release }}
build-server-arm64:
uses: ./.github/workflows/_reusable-docker-build-single.yaml
secrets: inherit
with:
image_name: ${{ inputs.image_name }}
image_arch: arm64
runs-on: ubuntu-22.04-arm
registry_dockerhub: ${{ inputs.registry_dockerhub }}
registry_ghcr: ${{ inputs.registry_ghcr }}
release: ${{ inputs.release }}
get-tags:
runs-on: ubuntu-latest
needs:
- build-server-amd64
- build-server-arm64
outputs:
tags: ${{ steps.ev.outputs.imageTagsJSON }}
shouldPush: ${{ steps.ev.outputs.shouldPush }}
steps:
- uses: actions/checkout@v4
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
merge-server:
runs-on: ubuntu-latest
if: ${{ needs.get-tags.outputs.shouldPush == 'true' }}
needs:
- get-tags
- build-server-amd64
- build-server-arm64
strategy:
fail-fast: false
matrix:
tag: ${{ fromJson(needs.get-tags.outputs.tags) }}
steps:
- uses: actions/checkout@v4
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: int128/docker-manifest-create-action@v2
id: build
with:
tags: ${{ matrix.tag }}
sources: |
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
- uses: actions/attest-build-provenance@v2
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.build.outputs.digest }}
push-to-registry: true


@ -7,7 +7,6 @@ on:
workflow_dispatch:
jobs:
build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
permissions:
id-token: write


@ -7,7 +7,6 @@ on:
workflow_dispatch:
jobs:
build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- id: generate_token


@ -1,46 +0,0 @@
name: authentik-ci-aws-cfn
on:
push:
branches:
- main
- next
- version-*
pull_request:
branches:
- main
- version-*
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
check-changes-applied:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: actions/setup-node@v4
with:
node-version-file: lifecycle/aws/package.json
cache: "npm"
cache-dependency-path: lifecycle/aws/package-lock.json
- working-directory: lifecycle/aws/
run: |
npm ci
- name: Check changes have been applied
run: |
poetry run make aws-cfn
git diff --exit-code
ci-aws-cfn-mark:
if: always()
needs:
- check-changes-applied
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}


@ -1,28 +0,0 @@
---
name: authentik-ci-main-daily
on:
workflow_dispatch:
schedule:
# Every night at 3am
- cron: "0 3 * * *"
jobs:
test-container:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
version:
- docs
- version-2024-12
- version-2024-10
steps:
- uses: actions/checkout@v4
- run: |
current="$(pwd)"
dir="/tmp/authentik/${{ matrix.version }}"
mkdir -p $dir
cd $dir
wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml
${current}/scripts/test_docker.sh


@ -43,26 +43,15 @@ jobs:
uses: ./.github/actions/setup
- name: run migrations
run: poetry run python -m lifecycle.migrate
test-make-seed:
runs-on: ubuntu-latest
steps:
- id: seed
run: |
echo "seed=$(printf "%d\n" "0x$(openssl rand -hex 4)")" >> "$GITHUB_OUTPUT"
outputs:
seed: ${{ steps.seed.outputs.seed }}
test-migrations-from-stable:
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
timeout-minutes: 20
needs: test-make-seed
strategy:
fail-fast: false
matrix:
psql:
- 15-alpine
- 16-alpine
run_id: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
with:
@ -104,23 +93,18 @@ jobs:
env:
# Test in the main database that we just migrated from the previous stable version
AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
run: |
poetry run make ci-test
poetry run make test
test-unittest:
name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
name: test-unittest - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
timeout-minutes: 20
needs: test-make-seed
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
psql:
- 15-alpine
- 16-alpine
run_id: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- name: Setup authentik env
@ -128,14 +112,11 @@ jobs:
with:
postgresql_version: ${{ matrix.psql }}
- name: run unittest
env:
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
run: |
poetry run make ci-test
poetry run make test
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: unit
token: ${{ secrets.CODECOV_TOKEN }}
@ -153,13 +134,13 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Create k8s Kind Cluster
uses: helm/kind-action@v1.12.0
uses: helm/kind-action@v1.10.0
- name: run integration
run: |
poetry run coverage run manage.py test tests/integration
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: integration
token: ${{ secrets.CODECOV_TOKEN }}
@ -199,7 +180,7 @@ jobs:
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
run: |
docker compose -f tests/e2e/docker-compose.yml up -d --quiet-pull
docker compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v4
with:
@ -217,7 +198,7 @@ jobs:
poetry run coverage run manage.py test ${{ matrix.job.glob }}
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: e2e
token: ${{ secrets.CODECOV_TOKEN }}
@ -228,7 +209,6 @@ jobs:
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
ci-core-mark:
if: always()
needs:
- lint
- test-migrations
@ -238,22 +218,70 @@ jobs:
- test-e2e
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
- run: echo mark
build:
strategy:
fail-fast: false
matrix:
arch:
- amd64
- arm64
needs: ci-core-mark
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
needs: ci-core-mark
uses: ./.github/workflows/_reusable-docker-build.yaml
secrets: inherit
with:
image_name: ghcr.io/goauthentik/dev-server
release: false
timeout-minutes: 120
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/dev-server
image-arch: ${{ matrix.arch }}
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: generate ts client
run: make gen-client-ts
- name: Build Docker Image
uses: docker/build-push-action@v6
id: push
with:
context: .
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
tags: ${{ steps.ev.outputs.imageTags }}
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache
cache-to: ${{ steps.ev.outputs.shouldBuild == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max' || '' }}
platforms: linux/${{ matrix.arch }}
- uses: actions/attest-build-provenance@v1
id: attest
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
pr-comment:
needs:
- build
@ -275,7 +303,7 @@ jobs:
with:
image-name: ghcr.io/goauthentik/dev-server
- name: Comment on PR
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
uses: ./.github/actions/comment-pr-instructions
with:
tag: ${{ steps.ev.outputs.imageMainTag }}


@ -49,15 +49,12 @@ jobs:
run: |
go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./...
ci-outpost-mark:
if: always()
needs:
- lint-golint
- test-unittest
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
- run: echo mark
build-container:
timeout-minutes: 120
needs:
@ -72,7 +69,7 @@ jobs:
- rac
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
@ -82,7 +79,7 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.4.0
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
@ -93,7 +90,7 @@ jobs:
with:
image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
@ -107,16 +104,16 @@ jobs:
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: ${{ matrix.type }}.Dockerfile
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
platforms: linux/amd64,linux/arm64
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }}
- uses: actions/attest-build-provenance@v2
cache-to: ${{ steps.ev.outputs.shouldBuild == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }}
- uses: actions/attest-build-provenance@v1
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}


@ -24,11 +24,17 @@ jobs:
- prettier-check
project:
- web
- tests/wdio
include:
- command: tsc
project: web
- command: lit-analyse
project: web
exclude:
- command: lint:lockfile
project: tests/wdio
- command: tsc
project: tests/wdio
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
@ -44,7 +50,15 @@ jobs:
- name: Lint
working-directory: ${{ matrix.project }}/
run: npm run ${{ matrix.command }}
ci-web-mark:
needs:
- lint
runs-on: ubuntu-latest
steps:
- run: echo mark
build:
needs:
- ci-web-mark
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@ -60,16 +74,6 @@ jobs:
- name: build
working-directory: web/
run: npm run build
ci-web-mark:
if: always()
needs:
- build
- lint
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
test:
needs:
- ci-web-mark


@ -62,13 +62,10 @@ jobs:
working-directory: website/
run: npm run ${{ matrix.job }}
ci-website-mark:
if: always()
needs:
- lint
- test
- build
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
- run: echo mark


@ -11,7 +11,6 @@ env:
jobs:
build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- id: generate_token


@ -7,7 +7,6 @@ on:
jobs:
clean-ghcr:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
name: Delete old unused container images
runs-on: ubuntu-latest
steps:


@ -12,7 +12,6 @@ env:
jobs:
publish-source-docs:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
timeout-minutes: 120
steps:


@ -11,7 +11,6 @@ permissions:
jobs:
update-next:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
environment: internal-production
steps:


@ -7,23 +7,64 @@ on:
jobs:
build-server:
uses: ./.github/workflows/_reusable-docker-build.yaml
secrets: inherit
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
with:
image_name: ghcr.io/goauthentik/server,beryju/authentik
release: true
registry_dockerhub: true
registry_ghcr: true
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/server,beryju/authentik
- name: Docker Login Registry
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: make empty clients
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: Build Docker Image
uses: docker/build-push-action@v6
id: push
with:
context: .
push: true
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
build-args: |
VERSION=${{ github.ref }}
tags: ${{ steps.ev.outputs.imageTags }}
platforms: linux/amd64,linux/arm64
- uses: actions/attest-build-provenance@v1
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
build-outpost:
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
@ -42,7 +83,7 @@ jobs:
with:
go-version-file: "go.mod"
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.4.0
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
@ -78,7 +119,7 @@ jobs:
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
context: .
- uses: actions/attest-build-provenance@v2
- uses: actions/attest-build-provenance@v1
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
@ -128,27 +169,6 @@ jobs:
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
tag: ${{ github.ref }}
upload-aws-cfn-template:
permissions:
# Needed for AWS login
id-token: write
contents: read
needs:
- build-server
- build-outpost
env:
AWS_REGION: eu-central-1
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: aws-actions/configure-aws-credentials@v4
with:
role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik"
aws-region: ${{ env.AWS_REGION }}
- name: Upload template
run: |
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml
test-release:
needs:
- build-server


@ -14,7 +14,16 @@ jobs:
- uses: actions/checkout@v4
- name: Pre-release test
run: |
make test-docker
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
docker buildx install
mkdir -p ./gen-ts-api
docker build -t testing:latest .
echo "AUTHENTIK_IMAGE=testing" >> .env
echo "AUTHENTIK_TAG=latest" >> .env
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
- id: generate_token
uses: tibdex/github-app-token@v2
with:


@ -1,21 +0,0 @@
name: "authentik-repo-mirror"
on: [push, delete]
jobs:
to_internal:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- if: ${{ env.MIRROR_KEY != '' }}
uses: pixta-dev/repository-mirroring-action@v1
with:
target_repo_url:
git@github.com:goauthentik/authentik-internal.git
ssh_private_key:
${{ secrets.GH_MIRROR_KEY }}
env:
MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }}


@ -1,8 +1,8 @@
name: "authentik-repo-stale"
name: 'authentik-repo-stale'
on:
schedule:
- cron: "30 1 * * *"
- cron: '30 1 * * *'
workflow_dispatch:
permissions:
@ -11,7 +11,6 @@ permissions:
jobs:
stale:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- id: generate_token
@ -25,7 +24,7 @@ jobs:
days-before-stale: 60
days-before-close: 7
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
stale-issue-label: status/stale
stale-issue-label: wontfix
stale-issue-message: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you

.gitignore

@ -209,6 +209,3 @@ source_docs/
### Golang ###
/vendor/
### Docker ###
docker-compose.override.yml


@ -2,7 +2,6 @@
"recommendations": [
"bashmish.es6-string-css",
"bpruitt-goddard.mermaid-markdown-syntax-highlighting",
"charliermarsh.ruff",
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"esbenp.prettier-vscode",
@ -11,12 +10,12 @@
"Gruntfuggly.todo-tree",
"mechatroner.rainbow-csv",
"ms-python.black-formatter",
"ms-python.black-formatter",
"ms-python.debugpy",
"charliermarsh.ruff",
"ms-python.python",
"ms-python.vscode-pylance",
"ms-python.black-formatter",
"redhat.vscode-yaml",
"Tobermory.es6-string-html",
"unifiedjs.vscode-mdx",
"unifiedjs.vscode-mdx"
]
}

.vscode/launch.json

@ -2,76 +2,26 @@
"version": "0.2.0",
"configurations": [
{
"name": "Debug: Attach Server Core",
"type": "debugpy",
"name": "Python: PDB attach Server",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 9901
"port": 6800
},
"pathMappings": [
{
"localRoot": "${workspaceFolder}",
"remoteRoot": "."
}
],
"justMyCode": true,
"django": true
},
{
"name": "Debug: Attach Worker",
"type": "debugpy",
"name": "Python: PDB attach Worker",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 9901
"port": 6900
},
"pathMappings": [
{
"localRoot": "${workspaceFolder}",
"remoteRoot": "."
}
],
"justMyCode": true,
"django": true
},
{
"name": "Debug: Start Server Router",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/server",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start LDAP Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/ldap",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start Proxy Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/proxy",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start RAC Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/rac",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start Radius Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/radius",
"cwd": "${workspaceFolder}"
}
]
}


@ -6,7 +6,6 @@
"authn",
"entra",
"goauthentik",
"jwe",
"jwks",
"kubernetes",
"oidc",
@ -33,8 +32,7 @@
"!If sequence",
"!Index scalar",
"!KeyOf scalar",
"!Value scalar",
"!AtIndex scalar"
"!Value scalar"
],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.importModuleSpecifierEnding": "index",


@ -15,23 +15,14 @@ go.mod @goauthentik/backend
go.sum @goauthentik/backend
# Infrastructure
.github/ @goauthentik/infrastructure
lifecycle/aws/ @goauthentik/infrastructure
Dockerfile @goauthentik/infrastructure
*Dockerfile @goauthentik/infrastructure
.dockerignore @goauthentik/infrastructure
docker-compose.yml @goauthentik/infrastructure
Makefile @goauthentik/infrastructure
.editorconfig @goauthentik/infrastructure
CODEOWNERS @goauthentik/infrastructure
# Web
web/ @goauthentik/frontend
tests/wdio/ @goauthentik/frontend
# Locale
locale/ @goauthentik/backend @goauthentik/frontend
web/xliff/ @goauthentik/backend @goauthentik/frontend
# Docs & Website
website/ @goauthentik/docs
CODE_OF_CONDUCT.md @goauthentik/docs
# Security
SECURITY.md @goauthentik/security @goauthentik/docs
website/docs/security/ @goauthentik/security @goauthentik/docs
website/docs/security/ @goauthentik/security


@ -1 +1 @@
website/docs/developer-docs/index.md
website/developer-docs/index.md


@ -80,7 +80,7 @@ RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
go build -o /go/authentik ./cmd/server
# Stage 4: MaxMind GeoIP
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 AS geoip
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
ENV GEOIPUPDATE_VERBOSE="1"
@ -94,7 +94,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
# Stage 5: Python dependencies
FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps
FROM ghcr.io/goauthentik/fips-python:3.12.6-slim-bookworm-fips-full AS python-deps
ARG TARGETARCH
ARG TARGETVARIANT
@ -110,36 +110,21 @@ RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloa
RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
apt-get update && \
# Required for installing pip packages
apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev libkrb5-dev
apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev
RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
--mount=type=bind,target=./poetry.lock,src=./poetry.lock \
--mount=type=cache,target=/root/.cache/pip \
--mount=type=cache,target=/root/.cache/pypoetry \
pip install --no-cache cffi && \
apt-get update && \
apt-get install -y --no-install-recommends \
build-essential libffi-dev \
# Required for cryptography
curl pkg-config \
# Required for lxml
libxslt-dev zlib1g-dev \
# Required for xmlsec
libltdl-dev \
# Required for kadmin
sccache clang && \
curl https://sh.rustup.rs -sSf | sh -s -- -y && \
. "$HOME/.cargo/env" && \
python -m venv /ak-root/venv/ && \
bash -c "source ${VENV_PATH}/bin/activate && \
pip3 install --upgrade pip poetry && \
poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \
pip3 install --upgrade pip && \
pip3 install poetry && \
poetry install --only=main --no-ansi --no-interaction --no-root && \
pip uninstall cryptography -y && \
poetry install --only=main --no-ansi --no-interaction --no-root"
pip install --force-reinstall /wheels/*"
# Stage 6: Run
FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image
FROM ghcr.io/goauthentik/fips-python:3.12.6-slim-bookworm-fips-full AS final-image
ARG VERSION
ARG GIT_BUILD_HASH
@ -155,12 +140,10 @@ WORKDIR /
# We cannot cache this layer otherwise we'll end up with a bigger image
RUN apt-get update && \
apt-get upgrade -y && \
# Required for runtime
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 libltdl7 libxslt1.1 && \
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates && \
# Required for bootstrap & healtcheck
apt-get install -y --no-install-recommends runit && \
pip3 install --no-cache-dir --upgrade pip && \
apt-get clean && \
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
@ -178,7 +161,6 @@ COPY ./tests /tests
COPY ./manage.py /
COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle
COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
COPY --from=go-builder /go/authentik /bin/authentik
COPY --from=python-deps /ak-root/venv /ak-root/venv
COPY --from=web-builder /work/web/dist/ /web/dist/
@ -193,8 +175,9 @@ ENV TMPDIR=/dev/shm/ \
PYTHONUNBUFFERED=1 \
PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
VENV_PATH="/ak-root/venv" \
POETRY_VIRTUALENVS_CREATE=false \
GOFIPS=1
POETRY_VIRTUALENVS_CREATE=false
ENV GOFIPS=1
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]


@ -6,8 +6,6 @@ UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.npm_version)
PY_SOURCES = authentik tests scripts lifecycle .github
GO_SOURCES = cmd internal
WEB_SOURCES = web/src web/packages
DOCKER_IMAGE ?= "authentik:test"
GEN_API_TS = "gen-ts-api"
@ -22,13 +20,13 @@ CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
-I .github/codespell-words.txt \
-S 'web/src/locales/**' \
-S 'website/developer-docs/api/reference/**' \
-S '**/node_modules/**' \
-S '**/dist/**' \
$(PY_SOURCES) \
$(GO_SOURCES) \
$(WEB_SOURCES) \
authentik \
internal \
cmd \
web/src \
website/src \
website/blog \
website/developer-docs \
website/docs \
website/integrations \
website/src
@ -48,6 +46,15 @@ help: ## Show this help
go-test:
go test -timeout 0 -v -race -cover ./...
test-docker: ## Run all tests in a docker-compose
echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env
docker compose pull -q
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
rm -f .env
test: ## Run the server tests and produce a coverage report (locally)
coverage run manage.py test --keepdb authentik
coverage html
@ -72,9 +79,6 @@ migrate: ## Run the Authentik Django server's migrations
i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service
aws-cfn:
cd lifecycle/aws && npm run aws-cfn
core-i18n-extract:
ak makemessages \
--add-location file \
@ -146,7 +150,7 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/${GEN_API_TS} \
@ -257,9 +261,6 @@ docker: ## Build a docker image of the current source tree
mkdir -p ${GEN_API_TS}
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
test-docker:
BUILD=true ./scripts/test_docker.sh
#########################
## CI
#########################
@ -284,8 +285,3 @@ ci-bandit: ci--meta-debug
ci-pending-migrations: ci--meta-debug
ak makemigrations --check
ci-test: ci--meta-debug
coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
coverage report
coverage xml


@ -34,7 +34,7 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h
## Development
See [Developer Documentation](https://docs.goauthentik.io/docs/developer-docs/?utm_source=github)
See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github)
## Security


@ -2,7 +2,7 @@ authentik takes security very seriously. We follow the rules of [responsible di
## Independent audits and pentests
We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specfic audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security).
In May/June of 2023 [Cure53](https://cure53.de) conducted an audit and pentest. The [results](https://cure53.de/pentest-report_authentik.pdf) are published on the [Cure53 website](https://cure53.de/#publications-2023). For more details about authentik's response to the findings of the audit refer to [2023-06 Cure53 Code audit](https://goauthentik.io/docs/security/2023-06-cure53).
## What authentik classifies as a CVE
@ -18,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
(.x being the latest patch release for each version)
| Version | Supported |
| --------- | --------- |
| 2024.10.x | ✅ |
| 2024.12.x | ✅ |
| Version | Supported |
| -------- | --------- |
| 2024.6.x | ✅ |
| 2024.8.x | ✅ |
## Reporting a Vulnerability


@ -2,7 +2,7 @@
from os import environ
__version__ = "2024.12.3"
__version__ = "2024.8.3"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@ -16,5 +16,5 @@ def get_full_version() -> str:
"""Get full version, with build hash appended"""
version = __version__
if (build_hash := get_build_hash()) != "":
return f"{version}+{build_hash}"
version += "." + build_hash
return version


@ -7,9 +7,7 @@ from sys import version as python_version
from typing import TypedDict
from cryptography.hazmat.backends.openssl.backend import backend
from django.conf import settings
from django.utils.timezone import now
from django.views.debug import SafeExceptionReporterFilter
from drf_spectacular.utils import extend_schema
from rest_framework.fields import SerializerMethodField
from rest_framework.request import Request
@ -54,16 +52,10 @@ class SystemInfoSerializer(PassiveSerializer):
def get_http_headers(self, request: Request) -> dict[str, str]:
"""Get HTTP Request headers"""
headers = {}
raw_session = request._request.COOKIES.get(settings.SESSION_COOKIE_NAME)
for key, value in request.META.items():
if not isinstance(value, str):
continue
actual_value = value
if raw_session in actual_value:
actual_value = actual_value.replace(
raw_session, SafeExceptionReporterFilter.cleansed_substitute
)
headers[key] = actual_value
headers[key] = value
return headers
def get_http_host(self, request: Request) -> str:


@ -1,33 +0,0 @@
from rest_framework.permissions import IsAdminUser
from rest_framework.viewsets import ReadOnlyModelViewSet
from authentik.admin.models import VersionHistory
from authentik.core.api.utils import ModelSerializer
class VersionHistorySerializer(ModelSerializer):
"""VersionHistory Serializer"""
class Meta:
model = VersionHistory
fields = [
"id",
"timestamp",
"version",
"build",
]
class VersionHistoryViewSet(ReadOnlyModelViewSet):
"""VersionHistory Viewset"""
queryset = VersionHistory.objects.all()
serializer_class = VersionHistorySerializer
permission_classes = [IsAdminUser]
filterset_fields = [
"version",
"build",
]
search_fields = ["version", "build"]
ordering = ["-timestamp"]
pagination_class = None


@ -1,16 +1,12 @@
"""authentik administration overview"""
from socket import gethostname
from django.conf import settings
from drf_spectacular.utils import extend_schema, inline_serializer
from packaging.version import parse
from rest_framework.fields import BooleanField, CharField
from rest_framework.fields import IntegerField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik import get_full_version
from authentik.rbac.permissions import HasPermission
from authentik.root.celery import CELERY_APP
@ -20,38 +16,11 @@ class WorkerView(APIView):
permission_classes = [HasPermission("authentik_rbac.view_system_info")]
@extend_schema(
responses=inline_serializer(
"Worker",
fields={
"worker_id": CharField(),
"version": CharField(),
"version_matching": BooleanField(),
},
many=True,
)
)
@extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()}))
def get(self, request: Request) -> Response:
"""Get currently connected worker count."""
raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5)
our_version = parse(get_full_version())
response = []
for worker in raw:
key = list(worker.keys())[0]
version = worker[key].get("version")
version_matching = False
if version:
version_matching = parse(version) == our_version
response.append(
{"worker_id": key, "version": version, "version_matching": version_matching}
)
count = len(CELERY_APP.control.ping(timeout=0.5))
# In debug we run with `task_always_eager`, so tasks are ran on the main process
if settings.DEBUG: # pragma: no cover
response.append(
{
"worker_id": f"authentik-debug@{gethostname()}",
"version": get_full_version(),
"version_matching": True,
}
)
return Response(response)
count += 1
return Response({"count": count})


@ -1,10 +1,11 @@
"""authentik admin app config"""
from prometheus_client import Info
from prometheus_client import Gauge, Info
from authentik.blueprints.apps import ManagedAppConfig
PROM_INFO = Info("authentik_version", "Currently running authentik version")
GAUGE_WORKERS = Gauge("authentik_admin_workers", "Currently connected workers")
class AuthentikAdminConfig(ManagedAppConfig):


@ -1,22 +0,0 @@
"""authentik admin models"""
from django.db import models
from django.utils.translation import gettext_lazy as _
class VersionHistory(models.Model):
id = models.BigAutoField(primary_key=True)
timestamp = models.DateTimeField()
version = models.TextField()
build = models.TextField()
class Meta:
managed = False
db_table = "authentik_version_history"
ordering = ("-timestamp",)
verbose_name = _("Version history")
verbose_name_plural = _("Version history")
default_permissions = []
def __str__(self):
return f"{self.version}.{self.build} ({self.timestamp})"


@ -1,35 +1,14 @@
"""admin signals"""
from django.dispatch import receiver
from packaging.version import parse
from prometheus_client import Gauge
from authentik import get_full_version
from authentik.admin.apps import GAUGE_WORKERS
from authentik.root.celery import CELERY_APP
from authentik.root.monitoring import monitoring_set
GAUGE_WORKERS = Gauge(
"authentik_admin_workers",
"Currently connected workers, their versions and if they are the same version as authentik",
["version", "version_matched"],
)
_version = parse(get_full_version())
@receiver(monitoring_set)
def monitoring_set_workers(sender, **kwargs):
"""Set worker gauge"""
raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5)
worker_version_count = {}
for worker in raw:
key = list(worker.keys())[0]
version = worker[key].get("version")
version_matching = False
if version:
version_matching = parse(version) == _version
worker_version_count.setdefault(version, {"count": 0, "matching": version_matching})
worker_version_count[version]["count"] += 1
for version, stats in worker_version_count.items():
GAUGE_WORKERS.labels(version, stats["matching"]).set(stats["count"])
count = len(CELERY_APP.control.ping(timeout=0.5))
GAUGE_WORKERS.set(count)


@ -34,7 +34,7 @@ class TestAdminAPI(TestCase):
response = self.client.get(reverse("authentik_api:admin_workers"))
self.assertEqual(response.status_code, 200)
body = loads(response.content)
self.assertEqual(len(body), 0)
self.assertEqual(body["count"], 0)
def test_metrics(self):
"""Test metrics API"""


@ -6,7 +6,6 @@ from authentik.admin.api.meta import AppsViewSet, ModelViewSet
from authentik.admin.api.metrics import AdministrationMetricsViewSet
from authentik.admin.api.system import SystemView
from authentik.admin.api.version import VersionView
from authentik.admin.api.version_history import VersionHistoryViewSet
from authentik.admin.api.workers import WorkerView
api_urlpatterns = [
@ -18,7 +17,6 @@ api_urlpatterns = [
name="admin_metrics",
),
path("admin/version/", VersionView.as_view(), name="admin_version"),
("admin/version/history", VersionHistoryViewSet, "version_history"),
path("admin/workers/", WorkerView.as_view(), name="admin_workers"),
path("admin/system/", SystemView.as_view(), name="admin_system"),
]


@ -0,0 +1,67 @@
"""API Authorization"""
from django.conf import settings
from django.db.models import Model
from django.db.models.query import QuerySet
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.authentication import get_authorization_header
from rest_framework.filters import BaseFilterBackend
from rest_framework.permissions import BasePermission
from rest_framework.request import Request
from authentik.api.authentication import validate_auth
from authentik.rbac.filters import ObjectFilter
class OwnerFilter(BaseFilterBackend):
"""Filter objects by their owner"""
owner_key = "user"
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
if request.user.is_superuser:
return queryset
return queryset.filter(**{self.owner_key: request.user})
class SecretKeyFilter(DjangoFilterBackend):
"""Allow access to all objects when authenticated with secret key as token.
Replaces both DjangoFilterBackend and ObjectFilter"""
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
auth_header = get_authorization_header(request)
token = validate_auth(auth_header)
if token and token == settings.SECRET_KEY:
return queryset
queryset = ObjectFilter().filter_queryset(request, queryset, view)
return super().filter_queryset(request, queryset, view)
class OwnerPermissions(BasePermission):
"""Authorize requests by an object's owner matching the requesting user"""
owner_key = "user"
def has_permission(self, request: Request, view) -> bool:
"""If the user is authenticated, we allow all requests here. For listing, the
object-level permissions are done by the filter backend"""
return request.user.is_authenticated
def has_object_permission(self, request: Request, view, obj: Model) -> bool:
"""Check if the object's owner matches the currently logged in user"""
if not hasattr(obj, self.owner_key):
return False
owner = getattr(obj, self.owner_key)
if owner != request.user:
return False
return True
class OwnerSuperuserPermissions(OwnerPermissions):
"""Similar to OwnerPermissions, except always allow access for superusers"""
def has_object_permission(self, request: Request, view, obj: Model) -> bool:
if request.user.is_superuser:
return True
return super().has_object_permission(request, view, obj)


@ -7,7 +7,7 @@ API Browser - {{ brand.branding_title }}
{% endblock %}
{% block head %}
<script src="{% versioned_script 'dist/standalone/api-browser/index-%v.js' %}" type="module"></script>
{% versioned_script "dist/standalone/api-browser/index-%v.js" %}
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
{% endblock %}


@ -51,11 +51,9 @@ class BlueprintInstanceSerializer(ModelSerializer):
context = self.instance.context if self.instance else {}
valid, logs = Importer.from_string(content, context).validate()
if not valid:
text_logs = "\n".join([x["event"] for x in logs])
raise ValidationError(
[
_("Failed to validate blueprint"),
*[f"- {x.event}" for x in logs],
]
_("Failed to validate blueprint: {logs}".format_map({"logs": text_logs}))
)
return content


@ -1,68 +0,0 @@
"""Test and debug Blueprints"""
import atexit
import readline
from pathlib import Path
from pprint import pformat
from sys import exit as sysexit
from textwrap import indent
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
from yaml import load
from authentik.blueprints.v1.common import BlueprintLoader, EntryInvalidError
from authentik.core.management.commands.shell import get_banner_text
from authentik.lib.utils.errors import exception_to_string
LOGGER = get_logger()
class Command(BaseCommand):
"""Test and debug Blueprints"""
lines = []
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
histfolder = Path("~").expanduser() / Path(".local/share/authentik")
histfolder.mkdir(parents=True, exist_ok=True)
histfile = histfolder / Path("blueprint_shell_history")
readline.parse_and_bind("tab: complete")
readline.parse_and_bind("set editing-mode vi")
try:
readline.read_history_file(str(histfile))
except FileNotFoundError:
pass
atexit.register(readline.write_history_file, str(histfile))
@no_translations
def handle(self, *args, **options):
"""Interactively debug blueprint files"""
self.stdout.write(get_banner_text("Blueprint shell"))
self.stdout.write("Type '.eval' to evaluate previously entered statement(s).")
def do_eval():
yaml_input = "\n".join([line for line in self.lines if line])
data = load(yaml_input, BlueprintLoader)
self.stdout.write(pformat(data))
self.lines = []
while True:
try:
line = input("> ")
if line == ".eval":
do_eval()
else:
self.lines.append(line)
except EntryInvalidError as exc:
self.stdout.write("Failed to evaluate expression:")
self.stdout.write(indent(exception_to_string(exc), prefix=" "))
except EOFError:
break
except KeyboardInterrupt:
self.stdout.write()
sysexit(0)
self.stdout.write()


@ -126,7 +126,7 @@ class Command(BaseCommand):
def_name_perm = f"model_{model_path}_permissions"
def_path_perm = f"#/$defs/{def_name_perm}"
self.schema["$defs"][def_name_perm] = self.model_permissions(model)
template = {
return {
"type": "object",
"required": ["model", "identifiers"],
"properties": {
@ -143,11 +143,6 @@ class Command(BaseCommand):
"identifiers": {"$ref": def_path},
},
}
# Meta models don't require identifiers, as there's no matching database model to find
if issubclass(model, BaseMetaModel):
del template["properties"]["identifiers"]
template["required"].remove("identifiers")
return template
def field_to_jsonschema(self, field: Field) -> dict:
"""Convert a single field to json schema"""


@ -29,7 +29,9 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
if version != 1:
return
blueprint_file.seek(0)
instance = BlueprintInstance.objects.using(db_alias).filter(path=path).first()
instance: BlueprintInstance = (
BlueprintInstance.objects.using(db_alias).filter(path=path).first()
)
rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
meta = None
if metadata:


@ -146,10 +146,6 @@ entries:
]
]
nested_context: !Context context2
at_index_sequence: !AtIndex [!Context sequence, 0]
at_index_sequence_default: !AtIndex [!Context sequence, 100, "non existent"]
at_index_mapping: !AtIndex [!Context mapping, "key2"]
at_index_mapping_default: !AtIndex [!Context mapping, "invalid", "non existent"]
identifiers:
name: test
conditions:


@ -27,8 +27,7 @@ def blueprint_tester(file_name: Path) -> Callable:
base = Path("blueprints/")
rel_path = Path(file_name).relative_to(base)
importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve())
validation, logs = importer.validate()
self.assertTrue(validation, logs)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
return tester


@ -215,10 +215,6 @@ class TestBlueprintsV1(TransactionTestCase):
},
"nested_context": "context-nested-value",
"env_null": None,
"at_index_sequence": "foo",
"at_index_sequence_default": "non existent",
"at_index_mapping": 2,
"at_index_mapping_default": "non existent",
}
).exists()
)


@ -78,5 +78,5 @@ class TestBlueprintsV1API(APITestCase):
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(
res.content.decode(),
{"content": ["Failed to validate blueprint", "- Invalid blueprint version"]},
{"content": ["Failed to validate blueprint: Invalid blueprint version"]},
)


@ -24,10 +24,6 @@ from authentik.lib.sentry import SentryIgnoredException
from authentik.policies.models import PolicyBindingModel
class UNSET:
"""Used to test whether a key has not been set."""
def get_attrs(obj: SerializerModel) -> dict[str, Any]:
"""Get object's attributes via their serializer, and convert it to a normal dict"""
serializer: Serializer = obj.serializer(obj)
@ -202,9 +198,6 @@ class Blueprint:
class YAMLTag:
"""Base class for all YAML Tags"""
def __repr__(self) -> str:
return str(self.resolve(BlueprintEntry(""), Blueprint()))
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
"""Implement yaml tag logic"""
raise NotImplementedError
@ -563,53 +556,6 @@ class Value(EnumeratedItem):
raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc
class AtIndex(YAMLTag):
"""Get value at index of a sequence or mapping"""
obj: YAMLTag | dict | list | tuple
attribute: int | str | YAMLTag
default: Any | UNSET
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.obj = loader.construct_object(node.value[0])
self.attribute = loader.construct_object(node.value[1])
if len(node.value) == 2: # noqa: PLR2004
self.default = UNSET
else:
self.default = loader.construct_object(node.value[2])
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
if isinstance(self.obj, YAMLTag):
obj = self.obj.resolve(entry, blueprint)
else:
obj = self.obj
if isinstance(self.attribute, YAMLTag):
attribute = self.attribute.resolve(entry, blueprint)
else:
attribute = self.attribute
if isinstance(obj, list | tuple):
try:
return obj[attribute]
except TypeError as exc:
raise EntryInvalidError.from_entry(
f"Invalid index for list: {attribute}", entry
) from exc
except IndexError as exc:
if self.default is UNSET:
raise EntryInvalidError.from_entry(
f"Index out of range: {attribute}", entry
) from exc
return self.default
if attribute in obj:
return obj[attribute]
else:
if self.default is UNSET:
raise EntryInvalidError.from_entry(f"Key does not exist: {attribute}", entry)
return self.default
class BlueprintDumper(SafeDumper):
"""Dump dataclasses to yaml"""
@ -660,7 +606,6 @@ class BlueprintLoader(SafeLoader):
self.add_constructor("!Enumerate", Enumerate)
self.add_constructor("!Value", Value)
self.add_constructor("!Index", Index)
self.add_constructor("!AtIndex", AtIndex)
class EntryInvalidError(SentryIgnoredException):

View File

@ -51,11 +51,6 @@ from authentik.enterprise.providers.microsoft_entra.models import (
MicrosoftEntraProviderUser,
)
from authentik.enterprise.providers.rac.models import ConnectionToken
from authentik.enterprise.providers.ssf.models import StreamEvent
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
EndpointDevice,
EndpointDeviceConnection,
)
from authentik.events.logs import LogEvent, capture_logs
from authentik.events.models import SystemTask
from authentik.events.utils import cleanse_dict
@ -66,12 +61,7 @@ from authentik.lib.utils.reflection import get_apps
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel
from authentik.policies.reputation.models import Reputation
from authentik.providers.oauth2.models import (
AccessToken,
AuthorizationCode,
DeviceToken,
RefreshToken,
)
from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
from authentik.rbac.models import Role
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
@ -79,7 +69,7 @@ from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
from authentik.tenants.models import Tenant
# Context set when the serializer is created in a blueprint context
# Update website/docs/customize/blueprints/v1/models.md when used
# Update website/developer-docs/blueprints/v1/models.md when used
SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"
@ -129,10 +119,6 @@ def excluded_models() -> list[type[Model]]:
GoogleWorkspaceProviderGroup,
MicrosoftEntraProviderUser,
MicrosoftEntraProviderGroup,
EndpointDevice,
EndpointDeviceConnection,
DeviceToken,
StreamEvent,
)
@ -301,11 +287,7 @@ class Importer:
serializer_kwargs = {}
model_instance = existing_models.first()
if (
not isinstance(model(), BaseMetaModel)
and model_instance
and entry.state != BlueprintEntryDesiredState.MUST_CREATED
):
if not isinstance(model(), BaseMetaModel) and model_instance:
self.logger.debug(
"Initialise serializer with instance",
model=model,
@ -315,12 +297,11 @@ class Importer:
serializer_kwargs["instance"] = model_instance
serializer_kwargs["partial"] = True
elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED:
msg = (
f"State is set to {BlueprintEntryDesiredState.MUST_CREATED.value} "
"and object exists already",
)
raise EntryInvalidError.from_entry(
ValidationError({k: msg for k in entry.identifiers.keys()}, "unique"),
(
f"State is set to {BlueprintEntryDesiredState.MUST_CREATED} "
"and object exists already",
),
entry,
)
else:
@ -448,7 +429,7 @@ class Importer:
orig_import = deepcopy(self._import)
if self._import.version != 1:
self.logger.warning("Invalid blueprint version")
return False, [LogEvent("Invalid blueprint version", log_level="warning", logger=None)]
return False, [{"event": "Invalid blueprint version"}]
with (
transaction_rollback(),
capture_logs() as logs,

View File

@ -159,7 +159,7 @@ def blueprints_discovery(self: SystemTask, path: str | None = None):
check_blueprint_v1_file(blueprint)
count += 1
self.set_status(
TaskStatus.SUCCESSFUL, _("Successfully imported {count} files.".format(count=count))
TaskStatus.SUCCESSFUL, _("Successfully imported %(count)d files." % {"count": count})
)

View File

@ -14,10 +14,10 @@ from rest_framework.response import Response
from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet
from authentik.api.authorization import SecretKeyFilter
from authentik.brands.models import Brand
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.rbac.filters import SecretKeyFilter
from authentik.tenants.utils import get_current_tenant
@ -84,8 +84,8 @@ class CurrentBrandSerializer(PassiveSerializer):
matched_domain = CharField(source="domain")
branding_title = CharField()
branding_logo = CharField(source="branding_logo_url")
branding_favicon = CharField(source="branding_favicon_url")
branding_logo = CharField()
branding_favicon = CharField()
ui_footer_links = ListField(
child=FooterLinkSerializer(),
read_only=True,

View File

@ -4,7 +4,7 @@ from collections.abc import Callable
from django.http.request import HttpRequest
from django.http.response import HttpResponse
from django.utils.translation import override
from django.utils.translation import activate
from authentik.brands.utils import get_brand_for_request
@ -18,14 +18,10 @@ class BrandMiddleware:
self.get_response = get_response
def __call__(self, request: HttpRequest) -> HttpResponse:
locale_to_set = None
if not hasattr(request, "brand"):
brand = get_brand_for_request(request)
request.brand = brand
locale = brand.default_locale
if locale != "":
locale_to_set = locale
if locale_to_set:
with override(locale_to_set):
return self.get_response(request)
activate(locale)
return self.get_response(request)

View File

@ -10,7 +10,6 @@ from structlog.stdlib import get_logger
from authentik.crypto.models import CertificateKeyPair
from authentik.flows.models import Flow
from authentik.lib.config import CONFIG
from authentik.lib.models import SerializerModel
LOGGER = get_logger()
@ -72,18 +71,6 @@ class Brand(SerializerModel):
)
attributes = models.JSONField(default=dict, blank=True)
def branding_logo_url(self) -> str:
"""Get branding_logo with the correct prefix"""
if self.branding_logo.startswith("/static"):
return CONFIG.get("web.path", "/")[:-1] + self.branding_logo
return self.branding_logo
def branding_favicon_url(self) -> str:
"""Get branding_favicon with the correct prefix"""
if self.branding_favicon.startswith("/static"):
return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon
return self.branding_favicon
@property
def serializer(self) -> Serializer:
from authentik.brands.api import BrandSerializer

View File

@ -1,58 +0,0 @@
"""Application Roles API Viewset"""
from django.http import HttpRequest
from django.utils.translation import gettext_lazy as _
from rest_framework.exceptions import ValidationError
from rest_framework.viewsets import ModelViewSet
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import (
Application,
ApplicationEntitlement,
)
class ApplicationEntitlementSerializer(ModelSerializer):
"""ApplicationEntitlement Serializer"""
def validate_app(self, app: Application) -> Application:
"""Ensure user has permission to view"""
request: HttpRequest = self.context.get("request")
if not request and SERIALIZER_CONTEXT_BLUEPRINT in self.context:
return app
user = request.user
if user.has_perm("view_application", app) or user.has_perm(
"authentik_core.view_application"
):
return app
raise ValidationError(_("User does not have access to application."), code="invalid")
class Meta:
model = ApplicationEntitlement
fields = [
"pbm_uuid",
"name",
"app",
"attributes",
]
class ApplicationEntitlementViewSet(UsedByMixin, ModelViewSet):
"""ApplicationEntitlement Viewset"""
queryset = ApplicationEntitlement.objects.all()
serializer_class = ApplicationEntitlementSerializer
search_fields = [
"pbm_uuid",
"name",
"app",
"attributes",
]
filterset_fields = [
"pbm_uuid",
"name",
"app",
]
ordering = ["name"]

View File

@ -2,12 +2,16 @@
from typing import TypedDict
from django_filters.rest_framework import DjangoFilterBackend
from guardian.utils import get_anonymous_user
from rest_framework import mixins
from rest_framework.fields import SerializerMethodField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.request import Request
from rest_framework.viewsets import GenericViewSet
from ua_parser import user_agent_parser
from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import AuthenticatedSession
@ -106,4 +110,11 @@ class AuthenticatedSessionViewSet(
search_fields = ["user__username", "last_ip", "last_user_agent"]
filterset_fields = ["user__username", "last_ip", "last_user_agent"]
ordering = ["user__username"]
owner_field = "user"
permission_classes = [OwnerSuperuserPermissions]
filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
def get_queryset(self):
user = self.request.user if self.request else get_anonymous_user()
if user.is_superuser:
return super().get_queryset()
return super().get_queryset().filter(user=user.pk)

View File

@ -1,55 +1,39 @@
"""Authenticator Devices API Views"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import (
BooleanField,
CharField,
DateTimeField,
IntegerField,
SerializerMethodField,
)
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
from authentik.core.api.utils import MetaNameSerializer
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
from authentik.stages.authenticator import device_classes, devices_for_user
from authentik.stages.authenticator.models import Device
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
class DeviceSerializer(MetaNameSerializer):
"""Serializer for Duo authenticator devices"""
pk = CharField()
pk = IntegerField()
name = CharField()
type = SerializerMethodField()
confirmed = BooleanField()
created = DateTimeField(read_only=True)
last_updated = DateTimeField(read_only=True)
last_used = DateTimeField(read_only=True, allow_null=True)
extra_description = SerializerMethodField()
def get_type(self, instance: Device) -> str:
"""Get type of device"""
return instance._meta.label
def get_extra_description(self, instance: Device) -> str:
"""Get extra description"""
if isinstance(instance, WebAuthnDevice):
return (
instance.device_type.description
if instance.device_type
else _("Extra description not available")
)
if isinstance(instance, EndpointDevice):
return instance.data.get("deviceSignals", {}).get("deviceModel")
return ""
class DeviceViewSet(ViewSet):
"""Viewset for authenticator devices"""
@ -68,14 +52,12 @@ class AdminDeviceViewSet(ViewSet):
"""Viewset for authenticator devices"""
serializer_class = DeviceSerializer
permission_classes = []
permission_classes = [IsAdminUser]
def get_devices(self, **kwargs):
"""Get all devices in all child classes"""
for model in device_classes():
device_set = get_objects_for_user(
self.request.user, f"{model._meta.app_label}.view_{model._meta.model_name}", model
).filter(**kwargs)
device_set = model.objects.filter(**kwargs)
yield from device_set
@extend_schema(

View File

@ -38,7 +38,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
"name",
"authentication_flow",
"authorization_flow",
"invalidation_flow",
"property_mappings",
"component",
"assigned_application_slug",
@ -51,7 +50,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
]
extra_kwargs = {
"authorization_flow": {"required": True, "allow_null": False},
"invalidation_flow": {"required": True, "allow_null": False},
}

View File

@ -2,16 +2,19 @@
from collections.abc import Iterable
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework import mixins
from rest_framework.decorators import action
from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from structlog.stdlib import get_logger
from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.object_types import TypesMixin
from authentik.core.api.used_by import UsedByMixin
@ -85,7 +88,7 @@ class SourceViewSet(
serializer_class = SourceSerializer
lookup_field = "slug"
search_fields = ["slug", "name"]
filterset_fields = ["slug", "name", "managed", "pbm_uuid"]
filterset_fields = ["slug", "name", "managed"]
def get_queryset(self): # pragma: no cover
return Source.objects.select_subclasses()
@ -156,9 +159,9 @@ class SourceViewSet(
class UserSourceConnectionSerializer(SourceSerializer):
"""User source connection"""
"""OAuth Source Serializer"""
source_obj = SourceSerializer(read_only=True, source="source")
source = SourceSerializer(read_only=True)
class Meta:
model = UserSourceConnection
@ -166,10 +169,10 @@ class UserSourceConnectionSerializer(SourceSerializer):
"pk",
"user",
"source",
"source_obj",
"created",
]
extra_kwargs = {
"user": {"read_only": True},
"created": {"read_only": True},
}
@ -186,16 +189,17 @@ class UserSourceConnectionViewSet(
queryset = UserSourceConnection.objects.all()
serializer_class = UserSourceConnectionSerializer
permission_classes = [OwnerSuperuserPermissions]
filterset_fields = ["user", "source__slug"]
search_fields = ["source__slug"]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
ordering = ["source__slug", "pk"]
owner_field = "user"
class GroupSourceConnectionSerializer(SourceSerializer):
"""Group Source Connection"""
"""Group Source Connection Serializer"""
source_obj = SourceSerializer(read_only=True)
source = SourceSerializer(read_only=True)
class Meta:
model = GroupSourceConnection
@ -203,11 +207,12 @@ class GroupSourceConnectionSerializer(SourceSerializer):
"pk",
"group",
"source",
"source_obj",
"identifier",
"created",
]
extra_kwargs = {
"group": {"read_only": True},
"identifier": {"read_only": True},
"created": {"read_only": True},
}
@ -224,7 +229,8 @@ class GroupSourceConnectionViewSet(
queryset = GroupSourceConnection.objects.all()
serializer_class = GroupSourceConnectionSerializer
permission_classes = [OwnerSuperuserPermissions]
filterset_fields = ["group", "source__slug"]
search_fields = ["source__slug"]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
ordering = ["source__slug", "pk"]
owner_field = "user"

View File

@ -3,15 +3,18 @@
from typing import Any
from django.utils.timezone import now
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import OpenApiResponse, extend_schema, inline_serializer
from guardian.shortcuts import assign_perm, get_anonymous_user
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.blueprints.api import ManagedSerializer
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.used_by import UsedByMixin
@ -135,8 +138,8 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
"managed",
]
ordering = ["identifier", "expires"]
owner_field = "user"
rbac_allow_create_without_perm = True
permission_classes = [OwnerSuperuserPermissions]
filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
def get_queryset(self):
user = self.request.user if self.request else get_anonymous_user()

View File

@ -1,12 +1,10 @@
"""transactional application and provider creation"""
from django.apps import apps
from django.db.models import Model
from django.utils.translation import gettext as _
from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema, extend_schema_field
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.exceptions import ValidationError
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, ListField
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
@ -22,9 +20,8 @@ from authentik.blueprints.v1.common import (
from authentik.blueprints.v1.importer import Importer
from authentik.core.api.applications import ApplicationSerializer
from authentik.core.api.utils import PassiveSerializer
from authentik.core.models import Application, Provider
from authentik.core.models import Provider
from authentik.lib.utils.reflection import all_subclasses
from authentik.policies.api.bindings import PolicyBindingSerializer
def get_provider_serializer_mapping():
@ -48,20 +45,6 @@ class TransactionProviderField(DictField):
"""Dictionary field which can hold provider creation data"""
class TransactionPolicyBindingSerializer(PolicyBindingSerializer):
"""PolicyBindingSerializer which does not require target as target is set implicitly"""
def validate(self, attrs):
# As the PolicyBindingSerializer checks that the correct things can be bound to a target
# but we don't have a target here as that's set by the blueprint, pass in an empty app
# which will have the correct allowed combination of group/user/policy.
attrs["target"] = Application()
return super().validate(attrs)
class Meta(PolicyBindingSerializer.Meta):
fields = [x for x in PolicyBindingSerializer.Meta.fields if x != "target"]
class TransactionApplicationSerializer(PassiveSerializer):
"""Serializer for creating a provider and an application in one transaction"""
@ -69,8 +52,6 @@ class TransactionApplicationSerializer(PassiveSerializer):
provider_model = ChoiceField(choices=list(get_provider_serializer_mapping().keys()))
provider = TransactionProviderField()
policy_bindings = TransactionPolicyBindingSerializer(many=True, required=False)
_provider_model: type[Provider] = None
def validate_provider_model(self, fq_model_name: str) -> str:
@ -115,19 +96,6 @@ class TransactionApplicationSerializer(PassiveSerializer):
id="app",
)
)
for binding in attrs.get("policy_bindings", []):
binding["target"] = KeyOf(None, ScalarNode(tag="", value="app"))
for key, value in binding.items():
if not isinstance(value, Model):
continue
binding[key] = value.pk
blueprint.entries.append(
BlueprintEntry(
model="authentik_policies.policybinding",
state=BlueprintEntryDesiredState.MUST_CREATED,
identifiers=binding,
)
)
importer = Importer(blueprint, {})
try:
valid, _ = importer.validate(raise_validation_errors=True)
@ -152,7 +120,8 @@ class TransactionApplicationResponseSerializer(PassiveSerializer):
class TransactionalApplicationView(APIView):
"""Create provider and application and attach them in a single transaction"""
permission_classes = [IsAuthenticated]
# TODO: Migrate to a more specific permission
permission_classes = [IsAdminUser]
@extend_schema(
request=TransactionApplicationSerializer(),
@ -164,23 +133,8 @@ class TransactionalApplicationView(APIView):
"""Convert data into a blueprint, validate it and apply it"""
data = TransactionApplicationSerializer(data=request.data)
data.is_valid(raise_exception=True)
blueprint: Blueprint = data.validated_data
for entry in blueprint.entries:
full_model = entry.get_model(blueprint)
app, __, model = full_model.partition(".")
if not request.user.has_perm(f"{app}.add_{model}"):
raise PermissionDenied(
{
entry.id: _(
"User lacks permission to create {model}".format_map(
{
"model": full_model,
}
)
)
}
)
importer = Importer(blueprint, {})
importer = Importer(data.validated_data, {})
applied = importer.apply()
response = {"applied": False, "logs": []}
response["applied"] = applied

View File

@ -236,11 +236,9 @@ class UserSerializer(ModelSerializer):
"path",
"type",
"uuid",
"password_change_date",
]
extra_kwargs = {
"name": {"allow_blank": True},
"password_change_date": {"read_only": True},
}
@ -429,7 +427,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
queryset = User.objects.none()
ordering = ["username"]
serializer_class = UserSerializer
search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"]
search_fields = ["username", "name", "is_active", "email", "uuid"]
filterset_class = UsersFilter
def get_queryset(self):
@ -587,7 +585,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
"""Set password for user"""
user: User = self.get_object()
try:
user.set_password(request.data.get("password"), request=request)
user.set_password(request.data.get("password"))
user.save()
except (ValidationError, IntegrityError) as exc:
LOGGER.debug("Failed to set password", exc=exc)
@ -668,12 +666,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
@permission_required("authentik_core.impersonate")
@extend_schema(
request=inline_serializer(
"ImpersonationSerializer",
{
"reason": CharField(required=True),
},
),
request=OpenApiTypes.NONE,
responses={
"204": OpenApiResponse(description="Successfully started impersonation"),
"401": OpenApiResponse(description="Access denied"),
@ -686,26 +679,17 @@ class UserViewSet(UsedByMixin, ModelViewSet):
LOGGER.debug("User attempted to impersonate", user=request.user)
return Response(status=401)
user_to_be = self.get_object()
reason = request.data.get("reason", "")
# Check both object-level perms and global perms
if not request.user.has_perm(
"authentik_core.impersonate", user_to_be
) and not request.user.has_perm("authentik_core.impersonate"):
if not request.user.has_perm("impersonate", user_to_be):
LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
return Response(status=401)
if user_to_be.pk == self.request.user.pk:
LOGGER.debug("User attempted to impersonate themselves", user=request.user)
return Response(status=401)
if not reason and request.tenant.impersonation_require_reason:
LOGGER.debug(
"User attempted to impersonate without providing a reason", user=request.user
)
return Response(status=401)
request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER] = request.user
request.session[SESSION_KEY_IMPERSONATE_USER] = user_to_be
Event.new(EventAction.IMPERSONATION_STARTED, reason=reason).from_http(request, user_to_be)
Event.new(EventAction.IMPERSONATION_STARTED).from_http(request, user_to_be)
return Response(status=201)

View File

@ -44,12 +44,13 @@ class TokenBackend(InbuiltBackend):
self, request: HttpRequest, username: str | None, password: str | None, **kwargs: Any
) -> User | None:
try:
user = User._default_manager.get_by_natural_key(username)
except User.DoesNotExist:
# Run the default password hasher once to reduce the timing
# difference between an existing and a nonexistent user (#20760).
User().set_password(password, request=request)
User().set_password(password)
return None
tokens = Token.filter_not_expired(

View File

@ -58,7 +58,6 @@ class PropertyMappingEvaluator(BaseEvaluator):
self._context["user"] = user
if request:
req.http_request = request
self._context["http_request"] = request
req.context.update(**kwargs)
self._context["request"] = req
self._context.update(**kwargs)

View File

@ -5,7 +5,6 @@ from typing import TextIO
from daphne.management.commands.runserver import Command as RunServer
from daphne.server import Server
from authentik.lib.debug import start_debug_server
from authentik.root.signals import post_startup, pre_startup, startup
@ -14,7 +13,6 @@ class SignalServer(Server):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
start_debug_server()
def ready_callable():
pre_startup.send(sender=self)

View File

@ -4,7 +4,6 @@ import code
import platform
import sys
import traceback
from pprint import pprint
from django.apps import apps
from django.core.management.base import BaseCommand
@ -17,9 +16,7 @@ from authentik.events.middleware import should_log_model
from authentik.events.models import Event, EventAction
from authentik.events.utils import model_to_dict
def get_banner_text(shell_type="shell") -> str:
return f"""### authentik {shell_type} ({get_full_version()})
BANNER_TEXT = f"""### authentik shell ({get_full_version()})
### Node {platform.node()} | Arch {platform.machine()} | Python {platform.python_version()} """
@ -37,9 +34,7 @@ class Command(BaseCommand):
def get_namespace(self):
"""Prepare namespace with all models"""
namespace = {
"pprint": pprint,
}
namespace = {}
# Gather Django models and constants from each app
for app in apps.get_app_configs():
@ -116,4 +111,4 @@ class Command(BaseCommand):
readline.parse_and_bind("tab: complete")
# Run interactive shell
code.interact(banner=get_banner_text(), local=namespace)
code.interact(banner=BANNER_TEXT, local=namespace)

View File

@ -9,7 +9,6 @@ from django.db import close_old_connections
from structlog.stdlib import get_logger
from authentik.lib.config import CONFIG
from authentik.lib.debug import start_debug_server
from authentik.root.celery import CELERY_APP
LOGGER = get_logger()
@ -29,7 +28,10 @@ class Command(BaseCommand):
def handle(self, **options):
LOGGER.debug("Celery options", **options)
close_old_connections()
start_debug_server()
if CONFIG.get_bool("remote_debug"):
import debugpy
debugpy.listen(("0.0.0.0", 6900)) # nosec
worker: Worker = CELERY_APP.Worker(
no_color=False,
quiet=True,

View File

@ -5,7 +5,7 @@ from contextvars import ContextVar
from uuid import uuid4
from django.http import HttpRequest, HttpResponse
from django.utils.translation import override
from django.utils.translation import activate
from sentry_sdk.api import set_tag
from structlog.contextvars import STRUCTLOG_KEY_PREFIX
@ -31,20 +31,16 @@ class ImpersonateMiddleware:
def __call__(self, request: HttpRequest) -> HttpResponse:
# No permission checks are done here, they need to be checked before
# SESSION_KEY_IMPERSONATE_USER is set.
locale_to_set = None
if request.user.is_authenticated:
locale = request.user.locale(request)
if locale != "":
locale_to_set = locale
activate(locale)
if SESSION_KEY_IMPERSONATE_USER in request.session:
request.user = request.session[SESSION_KEY_IMPERSONATE_USER]
# Ensure that the user is active, otherwise nothing will work
request.user.is_active = True
if locale_to_set:
with override(locale_to_set):
return self.get_response(request)
return self.get_response(request)

View File

@ -1,55 +0,0 @@
# Generated by Django 5.0.9 on 2024-10-02 11:35
import django.db.models.deletion
from django.apps.registry import Apps
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def migrate_invalidation_flow_default(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
from authentik.flows.models import FlowDesignation, FlowAuthenticationRequirement
db_alias = schema_editor.connection.alias
Flow = apps.get_model("authentik_flows", "Flow")
Provider = apps.get_model("authentik_core", "Provider")
    # So this flow is managed via a blueprint, but we're in a migration so we don't want to rely on that
# since the blueprint is just an empty flow we can just create it here
# and let it be managed by the blueprint later
flow, _ = Flow.objects.using(db_alias).update_or_create(
slug="default-provider-invalidation-flow",
defaults={
"name": "Logged out of application",
"title": "You've logged out of %(app)s.",
"authentication": FlowAuthenticationRequirement.NONE,
"designation": FlowDesignation.INVALIDATION,
},
)
Provider.objects.using(db_alias).filter(invalidation_flow=None).update(invalidation_flow=flow)
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0039_source_group_matching_mode_alter_group_name_and_more"),
("authentik_flows", "0027_auto_20231028_1424"),
]
operations = [
migrations.AddField(
model_name="provider",
name="invalidation_flow",
field=models.ForeignKey(
default=None,
help_text="Flow used ending the session from a provider.",
null=True,
on_delete=django.db.models.deletion.SET_DEFAULT,
related_name="provider_invalidation",
to="authentik_flows.flow",
),
),
migrations.RunPython(migrate_invalidation_flow_default),
]

View File

@ -1,45 +0,0 @@
# Generated by Django 5.0.9 on 2024-11-20 15:16
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0040_provider_invalidation_flow"),
("authentik_policies", "0011_policybinding_failure_result_and_more"),
]
operations = [
migrations.CreateModel(
name="ApplicationEntitlement",
fields=[
(
"policybindingmodel_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="authentik_policies.policybindingmodel",
),
),
("attributes", models.JSONField(blank=True, default=dict)),
("name", models.TextField()),
(
"app",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="authentik_core.application"
),
),
],
options={
"verbose_name": "Application Entitlement",
"verbose_name_plural": "Application Entitlements",
"unique_together": {("app", "name")},
},
bases=("authentik_policies.policybindingmodel", models.Model),
),
]

View File

@ -1,45 +0,0 @@
# Generated by Django 5.0.10 on 2025-01-13 18:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0041_applicationentitlement"),
]
operations = [
migrations.AddIndex(
model_name="authenticatedsession",
index=models.Index(fields=["expires"], name="authentik_c_expires_08251d_idx"),
),
migrations.AddIndex(
model_name="authenticatedsession",
index=models.Index(fields=["expiring"], name="authentik_c_expirin_9cd839_idx"),
),
migrations.AddIndex(
model_name="authenticatedsession",
index=models.Index(
fields=["expiring", "expires"], name="authentik_c_expirin_195a84_idx"
),
),
migrations.AddIndex(
model_name="authenticatedsession",
index=models.Index(fields=["session_key"], name="authentik_c_session_d0f005_idx"),
),
migrations.AddIndex(
model_name="token",
index=models.Index(fields=["expires"], name="authentik_c_expires_a62b4b_idx"),
),
migrations.AddIndex(
model_name="token",
index=models.Index(fields=["expiring"], name="authentik_c_expirin_a1b838_idx"),
),
migrations.AddIndex(
model_name="token",
index=models.Index(
fields=["expiring", "expires"], name="authentik_c_expirin_ba04d9_idx"
),
),
]

View File

@ -314,32 +314,6 @@ class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser):
always_merger.merge(final_attributes, self.attributes)
return final_attributes
def app_entitlements(self, app: "Application | None") -> QuerySet["ApplicationEntitlement"]:
"""Get all entitlements this user has for `app`."""
if not app:
return []
all_groups = self.all_groups()
qs = app.applicationentitlement_set.filter(
Q(
Q(bindings__user=self) | Q(bindings__group__in=all_groups),
bindings__negate=False,
)
| Q(
Q(~Q(bindings__user=self), bindings__user__isnull=False)
| Q(~Q(bindings__group__in=all_groups), bindings__group__isnull=False),
bindings__negate=True,
),
bindings__enabled=True,
).order_by("name")
return qs
def app_entitlements_attributes(self, app: "Application | None") -> dict:
"""Get a dictionary containing all merged attributes from app entitlements for `app`."""
final_attributes = {}
for attrs in self.app_entitlements(app).values_list("attributes", flat=True):
always_merger.merge(final_attributes, attrs)
return final_attributes
@property
def serializer(self) -> Serializer:
from authentik.core.api.users import UserSerializer
@ -356,13 +330,11 @@ class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser):
"""superuser == staff user"""
return self.is_superuser # type: ignore
def set_password(self, raw_password, signal=True, sender=None, request=None):
def set_password(self, raw_password, signal=True):
if self.pk and signal:
from authentik.core.signals import password_changed
if not sender:
sender = self
password_changed.send(sender=sender, user=self, password=raw_password, request=request)
password_changed.send(sender=self, user=self, password=raw_password)
self.password_change_date = now()
return super().set_password(raw_password)
@ -419,23 +391,14 @@ class Provider(SerializerModel):
),
related_name="provider_authentication",
)
authorization_flow = models.ForeignKey(
"authentik_flows.Flow",
# Set to cascade even though null is allowed, since most providers
# still require an authorization flow set
on_delete=models.CASCADE,
null=True,
help_text=_("Flow used when authorizing this provider."),
related_name="provider_authorization",
)
invalidation_flow = models.ForeignKey(
"authentik_flows.Flow",
on_delete=models.SET_DEFAULT,
default=None,
null=True,
help_text=_("Flow used ending the session from a provider."),
related_name="provider_invalidation",
)
property_mappings = models.ManyToManyField("PropertyMapping", default=None, blank=True)
@ -599,14 +562,6 @@ class Application(SerializerModel, PolicyBindingModel):
return None
return candidates[-1]
def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
"""Get Backchannel provider for a specific type"""
providers = self.backchannel_providers.filter(
**{f"{provider_type._meta.model_name}__isnull": False},
**kwargs,
)
return getattr(providers.first(), provider_type._meta.model_name)
def __str__(self):
return str(self.name)
@ -615,31 +570,6 @@ class Application(SerializerModel, PolicyBindingModel):
verbose_name_plural = _("Applications")
class ApplicationEntitlement(AttributesMixin, SerializerModel, PolicyBindingModel):
"""Application-scoped entitlement to control authorization in an application"""
name = models.TextField()
app = models.ForeignKey(Application, on_delete=models.CASCADE)
class Meta:
verbose_name = _("Application Entitlement")
verbose_name_plural = _("Application Entitlements")
unique_together = (("app", "name"),)
def __str__(self):
return f"Application Entitlement {self.name} for app {self.app_id}"
@property
def serializer(self) -> type[Serializer]:
from authentik.core.api.application_entitlements import ApplicationEntitlementSerializer
return ApplicationEntitlementSerializer
def supported_policy_binding_targets(self):
return ["group", "user"]
class SourceUserMatchingModes(models.TextChoices):
"""Different modes a source can handle new/returning users"""
@ -854,11 +784,6 @@ class ExpiringModel(models.Model):
class Meta:
abstract = True
indexes = [
models.Index(fields=["expires"]),
models.Index(fields=["expiring"]),
models.Index(fields=["expiring", "expires"]),
]
def expire_action(self, *args, **kwargs):
"""Handler which is called when this object is expired. By
@ -914,7 +839,7 @@ class Token(SerializerModel, ManagedModel, ExpiringModel):
class Meta:
verbose_name = _("Token")
verbose_name_plural = _("Tokens")
indexes = ExpiringModel.Meta.indexes + [
indexes = [
models.Index(fields=["identifier"]),
models.Index(fields=["key"]),
]
@ -1014,9 +939,6 @@ class AuthenticatedSession(ExpiringModel):
class Meta:
verbose_name = _("Authenticated Session")
verbose_name_plural = _("Authenticated Sessions")
indexes = ExpiringModel.Meta.indexes + [
models.Index(fields=["session_key"]),
]
def __str__(self) -> str:
return f"Authenticated Session {self.session_key[:10]}"

View File

@ -1,9 +1,11 @@
"""Source decision helper"""
from enum import Enum
from typing import Any
from django.contrib import messages
from django.db import IntegrityError, transaction
from django.db.models.query_utils import Q
from django.http import HttpRequest, HttpResponse
from django.shortcuts import redirect
from django.urls import reverse
@ -14,11 +16,12 @@ from authentik.core.models import (
Group,
GroupSourceConnection,
Source,
SourceGroupMatchingModes,
SourceUserMatchingModes,
User,
UserSourceConnection,
)
from authentik.core.sources.mapper import SourceMapper
from authentik.core.sources.matcher import Action, SourceMatcher
from authentik.core.sources.stage import (
PLAN_CONTEXT_SOURCES_CONNECTION,
PostSourceStage,
@ -51,6 +54,16 @@ SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"
PLAN_CONTEXT_SOURCE_GROUPS = "source_groups"
class Action(Enum):
"""Actions that can be decided based on the request
and source settings"""
LINK = "link"
AUTH = "auth"
ENROLL = "enroll"
DENY = "deny"
class MessageStage(StageView):
"""Show a pre-configured message after the flow is done"""
@ -73,7 +86,6 @@ class SourceFlowManager:
source: Source
mapper: SourceMapper
matcher: SourceMatcher
request: HttpRequest
identifier: str
@ -96,9 +108,6 @@ class SourceFlowManager:
) -> None:
self.source = source
self.mapper = SourceMapper(self.source)
self.matcher = SourceMatcher(
self.source, self.user_connection_type, self.group_connection_type
)
self.request = request
self.identifier = identifier
self.user_info = user_info
@ -122,24 +131,66 @@ class SourceFlowManager:
def get_action(self, **kwargs) -> tuple[Action, UserSourceConnection | None]: # noqa: PLR0911
"""decide which action should be taken"""
new_connection = self.user_connection_type(source=self.source, identifier=self.identifier)
# When request is authenticated, always link
if self.request.user.is_authenticated:
new_connection = self.user_connection_type(
source=self.source, identifier=self.identifier
)
new_connection.user = self.request.user
new_connection = self.update_user_connection(new_connection, **kwargs)
if existing := self.user_connection_type.objects.filter(
source=self.source, identifier=self.identifier
).first():
existing = self.update_user_connection(existing)
return Action.AUTH, existing
return Action.LINK, new_connection
action, connection = self.matcher.get_user_action(self.identifier, self.user_properties)
if connection:
connection = self.update_user_connection(connection, **kwargs)
return action, connection
existing_connections = self.user_connection_type.objects.filter(
source=self.source, identifier=self.identifier
)
if existing_connections.exists():
connection = existing_connections.first()
return Action.AUTH, self.update_user_connection(connection, **kwargs)
# No connection exists, but we match on identifier, so enroll
if self.source.user_matching_mode == SourceUserMatchingModes.IDENTIFIER:
# We don't save the connection here cause it doesn't have a user assigned yet
return Action.ENROLL, self.update_user_connection(new_connection, **kwargs)
# Check for existing users with matching attributes
query = Q()
# Either query existing user based on email or username
if self.source.user_matching_mode in [
SourceUserMatchingModes.EMAIL_LINK,
SourceUserMatchingModes.EMAIL_DENY,
]:
if not self.user_properties.get("email", None):
self._logger.warning("Refusing to use none email")
return Action.DENY, None
query = Q(email__exact=self.user_properties.get("email", None))
if self.source.user_matching_mode in [
SourceUserMatchingModes.USERNAME_LINK,
SourceUserMatchingModes.USERNAME_DENY,
]:
if not self.user_properties.get("username", None):
self._logger.warning("Refusing to use none username")
return Action.DENY, None
query = Q(username__exact=self.user_properties.get("username", None))
self._logger.debug("trying to link with existing user", query=query)
matching_users = User.objects.filter(query)
# No matching users, always enroll
if not matching_users.exists():
self._logger.debug("no matching users found, enrolling")
return Action.ENROLL, self.update_user_connection(new_connection, **kwargs)
user = matching_users.first()
if self.source.user_matching_mode in [
SourceUserMatchingModes.EMAIL_LINK,
SourceUserMatchingModes.USERNAME_LINK,
]:
new_connection.user = user
new_connection = self.update_user_connection(new_connection, **kwargs)
return Action.LINK, new_connection
if self.source.user_matching_mode in [
SourceUserMatchingModes.EMAIL_DENY,
SourceUserMatchingModes.USERNAME_DENY,
]:
self._logger.info("denying source because user exists", user=user)
return Action.DENY, None
# Should never get here as default enroll case is returned above.
return Action.DENY, None # pragma: no cover
def update_user_connection(
self, connection: UserSourceConnection, **kwargs
@ -238,7 +289,13 @@ class SourceFlowManager:
self.request.GET,
flow_slug=flow_slug,
)
flow_context.setdefault(PLAN_CONTEXT_REDIRECT, final_redirect)
# Ensure redirect is carried through when user was trying to
# authorize application
final_redirect = self.request.session.get(SESSION_KEY_GET, {}).get(
NEXT_ARG_NAME, "authentik_core:if-user"
)
if PLAN_CONTEXT_REDIRECT not in flow_context:
flow_context[PLAN_CONTEXT_REDIRECT] = final_redirect
if not flow:
return bad_request_message(
@ -259,13 +316,19 @@ class SourceFlowManager:
if stages:
for stage in stages:
plan.append_stage(stage)
return plan.to_redirect(self.request, flow)
self.request.session[SESSION_KEY_PLAN] = plan
return redirect_with_qs(
"authentik_core:if-flow",
self.request.GET,
flow_slug=flow.slug,
)
def handle_auth(
self,
connection: UserSourceConnection,
) -> HttpResponse:
"""Login user and redirect."""
flow_kwargs = {PLAN_CONTEXT_PENDING_USER: connection.user}
return self._prepare_flow(
self.source.authentication_flow,
connection,
@ -279,11 +342,7 @@ class SourceFlowManager:
),
)
],
**{
PLAN_CONTEXT_PENDING_USER: connection.user,
PLAN_CONTEXT_PROMPT: delete_none_values(self.user_properties),
PLAN_CONTEXT_USER_PATH: self.source.get_user_path(),
},
**flow_kwargs,
)
def handle_existing_link(
@ -349,16 +408,74 @@ class SourceFlowManager:
class GroupUpdateStage(StageView):
"""Dynamically injected stage which updates the user after enrollment/authentication."""
def get_action(
self, group_id: str, group_properties: dict[str, Any | dict[str, Any]]
) -> tuple[Action, GroupSourceConnection | None]:
"""decide which action should be taken"""
new_connection = self.group_connection_type(source=self.source, identifier=group_id)
existing_connections = self.group_connection_type.objects.filter(
source=self.source, identifier=group_id
)
if existing_connections.exists():
return Action.LINK, existing_connections.first()
# No connection exists, but we match on identifier, so enroll
if self.source.group_matching_mode == SourceGroupMatchingModes.IDENTIFIER:
# We don't save the connection here cause it doesn't have a user assigned yet
return Action.ENROLL, new_connection
# Check for existing groups with matching attributes
query = Q()
if self.source.group_matching_mode in [
SourceGroupMatchingModes.NAME_LINK,
SourceGroupMatchingModes.NAME_DENY,
]:
if not group_properties.get("name", None):
LOGGER.warning(
"Refusing to use none group name", source=self.source, group_id=group_id
)
return Action.DENY, None
query = Q(name__exact=group_properties.get("name"))
LOGGER.debug(
"trying to link with existing group", source=self.source, query=query, group_id=group_id
)
matching_groups = Group.objects.filter(query)
# No matching groups, always enroll
if not matching_groups.exists():
LOGGER.debug(
"no matching groups found, enrolling", source=self.source, group_id=group_id
)
return Action.ENROLL, new_connection
group = matching_groups.first()
if self.source.group_matching_mode in [
SourceGroupMatchingModes.NAME_LINK,
]:
new_connection.group = group
return Action.LINK, new_connection
if self.source.group_matching_mode in [
SourceGroupMatchingModes.NAME_DENY,
]:
LOGGER.info(
"denying source because group exists",
source=self.source,
group=group,
group_id=group_id,
)
return Action.DENY, None
# Should never get here as default enroll case is returned above.
return Action.DENY, None # pragma: no cover
def handle_group(
self, group_id: str, group_properties: dict[str, Any | dict[str, Any]]
) -> Group | None:
action, connection = self.matcher.get_group_action(group_id, group_properties)
action, connection = self.get_action(group_id, group_properties)
if action == Action.ENROLL:
group = Group.objects.create(**group_properties)
connection.group = group
connection.save()
return group
elif action in (Action.LINK, Action.AUTH):
elif action == Action.LINK:
group = connection.group
group.update_attributes(group_properties)
connection.save()
@ -372,7 +489,6 @@ class GroupUpdateStage(StageView):
self.group_connection_type: GroupSourceConnection = (
self.executor.current_stage.group_connection_type
)
self.matcher = SourceMatcher(self.source, None, self.group_connection_type)
raw_groups: dict[str, dict[str, Any | dict[str, Any]]] = self.executor.plan.context[
PLAN_CONTEXT_SOURCE_GROUPS

View File

@ -1,152 +0,0 @@
"""Source user and group matching"""
from dataclasses import dataclass
from enum import Enum
from typing import Any
from django.db.models import Q
from structlog import get_logger
from authentik.core.models import (
Group,
GroupSourceConnection,
Source,
SourceGroupMatchingModes,
SourceUserMatchingModes,
User,
UserSourceConnection,
)
class Action(Enum):
"""Actions that can be decided based on the request and source settings"""
LINK = "link"
AUTH = "auth"
ENROLL = "enroll"
DENY = "deny"
@dataclass
class MatchableProperty:
property: str
link_mode: SourceUserMatchingModes | SourceGroupMatchingModes
deny_mode: SourceUserMatchingModes | SourceGroupMatchingModes
class SourceMatcher:
def __init__(
self,
source: Source,
user_connection_type: type[UserSourceConnection],
group_connection_type: type[GroupSourceConnection],
):
self.source = source
self.user_connection_type = user_connection_type
self.group_connection_type = group_connection_type
self._logger = get_logger().bind(source=self.source)
def get_action(
self,
object_type: type[User | Group],
matchable_properties: list[MatchableProperty],
identifier: str,
properties: dict[str, Any | dict[str, Any]],
) -> tuple[Action, UserSourceConnection | GroupSourceConnection | None]:
connection_type = None
matching_mode = None
identifier_matching_mode = None
if object_type == User:
connection_type = self.user_connection_type
matching_mode = self.source.user_matching_mode
identifier_matching_mode = SourceUserMatchingModes.IDENTIFIER
if object_type == Group:
connection_type = self.group_connection_type
matching_mode = self.source.group_matching_mode
identifier_matching_mode = SourceGroupMatchingModes.IDENTIFIER
if not connection_type or not matching_mode or not identifier_matching_mode:
return Action.DENY, None
new_connection = connection_type(source=self.source, identifier=identifier)
existing_connections = connection_type.objects.filter(
source=self.source, identifier=identifier
)
if existing_connections.exists():
return Action.AUTH, existing_connections.first()
# No connection exists, but we match on identifier, so enroll
if matching_mode == identifier_matching_mode:
# We don't save the connection here cause it doesn't have a user/group assigned yet
return Action.ENROLL, new_connection
# Check for existing users with matching attributes
query = Q()
for matchable_property in matchable_properties:
property = matchable_property.property
if matching_mode in [matchable_property.link_mode, matchable_property.deny_mode]:
if not properties.get(property, None):
self._logger.warning(
"Refusing to use none property", identifier=identifier, property=property
)
return Action.DENY, None
query_args = {
f"{property}__exact": properties[property],
}
query = Q(**query_args)
self._logger.debug(
"Trying to link with existing object", query=query, identifier=identifier
)
matching_objects = object_type.objects.filter(query)
        # No matching objects, always enroll
if not matching_objects.exists():
self._logger.debug("No matching objects found, enrolling")
return Action.ENROLL, new_connection
obj = matching_objects.first()
if matching_mode in [mp.link_mode for mp in matchable_properties]:
attr = None
if object_type == User:
attr = "user"
if object_type == Group:
attr = "group"
setattr(new_connection, attr, obj)
return Action.LINK, new_connection
if matching_mode in [mp.deny_mode for mp in matchable_properties]:
self._logger.info("Denying source because object exists", obj=obj)
return Action.DENY, None
# Should never get here as default enroll case is returned above.
return Action.DENY, None # pragma: no cover
def get_user_action(
self, identifier: str, properties: dict[str, Any | dict[str, Any]]
) -> tuple[Action, UserSourceConnection | None]:
return self.get_action(
User,
[
MatchableProperty(
"username",
SourceUserMatchingModes.USERNAME_LINK,
SourceUserMatchingModes.USERNAME_DENY,
),
MatchableProperty(
"email", SourceUserMatchingModes.EMAIL_LINK, SourceUserMatchingModes.EMAIL_DENY
),
],
identifier,
properties,
)
def get_group_action(
self, identifier: str, properties: dict[str, Any | dict[str, Any]]
) -> tuple[Action, GroupSourceConnection | None]:
return self.get_action(
Group,
[
MatchableProperty(
"name", SourceGroupMatchingModes.NAME_LINK, SourceGroupMatchingModes.NAME_DENY
),
],
identifier,
properties,
)

View File

@ -9,9 +9,6 @@
versionFamily: "{{ version_family }}",
versionSubdomain: "{{ version_subdomain }}",
build: "{{ build }}",
api: {
base: "{{ base_url }}",
},
};
window.addEventListener("DOMContentLoaded", function () {
{% for message in messages %}

View File

@ -9,14 +9,14 @@
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
<title>{% block title %}{% trans title|default:brand.branding_title %}{% endblock %}</title>
<link rel="icon" href="{{ brand.branding_favicon_url }}">
<link rel="shortcut icon" href="{{ brand.branding_favicon_url }}">
<link rel="icon" href="{{ brand.branding_favicon }}">
<link rel="shortcut icon" href="{{ brand.branding_favicon }}">
{% block head_before %}
{% endblock %}
<link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject>
<script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script>
<script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script>
{% versioned_script "dist/poly-%v.js" %}
{% versioned_script "dist/standalone/loading/index-%v.js" %}
{% block head %}
{% endblock %}
<meta name="sentry-trace" content="{{ sentry_trace }}" />

View File

@ -3,7 +3,7 @@
{% load authentik_core %}
{% block head %}
<script src="{% versioned_script 'dist/admin/AdminInterface-%v.js' %}" type="module"></script>
{% versioned_script "dist/admin/AdminInterface-%v.js" %}
<meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)">
<meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)">
{% include "base/header_js.html" %}

View File

@ -0,0 +1,43 @@
{% extends 'login/base_full.html' %}
{% load static %}
{% load i18n %}
{% block title %}
{% trans 'End session' %} - {{ brand.branding_title }}
{% endblock %}
{% block card_title %}
{% blocktrans with application=application.name %}
You've logged out of {{ application }}.
{% endblocktrans %}
{% endblock %}
{% block card %}
<form method="POST" class="pf-c-form">
<p>
{% blocktrans with application=application.name branding_title=brand.branding_title %}
You've logged out of {{ application }}. You can go back to the overview to launch another application, or log out of your {{ branding_title }} account.
{% endblocktrans %}
</p>
<a id="ak-back-home" href="{% url 'authentik_core:root-redirect' %}" class="pf-c-button pf-m-primary">
{% trans 'Go back to overview' %}
</a>
<a id="logout" href="{% url 'authentik_flows:default-invalidation' %}" class="pf-c-button pf-m-secondary">
{% blocktrans with branding_title=brand.branding_title %}
Log out of {{ branding_title }}
{% endblocktrans %}
</a>
{% if application.get_launch_url %}
<a href="{{ application.get_launch_url }}" class="pf-c-button pf-m-secondary">
{% blocktrans with application=application.name %}
Log back into {{ application }}
{% endblocktrans %}
</a>
{% endif %}
</form>
{% endblock %}

View File

@ -3,7 +3,7 @@
{% load authentik_core %}
{% block head %}
<script src="{% versioned_script 'dist/user/UserInterface-%v.js' %}" type="module"></script>
{% versioned_script "dist/user/UserInterface-%v.js" %}
<meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: light)">
<meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: dark)">
{% include "base/header_js.html" %}

View File

@ -4,7 +4,7 @@
{% load i18n %}
{% block head_before %}
<link rel="prefetch" href="{% static 'dist/assets/images/flow_background.jpg' %}" />
<link rel="prefetch" href="/static/dist/assets/images/flow_background.jpg" />
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)">
{% include "base/header_js.html" %}
@ -13,7 +13,7 @@
{% block head %}
<style>
:root {
--ak-flow-background: url("{% static 'dist/assets/images/flow_background.jpg' %}");
--ak-flow-background: url("/static/dist/assets/images/flow_background.jpg");
--pf-c-background-image--BackgroundImage: var(--ak-flow-background);
--pf-c-background-image--BackgroundImage-2x: var(--ak-flow-background);
--pf-c-background-image--BackgroundImage--sm: var(--ak-flow-background);
@ -50,7 +50,7 @@
<div class="ak-login-container">
<main class="pf-c-login__main">
<div class="pf-c-login__main-header pf-c-brand ak-brand">
<img src="{{ brand.branding_logo_url }}" alt="authentik Logo" />
<img src="{{ brand.branding_logo }}" alt="authentik Logo" />
</div>
<header class="pf-c-login__main-header">
<h1 class="pf-c-title pf-m-3xl">

View File

@ -2,6 +2,7 @@
from django import template
from django.templatetags.static import static as static_loader
from django.utils.safestring import mark_safe
from authentik import get_full_version
@ -11,4 +12,10 @@ register = template.Library()
@register.simple_tag()
def versioned_script(path: str) -> str:
"""Wrapper around {% static %} tag that supports setting the version"""
return static_loader(path.replace("%v", get_full_version()))
returned_lines = [
(
f'<script src="{static_loader(path.replace("%v", get_full_version()))}'
'" type="module"></script>'
),
]
return mark_safe("".join(returned_lines)) # nosec

View File

@ -1,153 +0,0 @@
"""Test Application Entitlements API"""
from django.urls import reverse
from guardian.shortcuts import assign_perm
from rest_framework.test import APITestCase
from authentik.core.models import Application, ApplicationEntitlement, Group
from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_user
from authentik.lib.generators import generate_id
from authentik.policies.dummy.models import DummyPolicy
from authentik.policies.models import PolicyBinding
from authentik.providers.oauth2.models import OAuth2Provider
class TestApplicationEntitlements(APITestCase):
"""Test application entitlements"""
def setUp(self) -> None:
self.user = create_test_user()
self.other_user = create_test_user()
self.provider = OAuth2Provider.objects.create(
name="test",
authorization_flow=create_test_flow(),
)
self.app: Application = Application.objects.create(
name=generate_id(),
slug=generate_id(),
provider=self.provider,
)
def test_user(self):
"""Test user-direct assignment"""
ent = ApplicationEntitlement.objects.create(app=self.app, name=generate_id())
PolicyBinding.objects.create(target=ent, user=self.user, order=0)
ents = self.user.app_entitlements(self.app)
self.assertEqual(len(ents), 1)
self.assertEqual(ents[0].name, ent.name)
def test_group(self):
"""Test direct group"""
group = Group.objects.create(name=generate_id())
self.user.ak_groups.add(group)
ent = ApplicationEntitlement.objects.create(app=self.app, name=generate_id())
PolicyBinding.objects.create(target=ent, group=group, order=0)
ents = self.user.app_entitlements(self.app)
self.assertEqual(len(ents), 1)
self.assertEqual(ents[0].name, ent.name)
def test_group_indirect(self):
"""Test indirect group"""
parent = Group.objects.create(name=generate_id())
group = Group.objects.create(name=generate_id(), parent=parent)
self.user.ak_groups.add(group)
ent = ApplicationEntitlement.objects.create(app=self.app, name=generate_id())
PolicyBinding.objects.create(target=ent, group=parent, order=0)
ents = self.user.app_entitlements(self.app)
self.assertEqual(len(ents), 1)
self.assertEqual(ents[0].name, ent.name)
def test_negate_user(self):
"""Test with negate flag"""
ent = ApplicationEntitlement.objects.create(app=self.app, name=generate_id())
PolicyBinding.objects.create(target=ent, user=self.other_user, order=0, negate=True)
ents = self.user.app_entitlements(self.app)
self.assertEqual(len(ents), 1)
self.assertEqual(ents[0].name, ent.name)
def test_negate_group(self):
"""Test with negate flag"""
other_group = Group.objects.create(name=generate_id())
ent = ApplicationEntitlement.objects.create(app=self.app, name=generate_id())
PolicyBinding.objects.create(target=ent, group=other_group, order=0, negate=True)
ents = self.user.app_entitlements(self.app)
self.assertEqual(len(ents), 1)
self.assertEqual(ents[0].name, ent.name)
def test_api_perms_global(self):
"""Test API creation with global permissions"""
assign_perm("authentik_core.add_applicationentitlement", self.user)
assign_perm("authentik_core.view_application", self.user)
self.client.force_login(self.user)
res = self.client.post(
reverse("authentik_api:applicationentitlement-list"),
data={
"name": generate_id(),
"app": self.app.pk,
},
)
self.assertEqual(res.status_code, 201)
def test_api_perms_scoped(self):
"""Test API creation with scoped permissions"""
assign_perm("authentik_core.add_applicationentitlement", self.user)
assign_perm("authentik_core.view_application", self.user, self.app)
self.client.force_login(self.user)
res = self.client.post(
reverse("authentik_api:applicationentitlement-list"),
data={
"name": generate_id(),
"app": self.app.pk,
},
)
self.assertEqual(res.status_code, 201)
def test_api_perms_missing(self):
"""Test API creation with no permissions"""
assign_perm("authentik_core.add_applicationentitlement", self.user)
self.client.force_login(self.user)
res = self.client.post(
reverse("authentik_api:applicationentitlement-list"),
data={
"name": generate_id(),
"app": self.app.pk,
},
)
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(res.content, {"app": ["User does not have access to application."]})
def test_api_bindings_policy(self):
"""Test that API doesn't allow policies to be bound to this"""
ent = ApplicationEntitlement.objects.create(app=self.app, name=generate_id())
policy = DummyPolicy.objects.create(name=generate_id())
admin = create_test_admin_user()
self.client.force_login(admin)
response = self.client.post(
reverse("authentik_api:policybinding-list"),
data={
"target": ent.pbm_uuid,
"policy": policy.pk,
"order": 0,
},
)
self.assertJSONEqual(
response.content.decode(),
{"non_field_errors": ["One of 'group', 'user' must be set."]},
)
def test_api_bindings_group(self):
"""Test that API doesn't allow policies to be bound to this"""
ent = ApplicationEntitlement.objects.create(app=self.app, name=generate_id())
group = Group.objects.create(name=generate_id())
admin = create_test_admin_user()
self.client.force_login(admin)
response = self.client.post(
reverse("authentik_api:policybinding-list"),
data={
"target": ent.pbm_uuid,
"group": group.pk,
"order": 0,
},
)
self.assertEqual(response.status_code, 201)
self.assertTrue(PolicyBinding.objects.filter(target=ent.pbm_uuid).exists())
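For orientation, the removed test module exercises one pattern throughout: an `ApplicationEntitlement` is created for an `Application` and granted through a `PolicyBinding` on a user or group, then read back via `user.app_entitlements(...)`. A hedged sketch of that pattern, assuming an initialized authentik environment; the helper function name is illustrative:

```python
# Sketch only: requires an initialized authentik/Django environment.
from authentik.core.models import Application, ApplicationEntitlement, Group
from authentik.policies.models import PolicyBinding


def grant_entitlement(app: Application, group: Group, name: str) -> ApplicationEntitlement:
    """Create an entitlement on an application and bind it to a group."""
    ent = ApplicationEntitlement.objects.create(app=app, name=name)
    PolicyBinding.objects.create(target=ent, group=group, order=0)
    return ent

# Any user in `group` then sees the entitlement via user.app_entitlements(app).
```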

View File

@ -12,7 +12,7 @@ from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.lib.generators import generate_id
from authentik.policies.dummy.models import DummyPolicy
from authentik.policies.models import PolicyBinding
from authentik.providers.oauth2.models import OAuth2Provider, RedirectURI, RedirectURIMatchingMode
from authentik.providers.oauth2.models import OAuth2Provider
from authentik.providers.proxy.models import ProxyProvider
from authentik.providers.saml.models import SAMLProvider
@ -24,7 +24,7 @@ class TestApplicationsAPI(APITestCase):
self.user = create_test_admin_user()
self.provider = OAuth2Provider.objects.create(
name="test",
redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "http://some-other-domain")],
redirect_uris="http://some-other-domain",
authorization_flow=create_test_flow(),
)
self.allowed: Application = Application.objects.create(
@ -134,7 +134,6 @@ class TestApplicationsAPI(APITestCase):
"assigned_application_name": "allowed",
"assigned_application_slug": "allowed",
"authentication_flow": None,
"invalidation_flow": None,
"authorization_flow": str(self.provider.authorization_flow.pk),
"component": "ak-provider-oauth2-form",
"meta_model_name": "authentik_providers_oauth2.oauth2provider",
@ -187,7 +186,6 @@ class TestApplicationsAPI(APITestCase):
"assigned_application_name": "allowed",
"assigned_application_slug": "allowed",
"authentication_flow": None,
"invalidation_flow": None,
"authorization_flow": str(self.provider.authorization_flow.pk),
"component": "ak-provider-oauth2-form",
"meta_model_name": "authentik_providers_oauth2.oauth2provider",

View File

@ -1,59 +0,0 @@
"""Test Devices API"""
from json import loads
from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.core.tests.utils import create_test_admin_user, create_test_user
class TestDevicesAPI(APITestCase):
"""Test applications API"""
def setUp(self) -> None:
self.admin = create_test_admin_user()
self.user1 = create_test_user()
self.device1 = self.user1.staticdevice_set.create()
self.user2 = create_test_user()
self.device2 = self.user2.staticdevice_set.create()
def test_user_api(self):
"""Test user API"""
self.client.force_login(self.user1)
response = self.client.get(
reverse(
"authentik_api:device-list",
)
)
self.assertEqual(response.status_code, 200)
body = loads(response.content.decode())
self.assertEqual(len(body), 1)
self.assertEqual(body[0]["pk"], str(self.device1.pk))
def test_user_api_as_admin(self):
"""Test user API"""
self.client.force_login(self.admin)
response = self.client.get(
reverse(
"authentik_api:device-list",
)
)
self.assertEqual(response.status_code, 200)
body = loads(response.content.decode())
self.assertEqual(len(body), 0)
def test_admin_api(self):
"""Test admin API"""
self.client.force_login(self.admin)
response = self.client.get(
reverse(
"authentik_api:admin-device-list",
)
)
self.assertEqual(response.status_code, 200)
body = loads(response.content.decode())
self.assertEqual(len(body), 2)
self.assertEqual(
{body[0]["pk"], body[1]["pk"]}, {str(self.device1.pk), str(self.device2.pk)}
)

View File

@ -29,8 +29,7 @@ class TestImpersonation(APITestCase):
reverse(
"authentik_api:user-impersonate",
kwargs={"pk": self.other_user.pk},
),
data={"reason": "some reason"},
)
)
response = self.client.get(reverse("authentik_api:user-me"))
@ -45,27 +44,6 @@ class TestImpersonation(APITestCase):
self.assertEqual(response_body["user"]["username"], self.user.username)
self.assertNotIn("original", response_body)
def test_impersonate_global(self):
"""Test impersonation with global permissions"""
new_user = create_test_user()
assign_perm("authentik_core.impersonate", new_user)
assign_perm("authentik_core.view_user", new_user)
self.client.force_login(new_user)
response = self.client.post(
reverse(
"authentik_api:user-impersonate",
kwargs={"pk": self.other_user.pk},
),
data={"reason": "some reason"},
)
self.assertEqual(response.status_code, 201)
response = self.client.get(reverse("authentik_api:user-me"))
response_body = loads(response.content.decode())
self.assertEqual(response_body["user"]["username"], self.other_user.username)
self.assertEqual(response_body["original"]["username"], new_user.username)
def test_impersonate_scoped(self):
"""Test impersonation with scoped permissions"""
new_user = create_test_user()
@ -77,8 +55,7 @@ class TestImpersonation(APITestCase):
reverse(
"authentik_api:user-impersonate",
kwargs={"pk": self.other_user.pk},
),
data={"reason": "some reason"},
)
)
self.assertEqual(response.status_code, 201)
@ -92,8 +69,7 @@ class TestImpersonation(APITestCase):
self.client.force_login(self.other_user)
response = self.client.post(
reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk}),
data={"reason": "some reason"},
reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk})
)
self.assertEqual(response.status_code, 403)
@ -109,8 +85,7 @@ class TestImpersonation(APITestCase):
self.client.force_login(self.user)
response = self.client.post(
reverse("authentik_api:user-impersonate", kwargs={"pk": self.other_user.pk}),
data={"reason": "some reason"},
reverse("authentik_api:user-impersonate", kwargs={"pk": self.other_user.pk})
)
self.assertEqual(response.status_code, 401)
@ -123,22 +98,7 @@ class TestImpersonation(APITestCase):
self.client.force_login(self.user)
response = self.client.post(
reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk}),
data={"reason": "some reason"},
)
self.assertEqual(response.status_code, 401)
response = self.client.get(reverse("authentik_api:user-me"))
response_body = loads(response.content.decode())
self.assertEqual(response_body["user"]["username"], self.user.username)
def test_impersonate_reason_required(self):
"""test impersonation that user must provide reason"""
self.client.force_login(self.user)
response = self.client.post(
reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk}),
data={"reason": ""},
reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk})
)
self.assertEqual(response.status_code, 401)
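One side of this file requires a `reason` in the impersonation request body and rejects an empty value; the other posts without it. A hedged sketch of the call as these tests make it, assuming authentik's URL configuration and test setup; the primary key and reason text are placeholders:

```python
# Sketch only: assumes authentik's URL configuration and test database are available.
from django.urls import reverse
from rest_framework.test import APIClient

client = APIClient()
# client.force_login(user_with_impersonate_and_view_user_permissions)
response = client.post(
    reverse("authentik_api:user-impersonate", kwargs={"pk": 42}),  # pk is a placeholder
    data={"reason": "debugging a login issue"},  # required and non-empty on one side of the diff
)
# A 201 starts the impersonation; the user-me endpoint then reports the impersonated user.
```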

View File

@ -81,22 +81,6 @@ class TestSourceFlowManager(TestCase):
reverse("authentik_core:if-user") + "#/settings;page-sources",
)
def test_authenticated_auth(self):
"""Test authenticated user linking"""
user = User.objects.create(username="foo", email="foo@bar.baz")
UserOAuthSourceConnection.objects.create(
user=user, source=self.source, identifier=self.identifier
)
request = get_request("/", user=user)
flow_manager = OAuthSourceFlowManager(
self.source, request, self.identifier, {"info": {}}, {}
)
action, connection = flow_manager.get_action()
self.assertEqual(action, Action.AUTH)
self.assertIsNotNone(connection.pk)
response = flow_manager.get_flow()
self.assertEqual(response.status_code, 302)
def test_unauthenticated_link(self):
"""Test un-authenticated user linking"""
flow_manager = OAuthSourceFlowManager(

View File

@ -1,13 +1,11 @@
"""Test Transactional API"""
from django.urls import reverse
from guardian.shortcuts import assign_perm
from rest_framework.test import APITestCase
from authentik.core.models import Application, Group
from authentik.core.tests.utils import create_test_flow, create_test_user
from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.lib.generators import generate_id
from authentik.policies.models import PolicyBinding
from authentik.providers.oauth2.models import OAuth2Provider
@ -15,68 +13,12 @@ class TestTransactionalApplicationsAPI(APITestCase):
"""Test Transactional API"""
def setUp(self) -> None:
self.user = create_test_user()
assign_perm("authentik_core.add_application", self.user)
assign_perm("authentik_providers_oauth2.add_oauth2provider", self.user)
self.user = create_test_admin_user()
def test_create_transactional(self):
"""Test transactional Application + provider creation"""
self.client.force_login(self.user)
uid = generate_id()
response = self.client.put(
reverse("authentik_api:core-transactional-application"),
data={
"app": {
"name": uid,
"slug": uid,
},
"provider_model": "authentik_providers_oauth2.oauth2provider",
"provider": {
"name": uid,
"authorization_flow": str(create_test_flow().pk),
"invalidation_flow": str(create_test_flow().pk),
"redirect_uris": [],
},
},
)
self.assertJSONEqual(response.content.decode(), {"applied": True, "logs": []})
provider = OAuth2Provider.objects.filter(name=uid).first()
self.assertIsNotNone(provider)
app = Application.objects.filter(slug=uid).first()
self.assertIsNotNone(app)
self.assertEqual(app.provider.pk, provider.pk)
def test_create_transactional_permission_denied(self):
"""Test transactional Application + provider creation (missing permissions)"""
self.client.force_login(self.user)
uid = generate_id()
response = self.client.put(
reverse("authentik_api:core-transactional-application"),
data={
"app": {
"name": uid,
"slug": uid,
},
"provider_model": "authentik_providers_saml.samlprovider",
"provider": {
"name": uid,
"authorization_flow": str(create_test_flow().pk),
"invalidation_flow": str(create_test_flow().pk),
"acs_url": "https://goauthentik.io",
},
},
)
self.assertJSONEqual(
response.content.decode(),
{"provider": "User lacks permission to create authentik_providers_saml.samlprovider"},
)
def test_create_transactional_bindings(self):
"""Test transactional Application + provider creation"""
assign_perm("authentik_policies.add_policybinding", self.user)
self.client.force_login(self.user)
uid = generate_id()
group = Group.objects.create(name=generate_id())
authorization_flow = create_test_flow()
response = self.client.put(
reverse("authentik_api:core-transactional-application"),
@ -89,10 +31,7 @@ class TestTransactionalApplicationsAPI(APITestCase):
"provider": {
"name": uid,
"authorization_flow": str(authorization_flow.pk),
"invalidation_flow": str(authorization_flow.pk),
"redirect_uris": [],
},
"policy_bindings": [{"group": group.pk, "order": 0}],
},
)
self.assertJSONEqual(response.content.decode(), {"applied": True, "logs": []})
@ -101,10 +40,6 @@ class TestTransactionalApplicationsAPI(APITestCase):
app = Application.objects.filter(slug=uid).first()
self.assertIsNotNone(app)
self.assertEqual(app.provider.pk, provider.pk)
binding = PolicyBinding.objects.filter(target=app).first()
self.assertIsNotNone(binding)
self.assertEqual(binding.target, app)
self.assertEqual(binding.group, group)
def test_create_transactional_invalid(self):
"""Test transactional Application + provider creation"""
@ -121,46 +56,10 @@ class TestTransactionalApplicationsAPI(APITestCase):
"provider": {
"name": uid,
"authorization_flow": "",
"invalidation_flow": "",
"redirect_uris": [],
},
},
)
self.assertJSONEqual(
response.content.decode(),
{
"provider": {
"authorization_flow": ["This field may not be null."],
"invalidation_flow": ["This field may not be null."],
}
},
)
def test_create_transactional_duplicate_name_provider(self):
"""Test transactional Application + provider creation"""
self.client.force_login(self.user)
uid = generate_id()
OAuth2Provider.objects.create(
name=uid,
authorization_flow=create_test_flow(),
invalidation_flow=create_test_flow(),
)
response = self.client.put(
reverse("authentik_api:core-transactional-application"),
data={
"app": {
"name": uid,
"slug": uid,
},
"provider_model": "authentik_providers_oauth2.oauth2provider",
"provider": {
"name": uid,
"authorization_flow": str(create_test_flow().pk),
"invalidation_flow": str(create_test_flow().pk),
},
},
)
self.assertJSONEqual(
response.content.decode(),
{"provider": {"name": ["State is set to must_created and object exists already"]}},
{"provider": {"authorization_flow": ["This field may not be null."]}},
)
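Across these hunks the two sides agree on the overall request shape for the transactional endpoint and differ on whether `invalidation_flow`, structured `redirect_uris`, and `policy_bindings` are part of it. A hedged sketch of the payload with placeholder values; nothing below is copied from a live install:

```python
# Illustrative payload only; every PK and name below is a placeholder.
payload = {
    "app": {"name": "example-app", "slug": "example-app"},
    "provider_model": "authentik_providers_oauth2.oauth2provider",
    "provider": {
        "name": "example-app",
        "authorization_flow": "<authorization-flow-pk>",
        "invalidation_flow": "<invalidation-flow-pk>",  # only on the side that requires it
        "redirect_uris": [],  # only on the side that sends it
    },
    # Only accepted on the side that creates bindings in the same transaction:
    "policy_bindings": [{"group": "<group-pk>", "order": 0}],
}
# client.put(reverse("authentik_api:core-transactional-application"), data=payload)
```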

View File

@ -5,8 +5,8 @@ from channels.sessions import CookieMiddleware
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.urls import path
from django.views.decorators.csrf import ensure_csrf_cookie
from authentik.core.api.application_entitlements import ApplicationEntitlementViewSet
from authentik.core.api.applications import ApplicationViewSet
from authentik.core.api.authenticated_sessions import AuthenticatedSessionViewSet
from authentik.core.api.devices import AdminDeviceViewSet, DeviceViewSet
@ -24,6 +24,7 @@ from authentik.core.views.interface import (
InterfaceView,
RootRedirectView,
)
from authentik.core.views.session import EndSessionView
from authentik.flows.views.interface import FlowInterfaceView
from authentik.root.asgi_middleware import SessionMiddleware
from authentik.root.messages.consumer import MessageConsumer
@ -44,21 +45,26 @@ urlpatterns = [
# Interfaces
path(
"if/admin/",
BrandDefaultRedirectView.as_view(template_name="if/admin.html"),
ensure_csrf_cookie(BrandDefaultRedirectView.as_view(template_name="if/admin.html")),
name="if-admin",
),
path(
"if/user/",
BrandDefaultRedirectView.as_view(template_name="if/user.html"),
ensure_csrf_cookie(BrandDefaultRedirectView.as_view(template_name="if/user.html")),
name="if-user",
),
path(
"if/flow/<slug:flow_slug>/",
# FIXME: move this url to the flows app...also will cause all
# of the reverse calls to be adjusted
FlowInterfaceView.as_view(),
ensure_csrf_cookie(FlowInterfaceView.as_view()),
name="if-flow",
),
path(
"if/session-end/<slug:application_slug>/",
ensure_csrf_cookie(EndSessionView.as_view()),
name="if-session-end",
),
# Fallback for WS
path("ws/outpost/<uuid:pk>/", InterfaceView.as_view(template_name="if/admin.html")),
path(
@ -70,7 +76,6 @@ urlpatterns = [
api_urlpatterns = [
("core/authenticated_sessions", AuthenticatedSessionViewSet),
("core/applications", ApplicationViewSet),
("core/application_entitlements", ApplicationEntitlementViewSet),
path(
"core/transactional/applications/",
TransactionalApplicationView.as_view(),
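One side of the urls.py hunk wraps each interface view in `ensure_csrf_cookie`, so the CSRF cookie is set as soon as the single-page interface loads rather than only after a form view runs. A minimal sketch of that wrapping pattern with a generic view; the route, template, and name are placeholders:

```python
# Minimal sketch of the decorator-wrapping pattern; names are placeholders.
from django.urls import path
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.generic import TemplateView

urlpatterns = [
    path(
        "if/example/",
        ensure_csrf_cookie(TemplateView.as_view(template_name="if/example.html")),
        name="if-example",
    ),
]
```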

View File

@ -17,8 +17,10 @@ from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, FlowPlanner
from authentik.flows.stage import ChallengeStageView
from authentik.flows.views.executor import (
SESSION_KEY_APPLICATION_PRE,
SESSION_KEY_PLAN,
ToDefaultFlow,
)
from authentik.lib.utils.urls import redirect_with_qs
from authentik.stages.consent.stage import (
PLAN_CONTEXT_CONSENT_HEADER,
PLAN_CONTEXT_CONSENT_PERMISSIONS,
@ -56,7 +58,8 @@ class RedirectToAppLaunch(View):
except FlowNonApplicableException:
raise Http404 from None
plan.insert_stage(in_memory_stage(RedirectToAppStage))
return plan.to_redirect(request, flow)
request.session[SESSION_KEY_PLAN] = plan
return redirect_with_qs("authentik_core:if-flow", request.GET, flow_slug=flow.slug)
class RedirectToAppStage(ChallengeStageView):

View File

@ -16,7 +16,6 @@ from authentik.api.v3.config import ConfigView
from authentik.brands.api import CurrentBrandSerializer
from authentik.brands.models import Brand
from authentik.core.models import UserTypes
from authentik.lib.config import CONFIG
from authentik.policies.denied import AccessDeniedResponse
@ -52,7 +51,6 @@ class InterfaceView(TemplateView):
kwargs["version_subdomain"] = f"version-{LOCAL_VERSION.major}-{LOCAL_VERSION.minor}"
kwargs["build"] = get_build_hash()
kwargs["url_kwargs"] = self.kwargs
kwargs["base_url"] = self.request.build_absolute_uri(CONFIG.get("web.path", "/"))
return super().get_context_data(**kwargs)
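One side of this hunk adds a `base_url` to the interface context, derived from the `web.path` configuration key with `/` as the default. A hedged sketch of the lookup, assuming an environment where authentik's config helper is importable:

```python
# Sketch only: requires an environment where authentik.lib.config is importable.
from authentik.lib.config import CONFIG

# Falls back to "/" when web.path is unset, matching the call in the hunk above.
web_path = CONFIG.get("web.path", "/")
# In the view this becomes:
#   kwargs["base_url"] = self.request.build_absolute_uri(web_path)
```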

View File

@ -0,0 +1,23 @@
"""authentik Session Views"""
from typing import Any
from django.shortcuts import get_object_or_404
from django.views.generic.base import TemplateView
from authentik.core.models import Application
from authentik.policies.views import PolicyAccessView
class EndSessionView(TemplateView, PolicyAccessView):
"""Allow the client to end the Session"""
template_name = "if/end_session.html"
def resolve_provider_application(self):
self.application = get_object_or_404(Application, slug=self.kwargs["application_slug"])
def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
context = super().get_context_data(**kwargs)
context["application"] = self.application
return context
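This new view is the target of the `if/session-end/<application_slug>/` route registered in the urls.py hunk above. A hedged sketch of resolving it by name, assuming authentik's URL configuration is loaded and the core URLs keep their usual namespace; the slug is a placeholder:

```python
# Sketch only: assumes authentik's URL configuration is loaded.
from django.urls import reverse

url = reverse("authentik_core:if-session-end", kwargs={"application_slug": "example-app"})
# e.g. .../if/session-end/example-app/
```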

View File

@ -24,10 +24,10 @@ from rest_framework.fields import (
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
from authentik.api.authorization import SecretKeyFilter
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.crypto.apps import MANAGED_KEY
@ -35,7 +35,7 @@ from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg
from authentik.crypto.models import CertificateKeyPair
from authentik.events.models import Event, EventAction
from authentik.rbac.decorators import permission_required
from authentik.rbac.filters import ObjectFilter, SecretKeyFilter
from authentik.rbac.filters import ObjectFilter
LOGGER = get_logger()
@ -181,10 +181,7 @@ class CertificateDataSerializer(PassiveSerializer):
class CertificateGenerationSerializer(PassiveSerializer):
"""Certificate generation parameters"""
common_name = CharField(
validators=[UniqueValidator(queryset=CertificateKeyPair.objects.all())],
source="name",
)
common_name = CharField()
subject_alt_name = CharField(required=False, allow_blank=True, label=_("Subject-alt name"))
validity_days = IntegerField(initial=365)
alg = ChoiceField(default=PrivateKeyAlg.RSA, choices=PrivateKeyAlg.choices)
@ -245,10 +242,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
def generate(self, request: Request) -> Response:
"""Generate a new, self-signed certificate-key pair"""
data = CertificateGenerationSerializer(data=request.data)
data.is_valid(raise_exception=True)
if not data.is_valid():
return Response(data.errors, status=400)
raw_san = data.validated_data.get("subject_alt_name", "")
sans = raw_san.split(",") if raw_san != "" else []
builder = CertificateBuilder(data.validated_data["name"])
builder = CertificateBuilder(data.validated_data["common_name"])
builder.alg = data.validated_data["alg"]
builder.build(
subject_alt_names=sans,
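One side of this hunk moves the uniqueness check for the common name into the serializer via `UniqueValidator` and lets DRF raise on invalid input instead of returning errors manually; `source="name"` is also why the builder call reads `validated_data["name"]` rather than `["common_name"]`. A hedged sketch of that field pattern; the serializer class and common name below are placeholders, not authentik's:

```python
# Sketch only: requires Django REST framework and the CertificateKeyPair model.
from rest_framework.fields import CharField
from rest_framework.serializers import Serializer
from rest_framework.validators import UniqueValidator

from authentik.crypto.models import CertificateKeyPair


class GenerationParams(Serializer):  # placeholder name
    # source="name" stores the value under validated_data["name"].
    common_name = CharField(
        validators=[UniqueValidator(queryset=CertificateKeyPair.objects.all())],
        source="name",
    )


params = GenerationParams(data={"common_name": "example.internal"})  # placeholder value
params.is_valid(raise_exception=True)  # a duplicate name raises and yields an HTTP 400 response
```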

View File

@ -85,5 +85,5 @@ def certificate_discovery(self: SystemTask):
if dirty:
cert.save()
self.set_status(
TaskStatus.SUCCESSFUL, _("Successfully imported {count} files.".format(count=discovered))
TaskStatus.SUCCESSFUL, _("Successfully imported %(count)d files." % {"count": discovered})
)
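Both sides of this hunk interpolate the count before handing the string to the translation call, differing only in `str.format` versus printf-style substitution; note that a gettext catalog lookup normally matches the untranslated placeholder string, so substitution usually happens after `_()`. A small standalone illustration of the two substitution styles, with `_` stubbed as a no-op:

```python
# Standalone illustration; _ stands in for Django's gettext and does nothing.
def _(message: str) -> str:
    return message

discovered = 3
print(_("Successfully imported {count} files.".format(count=discovered)))
print(_("Successfully imported %(count)d files." % {"count": discovered}))
# Both print: Successfully imported 3 files.
```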

Some files were not shown because too many files have changed in this diff.