Compare commits


5 Commits

Author SHA1 Message Date
36e418436b Revert "Revert "core: applications api: add option to only list apps with launch url (#10336)""
This reverts commit 1eb9d7a9d6.
2024-07-04 12:58:29 +02:00
8b0058ada0 detect RAC differently
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2024-07-04 12:58:25 +02:00
3abf1421a2 return full URL when possible
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2024-07-04 12:57:56 +02:00
70c7454c69 return authentik launch URL
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2024-07-04 12:52:26 +02:00
1eb9d7a9d6 Revert "core: applications api: add option to only list apps with launch url (#10336)"
This reverts commit 42e0ff6492.
2024-07-04 12:50:46 +02:00
1943 changed files with 80726 additions and 197330 deletions

View File

@@ -1,16 +1,16 @@
[bumpversion]
-current_version = 2024.12.3
+current_version = 2024.6.0
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
serialize =
    {major}.{minor}.{patch}-{rc_t}{rc_n}
    {major}.{minor}.{patch}
message = release: {new_version}
tag_name = version/{new_version}
[bumpversion:part:rc_t]
values =
    rc
    final
optional_value = final
@@ -30,5 +30,3 @@ optional_value = final
[bumpversion:file:internal/constants/constants.go]
[bumpversion:file:web/src/common/constants.ts]
-[bumpversion:file:lifecycle/aws/template.yaml]
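Only `current_version` and the AWS template entry differ here; the `parse`/`serialize` rules are identical on both sides. As a quick illustration (a standalone Python sketch, not a file from the repository, and assuming the doubled backslash in the cfg resolves to a plain `\d`), this is how that parse expression splits a plain release versus a release-candidate version:

```python
import re

# Version pattern taken from the .bumpversion.cfg hunk above.
VERSION_RE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

for candidate in ("2024.6.0", "2024.6.0-rc1"):
    match = VERSION_RE.match(candidate)
    # "2024.6.0" -> rc_t/rc_n are None; "2024.6.0-rc1" -> rc_t="rc", rc_n="1"
    print(candidate, match.groupdict() if match else "no match")
```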

View File

@@ -35,6 +35,14 @@ runs:
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
+For arm64, use these values:
+```shell
+AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
+AUTHENTIK_TAG=${{ inputs.tag }}-arm64
+AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
+```
Afterwards, run the upgrade commands from the latest release notes.
</details>
<details>
@@ -46,10 +54,20 @@ runs:
authentik:
  outposts:
    container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
-global:
-  image:
-    repository: ghcr.io/goauthentik/dev-server
-    tag: ${{ inputs.tag }}
+image:
+  repository: ghcr.io/goauthentik/dev-server
+  tag: ${{ inputs.tag }}
```
+For arm64, use these values:
+```yaml
+authentik:
+  outposts:
+    container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
+image:
+  repository: ghcr.io/goauthentik/dev-server
+  tag: ${{ inputs.tag }}-arm64
+```
Afterwards, run the upgrade commands from the latest release notes.

View File

@ -9,14 +9,11 @@ inputs:
image-arch: image-arch:
required: false required: false
description: "Docker image arch" description: "Docker image arch"
release:
required: true
description: "True if this is a release build, false if this is a dev/PR build"
outputs: outputs:
shouldPush: shouldBuild:
description: "Whether to push the image or not" description: "Whether to build image or not"
value: ${{ steps.ev.outputs.shouldPush }} value: ${{ steps.ev.outputs.shouldBuild }}
sha: sha:
description: "sha" description: "sha"
@ -32,24 +29,9 @@ outputs:
imageTags: imageTags:
description: "Docker image tags" description: "Docker image tags"
value: ${{ steps.ev.outputs.imageTags }} value: ${{ steps.ev.outputs.imageTags }}
imageTagsJSON:
description: "Docker image tags, as a JSON array"
value: ${{ steps.ev.outputs.imageTagsJSON }}
attestImageNames:
description: "Docker image names used for attestation"
value: ${{ steps.ev.outputs.attestImageNames }}
cacheTo:
description: "cache-to value for the docker build step"
value: ${{ steps.ev.outputs.cacheTo }}
imageMainTag: imageMainTag:
description: "Docker image main tag" description: "Docker image main tag"
value: ${{ steps.ev.outputs.imageMainTag }} value: ${{ steps.ev.outputs.imageMainTag }}
imageMainName:
description: "Docker image main name"
value: ${{ steps.ev.outputs.imageMainName }}
imageBuildArgs:
description: "Docker image build args"
value: ${{ steps.ev.outputs.imageBuildArgs }}
runs: runs:
using: "composite" using: "composite"
@ -60,8 +42,6 @@ runs:
env: env:
IMAGE_NAME: ${{ inputs.image-name }} IMAGE_NAME: ${{ inputs.image-name }}
IMAGE_ARCH: ${{ inputs.image-arch }} IMAGE_ARCH: ${{ inputs.image-arch }}
RELEASE: ${{ inputs.release }}
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }} PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
REF: ${{ github.ref }}
run: | run: |
python3 ${{ github.action_path }}/push_vars.py python3 ${{ github.action_path }}/push_vars.py

View File

@@ -2,20 +2,12 @@
import configparser
import os
-from json import dumps
from time import time
parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
-# Decide if we should push the image or not
-should_push = True
-if len(os.environ.get("DOCKER_USERNAME", "")) < 1:
-    # Don't push if we don't have DOCKER_USERNAME, i.e. no secrets are available
-    should_push = False
-if os.environ.get("GITHUB_REPOSITORY").lower() == "goauthentik/authentik-internal":
-    # Don't push on the internal repo
-    should_push = False
+should_build = str(os.environ.get("DOCKER_USERNAME", None) is not None).lower()
branch_name = os.environ["GITHUB_REF"]
if os.environ.get("GITHUB_HEAD_REF", "") != "":
@@ -49,7 +41,7 @@ if is_release:
    ]
else:
    suffix = ""
-    if image_arch:
+    if image_arch and image_arch != "amd64":
        suffix = f"-{image_arch}"
    for name in image_names:
        image_tags += [
@@ -58,44 +50,13 @@ else:
            f"{name}:gh-{safe_branch_name}-{int(time())}-{sha[:7]}{suffix}",  # Use by FluxCD
        ]
-image_main_tag = image_tags[0].split(":")[-1]
-def get_attest_image_names(image_with_tags: list[str]):
-    """Attestation only for GHCR"""
-    image_tags = []
-    for image_name in set(name.split(":")[0] for name in image_with_tags):
-        if not image_name.startswith("ghcr.io"):
-            continue
-        image_tags.append(image_name)
-    return ",".join(set(image_tags))
-# Generate `cache-to` param
-cache_to = ""
-if should_push:
-    _cache_tag = "buildcache"
-    if image_arch:
-        _cache_tag += f"-{image_arch}"
-    cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max"
-image_build_args = []
-if os.getenv("RELEASE", "false").lower() == "true":
-    image_build_args = [f"VERSION={os.getenv('REF')}"]
-else:
-    image_build_args = [f"GIT_BUILD_HASH={sha}"]
-image_build_args = "\n".join(image_build_args)
+image_main_tag = image_tags[0]
+image_tags_rendered = ",".join(image_tags)
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
-    print(f"shouldPush={str(should_push).lower()}", file=_output)
+    print(f"shouldBuild={should_build}", file=_output)
    print(f"sha={sha}", file=_output)
    print(f"version={version}", file=_output)
    print(f"prerelease={prerelease}", file=_output)
-    print(f"imageTags={','.join(image_tags)}", file=_output)
-    print(f"imageTagsJSON={dumps(image_tags)}", file=_output)
-    print(f"attestImageNames={get_attest_image_names(image_tags)}", file=_output)
+    print(f"imageTags={image_tags_rendered}", file=_output)
    print(f"imageMainTag={image_main_tag}", file=_output)
-    print(f"imageMainName={image_tags[0]}", file=_output)
-    print(f"cacheTo={cache_to}", file=_output)
-    print(f"imageBuildArgs={image_build_args}", file=_output)

View File

@@ -1,18 +1,7 @@
#!/bin/bash -x
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
-# Non-pushing PR
GITHUB_OUTPUT=/dev/stdout \
GITHUB_REF=ref \
GITHUB_SHA=sha \
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
-GITHUB_REPOSITORY=goauthentik/authentik \
-python $SCRIPT_DIR/push_vars.py
-# Pushing PR/main
-GITHUB_OUTPUT=/dev/stdout \
-GITHUB_REF=ref \
-GITHUB_SHA=sha \
-IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
-GITHUB_REPOSITORY=goauthentik/authentik \
-DOCKER_USERNAME=foo \
python $SCRIPT_DIR/push_vars.py

View File

@ -14,7 +14,7 @@ runs:
run: | run: |
pipx install poetry || true pipx install poetry || true
sudo apt-get update sudo apt-get update
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry - name: Setup python and restore poetry
uses: actions/setup-python@v5 uses: actions/setup-python@v5
with: with:
@ -35,7 +35,7 @@ runs:
run: | run: |
export PSQL_TAG=${{ inputs.postgresql_version }} export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/docker-compose.yml up -d docker compose -f .github/actions/setup/docker-compose.yml up -d
poetry install --sync poetry install
cd web && npm ci cd web && npm ci
- name: Generate config - name: Generate config
shell: poetry run python {0} shell: poetry run python {0}

View File

@ -21,9 +21,7 @@ updates:
labels: labels:
- dependencies - dependencies
- package-ecosystem: npm - package-ecosystem: npm
directories: directory: "/web"
- "/web"
- "/web/sfe"
schedule: schedule:
interval: daily interval: daily
time: "04:00" time: "04:00"
@ -32,6 +30,7 @@ updates:
open-pull-requests-limit: 10 open-pull-requests-limit: 10
commit-message: commit-message:
prefix: "web:" prefix: "web:"
# TODO: deduplicate these groups
groups: groups:
sentry: sentry:
patterns: patterns:
@ -43,11 +42,9 @@ updates:
- "babel-*" - "babel-*"
eslint: eslint:
patterns: patterns:
- "@eslint/*"
- "@typescript-eslint/*" - "@typescript-eslint/*"
- "eslint-*"
- "eslint" - "eslint"
- "typescript-eslint" - "eslint-*"
storybook: storybook:
patterns: patterns:
- "@storybook/*" - "@storybook/*"
@ -55,16 +52,42 @@ updates:
esbuild: esbuild:
patterns: patterns:
- "@esbuild/*" - "@esbuild/*"
- "esbuild*"
rollup: rollup:
patterns: patterns:
- "@rollup/*" - "@rollup/*"
- "rollup-*" - "rollup-*"
- "rollup*" - package-ecosystem: npm
swc: directory: "/tests/wdio"
schedule:
interval: daily
time: "04:00"
labels:
- dependencies
open-pull-requests-limit: 10
commit-message:
prefix: "web:"
# TODO: deduplicate these groups
groups:
sentry:
patterns: patterns:
- "@swc/*" - "@sentry/*"
- "swc-*" - "@spotlightjs/*"
babel:
patterns:
- "@babel/*"
- "babel-*"
eslint:
patterns:
- "@typescript-eslint/*"
- "eslint"
- "eslint-*"
storybook:
patterns:
- "@storybook/*"
- "*storybook*"
esbuild:
patterns:
- "@esbuild/*"
wdio: wdio:
patterns: patterns:
- "@wdio/*" - "@wdio/*"
@ -82,16 +105,6 @@ updates:
docusaurus: docusaurus:
patterns: patterns:
- "@docusaurus/*" - "@docusaurus/*"
- package-ecosystem: npm
directory: "/lifecycle/aws"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "lifecycle/aws:"
labels:
- dependencies
- package-ecosystem: pip - package-ecosystem: pip
directory: "/" directory: "/"
schedule: schedule:

View File

@@ -1,7 +1,7 @@
<!--
👋 Hi there! Welcome.
-Please check the Contributing guidelines: https://docs.goauthentik.io/docs/developer-docs/#how-can-i-contribute
+Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute
-->
## Details

View File

@ -1,96 +0,0 @@
# Re-usable workflow for a single-architecture build
name: Single-arch Container build
on:
workflow_call:
inputs:
image_name:
required: true
type: string
image_arch:
required: true
type: string
runs-on:
required: true
type: string
registry_dockerhub:
default: false
type: boolean
registry_ghcr:
default: false
type: boolean
release:
default: false
type: boolean
outputs:
image-digest:
value: ${{ jobs.build.outputs.image-digest }}
jobs:
build:
name: Build ${{ inputs.image_arch }}
runs-on: ${{ inputs.runs-on }}
outputs:
image-digest: ${{ steps.push.outputs.digest }}
permissions:
# Needed to upload container images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
steps:
- uses: actions/checkout@v4
- uses: docker/setup-qemu-action@v3.4.0
- uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
image-arch: ${{ inputs.image_arch }}
release: ${{ inputs.release }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: make empty clients
if: ${{ inputs.release }}
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: generate ts client
if: ${{ !inputs.release }}
run: make gen-client-ts
- name: Build Docker Image
uses: docker/build-push-action@v6
id: push
with:
context: .
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
build-args: |
${{ steps.ev.outputs.imageBuildArgs }}
tags: ${{ steps.ev.outputs.imageTags }}
platforms: linux/${{ inputs.image_arch }}
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
cache-to: ${{ steps.ev.outputs.cacheTo }}
- uses: actions/attest-build-provenance@v2
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true

View File

@ -1,104 +0,0 @@
# Re-usable workflow for a multi-architecture build
name: Multi-arch container build
on:
workflow_call:
inputs:
image_name:
required: true
type: string
registry_dockerhub:
default: false
type: boolean
registry_ghcr:
default: true
type: boolean
release:
default: false
type: boolean
outputs: {}
jobs:
build-server-amd64:
uses: ./.github/workflows/_reusable-docker-build-single.yaml
secrets: inherit
with:
image_name: ${{ inputs.image_name }}
image_arch: amd64
runs-on: ubuntu-latest
registry_dockerhub: ${{ inputs.registry_dockerhub }}
registry_ghcr: ${{ inputs.registry_ghcr }}
release: ${{ inputs.release }}
build-server-arm64:
uses: ./.github/workflows/_reusable-docker-build-single.yaml
secrets: inherit
with:
image_name: ${{ inputs.image_name }}
image_arch: arm64
runs-on: ubuntu-22.04-arm
registry_dockerhub: ${{ inputs.registry_dockerhub }}
registry_ghcr: ${{ inputs.registry_ghcr }}
release: ${{ inputs.release }}
get-tags:
runs-on: ubuntu-latest
needs:
- build-server-amd64
- build-server-arm64
outputs:
tags: ${{ steps.ev.outputs.imageTagsJSON }}
shouldPush: ${{ steps.ev.outputs.shouldPush }}
steps:
- uses: actions/checkout@v4
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
merge-server:
runs-on: ubuntu-latest
if: ${{ needs.get-tags.outputs.shouldPush == 'true' }}
needs:
- get-tags
- build-server-amd64
- build-server-arm64
strategy:
fail-fast: false
matrix:
tag: ${{ fromJson(needs.get-tags.outputs.tags) }}
steps:
- uses: actions/checkout@v4
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: int128/docker-manifest-create-action@v2
id: build
with:
tags: ${{ matrix.tag }}
sources: |
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
- uses: actions/attest-build-provenance@v2
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.build.outputs.digest }}
push-to-registry: true

View File

@ -7,7 +7,6 @@ on:
workflow_dispatch: workflow_dispatch:
jobs: jobs:
build: build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: permissions:
id-token: write id-token: write

View File

@ -7,7 +7,6 @@ on:
workflow_dispatch: workflow_dispatch:
jobs: jobs:
build: build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- id: generate_token - id: generate_token
@ -32,16 +31,11 @@ jobs:
env: env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}
- name: Upgrade /web - name: Upgrade /web
working-directory: web working-directory: web/
run: | run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'` export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION npm i @goauthentik/api@$VERSION
- name: Upgrade /web/packages/sfe - uses: peter-evans/create-pull-request@v6
working-directory: web/packages/sfe
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- uses: peter-evans/create-pull-request@v7
id: cpr id: cpr
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ steps.generate_token.outputs.token }}

View File

@ -1,46 +0,0 @@
name: authentik-ci-aws-cfn
on:
push:
branches:
- main
- next
- version-*
pull_request:
branches:
- main
- version-*
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
check-changes-applied:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: actions/setup-node@v4
with:
node-version-file: lifecycle/aws/package.json
cache: "npm"
cache-dependency-path: lifecycle/aws/package-lock.json
- working-directory: lifecycle/aws/
run: |
npm ci
- name: Check changes have been applied
run: |
poetry run make aws-cfn
git diff --exit-code
ci-aws-cfn-mark:
if: always()
needs:
- check-changes-applied
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}

View File

@ -1,28 +0,0 @@
---
name: authentik-ci-main-daily
on:
workflow_dispatch:
schedule:
# Every night at 3am
- cron: "0 3 * * *"
jobs:
test-container:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
version:
- docs
- version-2024-12
- version-2024-10
steps:
- uses: actions/checkout@v4
- run: |
current="$(pwd)"
dir="/tmp/authentik/${{ matrix.version }}"
mkdir -p $dir
cd $dir
wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml
${current}/scripts/test_docker.sh

View File

@ -43,26 +43,15 @@ jobs:
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: run migrations - name: run migrations
run: poetry run python -m lifecycle.migrate run: poetry run python -m lifecycle.migrate
test-make-seed:
runs-on: ubuntu-latest
steps:
- id: seed
run: |
echo "seed=$(printf "%d\n" "0x$(openssl rand -hex 4)")" >> "$GITHUB_OUTPUT"
outputs:
seed: ${{ steps.seed.outputs.seed }}
test-migrations-from-stable: test-migrations-from-stable:
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 20
needs: test-make-seed
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
psql: psql:
- 15-alpine - 15-alpine
- 16-alpine - 16-alpine
run_id: [1, 2, 3, 4, 5]
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
@ -104,23 +93,18 @@ jobs:
env: env:
# Test in the main database that we just migrated from the previous stable version # Test in the main database that we just migrated from the previous stable version
AUTHENTIK_POSTGRESQL__TEST__NAME: authentik AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
run: | run: |
poetry run make ci-test poetry run make test
test-unittest: test-unittest:
name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 name: test-unittest - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 20 timeout-minutes: 30
needs: test-make-seed
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
psql: psql:
- 15-alpine - 15-alpine
- 16-alpine - 16-alpine
run_id: [1, 2, 3, 4, 5]
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Setup authentik env - name: Setup authentik env
@ -128,23 +112,14 @@ jobs:
with: with:
postgresql_version: ${{ matrix.psql }} postgresql_version: ${{ matrix.psql }}
- name: run unittest - name: run unittest
env:
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
run: | run: |
poetry run make ci-test poetry run make test
poetry run coverage xml
- if: ${{ always() }} - if: ${{ always() }}
uses: codecov/codecov-action@v5 uses: codecov/codecov-action@v4
with: with:
flags: unit flags: unit
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: unit
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
test-integration: test-integration:
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 30 timeout-minutes: 30
@ -153,22 +128,16 @@ jobs:
- name: Setup authentik env - name: Setup authentik env
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: Create k8s Kind Cluster - name: Create k8s Kind Cluster
uses: helm/kind-action@v1.12.0 uses: helm/kind-action@v1.10.0
- name: run integration - name: run integration
run: | run: |
poetry run coverage run manage.py test tests/integration poetry run coverage run manage.py test tests/integration
poetry run coverage xml poetry run coverage xml
- if: ${{ always() }} - if: ${{ always() }}
uses: codecov/codecov-action@v5 uses: codecov/codecov-action@v4
with: with:
flags: integration flags: integration
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: integration
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
test-e2e: test-e2e:
name: test-e2e (${{ matrix.job.name }}) name: test-e2e (${{ matrix.job.name }})
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -199,7 +168,7 @@ jobs:
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc) - name: Setup e2e env (chrome, etc)
run: | run: |
docker compose -f tests/e2e/docker-compose.yml up -d --quiet-pull docker compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web - id: cache-web
uses: actions/cache@v4 uses: actions/cache@v4
with: with:
@ -217,18 +186,11 @@ jobs:
poetry run coverage run manage.py test ${{ matrix.job.glob }} poetry run coverage run manage.py test ${{ matrix.job.glob }}
poetry run coverage xml poetry run coverage xml
- if: ${{ always() }} - if: ${{ always() }}
uses: codecov/codecov-action@v5 uses: codecov/codecov-action@v4
with: with:
flags: e2e flags: e2e
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: e2e
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
ci-core-mark: ci-core-mark:
if: always()
needs: needs:
- lint - lint
- test-migrations - test-migrations
@@ -238,22 +200,59 @@ jobs:
      - test-e2e
    runs-on: ubuntu-latest
    steps:
-      - uses: re-actors/alls-green@release/v1
-        with:
-          jobs: ${{ toJSON(needs) }}
+      - run: echo mark
  build:
-    permissions:
-      # Needed to upload container images to ghcr.io
-      packages: write
-      # Needed for attestation
-      id-token: write
-      attestations: write
+    strategy:
+      fail-fast: false
+      matrix:
+        arch:
+          - amd64
+          - arm64
    needs: ci-core-mark
-    uses: ./.github/workflows/_reusable-docker-build.yaml
-    secrets: inherit
-    with:
-      image_name: ghcr.io/goauthentik/dev-server
-      release: false
+    runs-on: ubuntu-latest
+    permissions:
+      # Needed to upload contianer images to ghcr.io
+      packages: write
+    timeout-minutes: 120
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3.0.0
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+      - name: prepare variables
+        uses: ./.github/actions/docker-push-variables
+        id: ev
+        env:
+          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+        with:
+          image-name: ghcr.io/goauthentik/dev-server
+          image-arch: ${{ matrix.arch }}
+      - name: Login to Container Registry
+        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: generate ts client
+        run: make gen-client-ts
+      - name: Build Docker Image
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          secrets: |
+            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
+            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
+          tags: ${{ steps.ev.outputs.imageTags }}
+          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
+          build-args: |
+            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
+          cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache
+          cache-to: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max
+          platforms: linux/${{ matrix.arch }}
  pr-comment:
    needs:
      - build
@@ -275,7 +274,6 @@ jobs:
        with:
          image-name: ghcr.io/goauthentik/dev-server
      - name: Comment on PR
-        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
        uses: ./.github/actions/comment-pr-instructions
        with:
-          tag: ${{ steps.ev.outputs.imageMainTag }}
+          tag: gh-${{ steps.ev.outputs.imageMainTag }}

View File

@ -31,7 +31,7 @@ jobs:
- name: golangci-lint - name: golangci-lint
uses: golangci/golangci-lint-action@v6 uses: golangci/golangci-lint-action@v6
with: with:
version: latest version: v1.54.2
args: --timeout 5000s --verbose args: --timeout 5000s --verbose
skip-cache: true skip-cache: true
test-unittest: test-unittest:
@ -49,15 +49,12 @@ jobs:
run: | run: |
go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./... go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./...
ci-outpost-mark: ci-outpost-mark:
if: always()
needs: needs:
- lint-golint - lint-golint
- test-unittest - test-unittest
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: re-actors/alls-green@release/v1 - run: echo mark
with:
jobs: ${{ toJSON(needs) }}
build-container: build-container:
timeout-minutes: 120 timeout-minutes: 120
needs: needs:
@ -72,17 +69,14 @@ jobs:
- rac - rac
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: permissions:
# Needed to upload container images to ghcr.io # Needed to upload contianer images to ghcr.io
packages: write packages: write
# Needed for attestation
id-token: write
attestations: write
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
ref: ${{ github.event.pull_request.head.sha }} ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v3.4.0 uses: docker/setup-qemu-action@v3.0.0
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v3
- name: prepare variables - name: prepare variables
@ -93,7 +87,7 @@ jobs:
with: with:
image-name: ghcr.io/goauthentik/dev-${{ matrix.type }} image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
- name: Login to Container Registry - name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }} if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
uses: docker/login-action@v3 uses: docker/login-action@v3
with: with:
registry: ghcr.io registry: ghcr.io
@ -102,25 +96,17 @@ jobs:
- name: Generate API - name: Generate API
run: make gen-client-go run: make gen-client-go
- name: Build Docker Image - name: Build Docker Image
id: push
uses: docker/build-push-action@v6 uses: docker/build-push-action@v6
with: with:
tags: ${{ steps.ev.outputs.imageTags }} tags: ${{ steps.ev.outputs.imageTags }}
file: ${{ matrix.type }}.Dockerfile file: ${{ matrix.type }}.Dockerfile
push: ${{ steps.ev.outputs.shouldPush == 'true' }} push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
build-args: | build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
context: . context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }} cache-to: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache,mode=max
- uses: actions/attest-build-provenance@v2
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
build-binary: build-binary:
timeout-minutes: 120 timeout-minutes: 120
needs: needs:

View File

@ -24,11 +24,20 @@ jobs:
- prettier-check - prettier-check
project: project:
- web - web
- tests/wdio
include: include:
- command: tsc
project: web
- command: lit-analyse - command: lit-analyse
project: web project: web
extra_setup: |
# lit-analyse doesn't understand path rewrites, so make it
# belive it's an actual module
cd node_modules/@goauthentik
ln -s ../../src/ web
exclude:
- command: lint:lockfile
project: tests/wdio
- command: tsc
project: tests/wdio
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
@ -39,12 +48,21 @@ jobs:
- working-directory: ${{ matrix.project }}/ - working-directory: ${{ matrix.project }}/
run: | run: |
npm ci npm ci
${{ matrix.extra_setup }}
- name: Generate API - name: Generate API
run: make gen-client-ts run: make gen-client-ts
- name: Lint - name: Lint
working-directory: ${{ matrix.project }}/ working-directory: ${{ matrix.project }}/
run: npm run ${{ matrix.command }} run: npm run ${{ matrix.command }}
ci-web-mark:
needs:
- lint
runs-on: ubuntu-latest
steps:
- run: echo mark
build: build:
needs:
- ci-web-mark
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -60,16 +78,6 @@ jobs:
- name: build - name: build
working-directory: web/ working-directory: web/
run: npm run build run: npm run build
ci-web-mark:
if: always()
needs:
- build
- lint
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
test: test:
needs: needs:
- ci-web-mark - ci-web-mark
@ -87,4 +95,4 @@ jobs:
run: make gen-client-ts run: make gen-client-ts
- name: test - name: test
working-directory: web/ working-directory: web/
run: npm run test || exit 0 run: npm run test

View File

@ -62,13 +62,10 @@ jobs:
working-directory: website/ working-directory: website/
run: npm run ${{ matrix.job }} run: npm run ${{ matrix.job }}
ci-website-mark: ci-website-mark:
if: always()
needs: needs:
- lint - lint
- test - test
- build - build
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: re-actors/alls-green@release/v1 - run: echo mark
with:
jobs: ${{ toJSON(needs) }}

View File

@ -11,7 +11,6 @@ env:
jobs: jobs:
build: build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- id: generate_token - id: generate_token
@ -25,7 +24,7 @@ jobs:
- name: Setup authentik env - name: Setup authentik env
uses: ./.github/actions/setup uses: ./.github/actions/setup
- run: poetry run ak update_webauthn_mds - run: poetry run ak update_webauthn_mds
- uses: peter-evans/create-pull-request@v7 - uses: peter-evans/create-pull-request@v6
id: cpr id: cpr
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ steps.generate_token.outputs.token }}

View File

@ -7,7 +7,6 @@ on:
jobs: jobs:
clean-ghcr: clean-ghcr:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
name: Delete old unused container images name: Delete old unused container images
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:

View File

@ -42,7 +42,7 @@ jobs:
with: with:
githubToken: ${{ steps.generate_token.outputs.token }} githubToken: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }} compressOnly: ${{ github.event_name != 'pull_request' }}
- uses: peter-evans/create-pull-request@v7 - uses: peter-evans/create-pull-request@v6
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
id: cpr id: cpr
with: with:

View File

@ -12,7 +12,6 @@ env:
jobs: jobs:
publish-source-docs: publish-source-docs:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 120 timeout-minutes: 120
steps: steps:

View File

@ -11,7 +11,6 @@ permissions:
jobs: jobs:
update-next: update-next:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
environment: internal-production environment: internal-production
steps: steps:

View File

@@ -7,27 +7,53 @@ on:
jobs:
  build-server:
-    uses: ./.github/workflows/_reusable-docker-build.yaml
-    secrets: inherit
+    runs-on: ubuntu-latest
    permissions:
-      # Needed to upload container images to ghcr.io
+      # Needed to upload contianer images to ghcr.io
      packages: write
-      # Needed for attestation
-      id-token: write
-      attestations: write
-    with:
-      image_name: ghcr.io/goauthentik/server,beryju/authentik
-      release: true
-      registry_dockerhub: true
-      registry_ghcr: true
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3.0.0
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+      - name: prepare variables
+        uses: ./.github/actions/docker-push-variables
+        id: ev
+        env:
+          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+        with:
+          image-name: ghcr.io/goauthentik/server,beryju/authentik
+      - name: Docker Login Registry
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: make empty clients
+        run: |
+          mkdir -p ./gen-ts-api
+          mkdir -p ./gen-go-api
+      - name: Build Docker Image
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          push: true
+          secrets: |
+            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
+            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
+          tags: ${{ steps.ev.outputs.imageTags }}
+          platforms: linux/amd64,linux/arm64
  build-outpost:
    runs-on: ubuntu-latest
    permissions:
-      # Needed to upload container images to ghcr.io
+      # Needed to upload contianer images to ghcr.io
      packages: write
-      # Needed for attestation
-      id-token: write
-      attestations: write
    strategy:
      fail-fast: false
      matrix:
@ -42,7 +68,7 @@ jobs:
with: with:
go-version-file: "go.mod" go-version-file: "go.mod"
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v3.4.0 uses: docker/setup-qemu-action@v3.0.0
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v3
- name: prepare variables - name: prepare variables
@ -69,21 +95,12 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }} password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image - name: Build Docker Image
uses: docker/build-push-action@v6 uses: docker/build-push-action@v6
id: push
with: with:
push: true push: true
build-args: |
VERSION=${{ github.ref }}
tags: ${{ steps.ev.outputs.imageTags }} tags: ${{ steps.ev.outputs.imageTags }}
file: ${{ matrix.type }}.Dockerfile file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
context: . context: .
- uses: actions/attest-build-provenance@v2
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
build-outpost-binary: build-outpost-binary:
timeout-minutes: 120 timeout-minutes: 120
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -128,27 +145,6 @@ jobs:
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
tag: ${{ github.ref }} tag: ${{ github.ref }}
upload-aws-cfn-template:
permissions:
# Needed for AWS login
id-token: write
contents: read
needs:
- build-server
- build-outpost
env:
AWS_REGION: eu-central-1
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: aws-actions/configure-aws-credentials@v4
with:
role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik"
aws-region: ${{ env.AWS_REGION }}
- name: Upload template
run: |
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml
test-release: test-release:
needs: needs:
- build-server - build-server
@ -182,8 +178,8 @@ jobs:
image-name: ghcr.io/goauthentik/server image-name: ghcr.io/goauthentik/server
- name: Get static files from docker image - name: Get static files from docker image
run: | run: |
docker pull ${{ steps.ev.outputs.imageMainName }} docker pull ${{ steps.ev.outputs.imageMainTag }}
container=$(docker container create ${{ steps.ev.outputs.imageMainName }}) container=$(docker container create ${{ steps.ev.outputs.imageMainTag }})
docker cp ${container}:web/ . docker cp ${container}:web/ .
- name: Create a Sentry.io release - name: Create a Sentry.io release
uses: getsentry/action-release@v1 uses: getsentry/action-release@v1

View File

@@ -14,7 +14,16 @@ jobs:
      - uses: actions/checkout@v4
      - name: Pre-release test
        run: |
-          make test-docker
+          echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
+          echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
+          docker buildx install
+          mkdir -p ./gen-ts-api
+          docker build -t testing:latest .
+          echo "AUTHENTIK_IMAGE=testing" >> .env
+          echo "AUTHENTIK_TAG=latest" >> .env
+          docker compose up --no-start
+          docker compose start postgresql redis
+          docker compose run -u root server test-all
      - id: generate_token
        uses: tibdex/github-app-token@v2
        with:

View File

@ -1,21 +0,0 @@
name: "authentik-repo-mirror"
on: [push, delete]
jobs:
to_internal:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- if: ${{ env.MIRROR_KEY != '' }}
uses: pixta-dev/repository-mirroring-action@v1
with:
target_repo_url:
git@github.com:goauthentik/authentik-internal.git
ssh_private_key:
${{ secrets.GH_MIRROR_KEY }}
env:
MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }}

View File

@ -1,8 +1,8 @@
name: "authentik-repo-stale" name: 'authentik-repo-stale'
on: on:
schedule: schedule:
- cron: "30 1 * * *" - cron: '30 1 * * *'
workflow_dispatch: workflow_dispatch:
permissions: permissions:
@ -11,7 +11,6 @@ permissions:
jobs: jobs:
stale: stale:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- id: generate_token - id: generate_token
@ -25,7 +24,7 @@ jobs:
days-before-stale: 60 days-before-stale: 60
days-before-close: 7 days-before-close: 7
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
stale-issue-label: status/stale stale-issue-label: wontfix
stale-issue-message: > stale-issue-message: >
This issue has been automatically marked as stale because it has not had This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you recent activity. It will be closed if no further activity occurs. Thank you

View File

@ -32,7 +32,7 @@ jobs:
poetry run ak compilemessages poetry run ak compilemessages
make web-check-compile make web-check-compile
- name: Create Pull Request - name: Create Pull Request
uses: peter-evans/create-pull-request@v7 uses: peter-evans/create-pull-request@v6
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ steps.generate_token.outputs.token }}
branch: extract-compile-backend-translation branch: extract-compile-backend-translation

3
.gitignore vendored
View File

@@ -209,6 +209,3 @@ source_docs/
### Golang ###
/vendor/
-### Docker ###
-docker-compose.override.yml

View File

@@ -2,7 +2,6 @@
    "recommendations": [
        "bashmish.es6-string-css",
        "bpruitt-goddard.mermaid-markdown-syntax-highlighting",
-        "charliermarsh.ruff",
        "dbaeumer.vscode-eslint",
        "EditorConfig.EditorConfig",
        "esbenp.prettier-vscode",
@@ -11,10 +10,10 @@
        "Gruntfuggly.todo-tree",
        "mechatroner.rainbow-csv",
        "ms-python.black-formatter",
-        "ms-python.black-formatter",
-        "ms-python.debugpy",
+        "charliermarsh.ruff",
        "ms-python.python",
        "ms-python.vscode-pylance",
+        "ms-python.black-formatter",
        "redhat.vscode-yaml",
        "Tobermory.es6-string-html",
        "unifiedjs.vscode-mdx",

66
.vscode/launch.json vendored
View File

@@ -2,76 +2,26 @@
    "version": "0.2.0",
    "configurations": [
        {
-            "name": "Debug: Attach Server Core",
-            "type": "debugpy",
+            "name": "Python: PDB attach Server",
+            "type": "python",
            "request": "attach",
            "connect": {
                "host": "localhost",
-                "port": 9901
+                "port": 6800
            },
-            "pathMappings": [
-                {
-                    "localRoot": "${workspaceFolder}",
-                    "remoteRoot": "."
-                }
-            ],
+            "justMyCode": true,
            "django": true
        },
        {
-            "name": "Debug: Attach Worker",
-            "type": "debugpy",
+            "name": "Python: PDB attach Worker",
+            "type": "python",
            "request": "attach",
            "connect": {
                "host": "localhost",
-                "port": 9901
+                "port": 6900
            },
-            "pathMappings": [
-                {
-                    "localRoot": "${workspaceFolder}",
-                    "remoteRoot": "."
-                }
-            ],
+            "justMyCode": true,
            "django": true
        },
-        {
-            "name": "Debug: Start Server Router",
-            "type": "go",
-            "request": "launch",
-            "mode": "auto",
-            "program": "${workspaceFolder}/cmd/server",
-            "cwd": "${workspaceFolder}"
-        },
-        {
-            "name": "Debug: Start LDAP Outpost",
-            "type": "go",
-            "request": "launch",
-            "mode": "auto",
-            "program": "${workspaceFolder}/cmd/ldap",
-            "cwd": "${workspaceFolder}"
-        },
-        {
-            "name": "Debug: Start Proxy Outpost",
-            "type": "go",
-            "request": "launch",
-            "mode": "auto",
-            "program": "${workspaceFolder}/cmd/proxy",
-            "cwd": "${workspaceFolder}"
-        },
-        {
-            "name": "Debug: Start RAC Outpost",
-            "type": "go",
-            "request": "launch",
-            "mode": "auto",
-            "program": "${workspaceFolder}/cmd/rac",
-            "cwd": "${workspaceFolder}"
-        },
-        {
-            "name": "Debug: Start Radius Outpost",
-            "type": "go",
-            "request": "launch",
-            "mode": "auto",
-            "program": "${workspaceFolder}/cmd/radius",
-            "cwd": "${workspaceFolder}"
-        }
    ]
}

23
.vscode/settings.json vendored
View File

@ -6,7 +6,6 @@
"authn", "authn",
"entra", "entra",
"goauthentik", "goauthentik",
"jwe",
"jwks", "jwks",
"kubernetes", "kubernetes",
"oidc", "oidc",
@@ -19,22 +18,20 @@
        "sso",
        "totp",
        "traefik",
-        "webauthn"
+        "webauthn",
    ],
    "todo-tree.tree.showCountsInTree": true,
    "todo-tree.tree.showBadges": true,
    "yaml.customTags": [
-        "!Condition sequence",
-        "!Context scalar",
-        "!Enumerate sequence",
-        "!Env scalar",
        "!Find sequence",
-        "!Format sequence",
-        "!If sequence",
-        "!Index scalar",
        "!KeyOf scalar",
-        "!Value scalar",
-        "!AtIndex scalar"
+        "!Context scalar",
+        "!Context sequence",
+        "!Format sequence",
+        "!Condition sequence",
+        "!Env sequence",
+        "!Env scalar",
+        "!If sequence"
    ],
    "typescript.preferences.importModuleSpecifier": "non-relative",
    "typescript.preferences.importModuleSpecifierEnding": "index",
@@ -51,7 +48,9 @@
            "ignoreCase": false
        }
    ],
-    "go.testFlags": ["-count=1"],
+    "go.testFlags": [
+        "-count=1"
+    ],
    "github-actions.workflows.pinned.workflows": [
        ".github/workflows/ci-main.yml"
    ]

62
.vscode/tasks.json vendored
View File

@@ -2,67 +2,85 @@
    "version": "2.0.0",
    "tasks": [
        {
-            "label": "authentik/core: make",
+            "label": "authentik[core]: format & test",
            "command": "poetry",
-            "args": ["run", "make", "lint-fix", "lint"],
-            "presentation": {
-                "panel": "new"
-            },
-            "group": "test"
+            "args": [
+                "run",
+                "make"
+            ],
+            "group": "build",
        },
        {
-            "label": "authentik/core: run",
+            "label": "authentik[core]: run",
            "command": "poetry",
-            "args": ["run", "ak", "server"],
+            "args": [
+                "run",
+                "make",
+                "run",
+            ],
            "group": "build",
            "presentation": {
                "panel": "dedicated",
                "group": "running"
-            }
+            },
        },
        {
-            "label": "authentik/web: make",
+            "label": "authentik[web]: format",
            "command": "make",
            "args": ["web"],
-            "group": "build"
+            "group": "build",
        },
        {
-            "label": "authentik/web: watch",
+            "label": "authentik[web]: watch",
            "command": "make",
            "args": ["web-watch"],
            "group": "build",
            "presentation": {
                "panel": "dedicated",
                "group": "running"
-            }
+            },
        },
        {
            "label": "authentik: install",
            "command": "make",
-            "args": ["install", "-j4"],
-            "group": "build"
+            "args": ["install"],
+            "group": "build",
        },
        {
-            "label": "authentik/website: make",
+            "label": "authentik: i18n-extract",
+            "command": "poetry",
+            "args": [
+                "run",
+                "make",
+                "i18n-extract"
+            ],
+            "group": "build",
+        },
+        {
+            "label": "authentik[website]: format",
            "command": "make",
            "args": ["website"],
-            "group": "build"
+            "group": "build",
        },
        {
-            "label": "authentik/website: watch",
+            "label": "authentik[website]: watch",
            "command": "make",
            "args": ["website-watch"],
            "group": "build",
            "presentation": {
                "panel": "dedicated",
                "group": "running"
-            }
+            },
        },
        {
-            "label": "authentik/api: generate",
+            "label": "authentik[api]: generate",
            "command": "poetry",
-            "args": ["run", "make", "gen"],
+            "args": [
+                "run",
+                "make",
+                "gen"
+            ],
            "group": "build"
-        }
+        },
    ]
}

View File

@@ -15,23 +15,14 @@ go.mod @goauthentik/backend
go.sum @goauthentik/backend
# Infrastructure
.github/ @goauthentik/infrastructure
-lifecycle/aws/ @goauthentik/infrastructure
Dockerfile @goauthentik/infrastructure
*Dockerfile @goauthentik/infrastructure
.dockerignore @goauthentik/infrastructure
docker-compose.yml @goauthentik/infrastructure
-Makefile @goauthentik/infrastructure
-.editorconfig @goauthentik/infrastructure
-CODEOWNERS @goauthentik/infrastructure
# Web
web/ @goauthentik/frontend
tests/wdio/ @goauthentik/frontend
-# Locale
-locale/ @goauthentik/backend @goauthentik/frontend
-web/xliff/ @goauthentik/backend @goauthentik/frontend
# Docs & Website
website/ @goauthentik/docs
-CODE_OF_CONDUCT.md @goauthentik/docs
# Security
-SECURITY.md @goauthentik/security @goauthentik/docs
-website/docs/security/ @goauthentik/security @goauthentik/docs
+website/docs/security/ @goauthentik/security

View File

@@ -1 +1 @@
-website/docs/developer-docs/index.md
+website/developer-docs/index.md

View File

@ -1,7 +1,7 @@
# syntax=docker/dockerfile:1 # syntax=docker/dockerfile:1
# Stage 1: Build website # Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder FROM --platform=${BUILDPLATFORM} docker.io/node:22 as website-builder
ENV NODE_ENV=production ENV NODE_ENV=production
@ -20,7 +20,7 @@ COPY ./SECURITY.md /work/
RUN npm run build-bundled RUN npm run build-bundled
# Stage 2: Build webui # Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder FROM --platform=${BUILDPLATFORM} docker.io/node:22 as web-builder
ARG GIT_BUILD_HASH ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
@ -30,7 +30,6 @@ WORKDIR /work/web
RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \ RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
--mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
--mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \
--mount=type=bind,target=/work/web/scripts,src=./web/scripts \ --mount=type=bind,target=/work/web/scripts,src=./web/scripts \
--mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \ --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \
npm ci --include=dev npm ci --include=dev
@ -43,7 +42,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
RUN npm run build RUN npm run build
# Stage 3: Build go proxy # Stage 3: Build go proxy
FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.23-fips-bookworm AS go-builder FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.22-fips-bookworm AS go-builder
ARG TARGETOS ARG TARGETOS
ARG TARGETARCH ARG TARGETARCH
@ -80,7 +79,7 @@ RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
go build -o /go/authentik ./cmd/server go build -o /go/authentik ./cmd/server
# Stage 4: MaxMind GeoIP # Stage 4: MaxMind GeoIP
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 as geoip
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
ENV GEOIPUPDATE_VERBOSE="1" ENV GEOIPUPDATE_VERBOSE="1"
@ -94,10 +93,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
# Stage 5: Python dependencies # Stage 5: Python dependencies
FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps FROM ghcr.io/goauthentik/fips-python:3.12.3-slim-bookworm-fips-full AS python-deps
ARG TARGETARCH
ARG TARGETVARIANT
WORKDIR /ak-root/poetry WORKDIR /ak-root/poetry
@ -110,57 +106,40 @@ RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloa
RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \ RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
apt-get update && \ apt-get update && \
# Required for installing pip packages # Required for installing pip packages
apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev libkrb5-dev apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev
RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
--mount=type=bind,target=./poetry.lock,src=./poetry.lock \ --mount=type=bind,target=./poetry.lock,src=./poetry.lock \
--mount=type=cache,target=/root/.cache/pip \ --mount=type=cache,target=/root/.cache/pip \
--mount=type=cache,target=/root/.cache/pypoetry \ --mount=type=cache,target=/root/.cache/pypoetry \
pip install --no-cache cffi && \
apt-get update && \
apt-get install -y --no-install-recommends \
build-essential libffi-dev \
# Required for cryptography
curl pkg-config \
# Required for lxml
libxslt-dev zlib1g-dev \
# Required for xmlsec
libltdl-dev \
# Required for kadmin
sccache clang && \
curl https://sh.rustup.rs -sSf | sh -s -- -y && \
. "$HOME/.cargo/env" && \
python -m venv /ak-root/venv/ && \ python -m venv /ak-root/venv/ && \
bash -c "source ${VENV_PATH}/bin/activate && \ bash -c "source ${VENV_PATH}/bin/activate && \
pip3 install --upgrade pip poetry && \ pip3 install --upgrade pip && \
poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \ pip3 install poetry && \
poetry install --only=main --no-ansi --no-interaction --no-root && \ poetry install --only=main --no-ansi --no-interaction --no-root && \
pip uninstall cryptography -y && \ pip install --force-reinstall /wheels/*"
poetry install --only=main --no-ansi --no-interaction --no-root"
# Stage 6: Run # Stage 6: Run
FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image FROM ghcr.io/goauthentik/fips-python:3.12.3-slim-bookworm-fips-full AS final-image
ARG VERSION
ARG GIT_BUILD_HASH ARG GIT_BUILD_HASH
ARG VERSION
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
LABEL org.opencontainers.image.url=https://goauthentik.io LABEL org.opencontainers.image.url https://goauthentik.io
LABEL org.opencontainers.image.description="goauthentik.io Main server image, see https://goauthentik.io for more info." LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
LABEL org.opencontainers.image.source=https://github.com/goauthentik/authentik LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik
LABEL org.opencontainers.image.version=${VERSION} LABEL org.opencontainers.image.version ${VERSION}
LABEL org.opencontainers.image.revision=${GIT_BUILD_HASH} LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH}
WORKDIR / WORKDIR /
# We cannot cache this layer otherwise we'll end up with a bigger image # We cannot cache this layer otherwise we'll end up with a bigger image
RUN apt-get update && \ RUN apt-get update && \
apt-get upgrade -y && \
# Required for runtime # Required for runtime
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 libltdl7 libxslt1.1 && \ apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates && \
# Required for bootstrap & healthcheck # Required for bootstrap & healthcheck
apt-get install -y --no-install-recommends runit && \ apt-get install -y --no-install-recommends runit && \
pip3 install --no-cache-dir --upgrade pip && \
apt-get clean && \ apt-get clean && \
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \ rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \ adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
@ -178,7 +157,6 @@ COPY ./tests /tests
COPY ./manage.py / COPY ./manage.py /
COPY ./blueprints /blueprints COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle COPY ./lifecycle/ /lifecycle
COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
COPY --from=go-builder /go/authentik /bin/authentik COPY --from=go-builder /go/authentik /bin/authentik
COPY --from=python-deps /ak-root/venv /ak-root/venv COPY --from=python-deps /ak-root/venv /ak-root/venv
COPY --from=web-builder /work/web/dist/ /web/dist/ COPY --from=web-builder /work/web/dist/ /web/dist/
@ -193,8 +171,9 @@ ENV TMPDIR=/dev/shm/ \
PYTHONUNBUFFERED=1 \ PYTHONUNBUFFERED=1 \
PATH="/ak-root/venv/bin:/lifecycle:$PATH" \ PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
VENV_PATH="/ak-root/venv" \ VENV_PATH="/ak-root/venv" \
POETRY_VIRTUALENVS_CREATE=false \ POETRY_VIRTUALENVS_CREATE=false
GOFIPS=1
ENV GOFIPS=1
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ] HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]
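The python-deps stage on the newer (2024.12) side configures Poetry to install cryptography, xmlsec, lxml and python-kadmin-rs with no-binary, so they are compiled from source inside the FIPS Python base image, and both sides set GOFIPS=1 in the final image. A quick sanity check of the built image might look like the sketch below; it is not part of the diff and simply reuses the `backend._fips_enabled` flag that the system API queries elsewhere in this changeset.

```python
# Sketch: run inside the final image's virtualenv to confirm that the
# source-built cryptography package links against the FIPS-enabled OpenSSL.
from cryptography.hazmat.backends.openssl.backend import backend

print("OpenSSL FIPS mode enabled:", backend._fips_enabled)
```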

View File

@ -6,8 +6,6 @@ UID = $(shell id -u)
GID = $(shell id -g) GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.npm_version) NPM_VERSION = $(shell python -m scripts.npm_version)
PY_SOURCES = authentik tests scripts lifecycle .github PY_SOURCES = authentik tests scripts lifecycle .github
GO_SOURCES = cmd internal
WEB_SOURCES = web/src web/packages
DOCKER_IMAGE ?= "authentik:test" DOCKER_IMAGE ?= "authentik:test"
GEN_API_TS = "gen-ts-api" GEN_API_TS = "gen-ts-api"
@ -22,13 +20,13 @@ CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
-I .github/codespell-words.txt \ -I .github/codespell-words.txt \
-S 'web/src/locales/**' \ -S 'web/src/locales/**' \
-S 'website/developer-docs/api/reference/**' \ -S 'website/developer-docs/api/reference/**' \
-S '**/node_modules/**' \ authentik \
-S '**/dist/**' \ internal \
$(PY_SOURCES) \ cmd \
$(GO_SOURCES) \ web/src \
$(WEB_SOURCES) \
website/src \ website/src \
website/blog \ website/blog \
website/developer-docs \
website/docs \ website/docs \
website/integrations \ website/integrations \
website/src website/src
@ -45,9 +43,18 @@ help: ## Show this help
sort sort
@echo "" @echo ""
go-test: test-go:
go test -timeout 0 -v -race -cover ./... go test -timeout 0 -v -race -cover ./...
test-docker: ## Run all tests in a docker-compose
echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env
docker compose pull -q
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
rm -f .env
test: ## Run the server tests and produce a coverage report (locally) test: ## Run the server tests and produce a coverage report (locally)
coverage run manage.py test --keepdb authentik coverage run manage.py test --keepdb authentik
coverage html coverage html
@ -72,9 +79,6 @@ migrate: ## Run the Authentik Django server's migrations
i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service
aws-cfn:
cd lifecycle/aws && npm run aws-cfn
core-i18n-extract: core-i18n-extract:
ak makemessages \ ak makemessages \
--add-location file \ --add-location file \
@ -146,7 +150,7 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
docker run \ docker run \
--rm -v ${PWD}:/local \ --rm -v ${PWD}:/local \
--user ${UID}:${GID} \ --user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \ docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
-i /local/schema.yml \ -i /local/schema.yml \
-g typescript-fetch \ -g typescript-fetch \
-o /local/${GEN_API_TS} \ -o /local/${GEN_API_TS} \
@ -206,9 +210,6 @@ web: web-lint-fix web-lint web-check-compile ## Automatically fix formatting is
web-install: ## Install the necessary libraries to build the Authentik UI web-install: ## Install the necessary libraries to build the Authentik UI
cd web && npm ci cd web && npm ci
web-test: ## Run tests for the Authentik UI
cd web && npm run test
web-watch: ## Build and watch the Authentik UI for changes, updating automatically web-watch: ## Build and watch the Authentik UI for changes, updating automatically
rm -rf web/dist/ rm -rf web/dist/
mkdir web/dist/ mkdir web/dist/
@ -257,9 +258,6 @@ docker: ## Build a docker image of the current source tree
mkdir -p ${GEN_API_TS} mkdir -p ${GEN_API_TS}
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
test-docker:
BUILD=true ./scripts/test_docker.sh
######################### #########################
## CI ## CI
######################### #########################
@ -284,8 +282,3 @@ ci-bandit: ci--meta-debug
ci-pending-migrations: ci--meta-debug ci-pending-migrations: ci--meta-debug
ak makemigrations --check ak makemigrations --check
ci-test: ci--meta-debug
coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
coverage report
coverage xml

View File

@ -15,9 +15,7 @@
## What is authentik? ## What is authentik?
authentik is an open-source Identity Provider that emphasizes flexibility and versatility, with support for a wide set of protocols. authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them.
Our [enterprise offer](https://goauthentik.io/pricing) can also be used as a self-hosted replacement for large-scale deployments of Okta/Auth0, Entra ID, Ping Identity, or other legacy IdPs for employees and B2B2C use.
## Installation ## Installation
@ -34,7 +32,7 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h
## Development ## Development
See [Developer Documentation](https://docs.goauthentik.io/docs/developer-docs/?utm_source=github) See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github)
## Security ## Security

View File

@ -2,7 +2,7 @@ authentik takes security very seriously. We follow the rules of [responsible di
## Independent audits and pentests ## Independent audits and pentests
We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture are as secure and non-exploitable as possible. For more details about specific audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security). In May/June of 2023 [Cure53](https://cure53.de) conducted an audit and pentest. The [results](https://cure53.de/pentest-report_authentik.pdf) are published on the [Cure53 website](https://cure53.de/#publications-2023). For more details about authentik's response to the findings of the audit refer to [2023-06 Cure53 Code audit](https://goauthentik.io/docs/security/2023-06-cure53).
## What authentik classifies as a CVE ## What authentik classifies as a CVE
@ -18,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
(.x being the latest patch release for each version) (.x being the latest patch release for each version)
| Version | Supported | | Version | Supported |
| --------- | --------- | | -------- | --------- |
| 2024.10.x | ✅ | | 2024.4.x | ✅ |
| 2024.12.x | ✅ | | 2024.6.x | ✅ |
## Reporting a Vulnerability ## Reporting a Vulnerability

View File

@ -2,7 +2,7 @@
from os import environ from os import environ
__version__ = "2024.12.3" __version__ = "2024.6.0"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@ -16,5 +16,5 @@ def get_full_version() -> str:
"""Get full version, with build hash appended""" """Get full version, with build hash appended"""
version = __version__ version = __version__
if (build_hash := get_build_hash()) != "": if (build_hash := get_build_hash()) != "":
return f"{version}+{build_hash}" version += "." + build_hash
return version return version
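The newer (2024.12) implementation joins the build hash to the version with a `+` (build-metadata style) instead of appending it with a dot. With illustrative values, the two formats compare as follows:

```python
# Illustrative values only; the real build hash comes from GIT_BUILD_HASH.
version, build_hash = "2024.12.3", "1a2b3c4"

print(f"{version}+{build_hash}")   # newer format: 2024.12.3+1a2b3c4
print(version + "." + build_hash)  # older format: 2024.12.3.1a2b3c4
```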

View File

@ -7,9 +7,7 @@ from sys import version as python_version
from typing import TypedDict from typing import TypedDict
from cryptography.hazmat.backends.openssl.backend import backend from cryptography.hazmat.backends.openssl.backend import backend
from django.conf import settings
from django.utils.timezone import now from django.utils.timezone import now
from django.views.debug import SafeExceptionReporterFilter
from drf_spectacular.utils import extend_schema from drf_spectacular.utils import extend_schema
from rest_framework.fields import SerializerMethodField from rest_framework.fields import SerializerMethodField
from rest_framework.request import Request from rest_framework.request import Request
@ -54,16 +52,10 @@ class SystemInfoSerializer(PassiveSerializer):
def get_http_headers(self, request: Request) -> dict[str, str]: def get_http_headers(self, request: Request) -> dict[str, str]:
"""Get HTTP Request headers""" """Get HTTP Request headers"""
headers = {} headers = {}
raw_session = request._request.COOKIES.get(settings.SESSION_COOKIE_NAME)
for key, value in request.META.items(): for key, value in request.META.items():
if not isinstance(value, str): if not isinstance(value, str):
continue continue
actual_value = value headers[key] = value
if raw_session in actual_value:
actual_value = actual_value.replace(
raw_session, SafeExceptionReporterFilter.cleansed_substitute
)
headers[key] = actual_value
return headers return headers
def get_http_host(self, request: Request) -> str: def get_http_host(self, request: Request) -> str:
@ -81,7 +73,7 @@ class SystemInfoSerializer(PassiveSerializer):
"authentik_version": get_full_version(), "authentik_version": get_full_version(),
"environment": get_env(), "environment": get_env(),
"openssl_fips_enabled": ( "openssl_fips_enabled": (
backend._fips_enabled if LicenseKey.get_total().status().is_valid else None backend._fips_enabled if LicenseKey.get_total().is_valid() else None
), ),
"openssl_version": OPENSSL_VERSION, "openssl_version": OPENSSL_VERSION,
"platform": platform.platform(), "platform": platform.platform(),

View File

@ -12,7 +12,6 @@ from rest_framework.views import APIView
from authentik import __version__, get_build_hash from authentik import __version__, get_build_hash
from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version
from authentik.core.api.utils import PassiveSerializer from authentik.core.api.utils import PassiveSerializer
from authentik.outposts.models import Outpost
class VersionSerializer(PassiveSerializer): class VersionSerializer(PassiveSerializer):
@ -23,7 +22,6 @@ class VersionSerializer(PassiveSerializer):
version_latest_valid = SerializerMethodField() version_latest_valid = SerializerMethodField()
build_hash = SerializerMethodField() build_hash = SerializerMethodField()
outdated = SerializerMethodField() outdated = SerializerMethodField()
outpost_outdated = SerializerMethodField()
def get_build_hash(self, _) -> str: def get_build_hash(self, _) -> str:
"""Get build hash, if version is not latest or released""" """Get build hash, if version is not latest or released"""
@ -49,15 +47,6 @@ class VersionSerializer(PassiveSerializer):
"""Check if we're running the latest version""" """Check if we're running the latest version"""
return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance)) return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance))
def get_outpost_outdated(self, _) -> bool:
"""Check if any outpost is outdated/has a version mismatch"""
any_outdated = False
for outpost in Outpost.objects.all():
for state in outpost.state:
if state.version_outdated:
any_outdated = True
return any_outdated
class VersionView(APIView): class VersionView(APIView):
"""Get running and latest version.""" """Get running and latest version."""

View File

@ -1,33 +0,0 @@
from rest_framework.permissions import IsAdminUser
from rest_framework.viewsets import ReadOnlyModelViewSet
from authentik.admin.models import VersionHistory
from authentik.core.api.utils import ModelSerializer
class VersionHistorySerializer(ModelSerializer):
"""VersionHistory Serializer"""
class Meta:
model = VersionHistory
fields = [
"id",
"timestamp",
"version",
"build",
]
class VersionHistoryViewSet(ReadOnlyModelViewSet):
"""VersionHistory Viewset"""
queryset = VersionHistory.objects.all()
serializer_class = VersionHistorySerializer
permission_classes = [IsAdminUser]
filterset_fields = [
"version",
"build",
]
search_fields = ["version", "build"]
ordering = ["-timestamp"]
pagination_class = None

View File

@ -1,16 +1,12 @@
"""authentik administration overview""" """authentik administration overview"""
from socket import gethostname
from django.conf import settings from django.conf import settings
from drf_spectacular.utils import extend_schema, inline_serializer from drf_spectacular.utils import extend_schema, inline_serializer
from packaging.version import parse from rest_framework.fields import IntegerField
from rest_framework.fields import BooleanField, CharField
from rest_framework.request import Request from rest_framework.request import Request
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.views import APIView from rest_framework.views import APIView
from authentik import get_full_version
from authentik.rbac.permissions import HasPermission from authentik.rbac.permissions import HasPermission
from authentik.root.celery import CELERY_APP from authentik.root.celery import CELERY_APP
@ -20,38 +16,11 @@ class WorkerView(APIView):
permission_classes = [HasPermission("authentik_rbac.view_system_info")] permission_classes = [HasPermission("authentik_rbac.view_system_info")]
@extend_schema( @extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()}))
responses=inline_serializer(
"Worker",
fields={
"worker_id": CharField(),
"version": CharField(),
"version_matching": BooleanField(),
},
many=True,
)
)
def get(self, request: Request) -> Response: def get(self, request: Request) -> Response:
"""Get currently connected worker count.""" """Get currently connected worker count."""
raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5) count = len(CELERY_APP.control.ping(timeout=0.5))
our_version = parse(get_full_version())
response = []
for worker in raw:
key = list(worker.keys())[0]
version = worker[key].get("version")
version_matching = False
if version:
version_matching = parse(version) == our_version
response.append(
{"worker_id": key, "version": version, "version_matching": version_matching}
)
# In debug we run with `task_always_eager`, so tasks are run on the main process # In debug we run with `task_always_eager`, so tasks are run on the main process
if settings.DEBUG: # pragma: no cover if settings.DEBUG: # pragma: no cover
response.append( count += 1
{ return Response({"count": count})
"worker_id": f"authentik-debug@{gethostname()}",
"version": get_full_version(),
"version_matching": True,
}
)
return Response(response)
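With the newer (2024.12) code the workers endpoint returns one entry per connected Celery worker, including its reported version and whether it matches the server, while the older (2024.6) branch only returns a count. Example payloads, with illustrative values:

```python
# Newer response shape: one dict per worker.
[
    {"worker_id": "celery@worker-0", "version": "2024.12.3", "version_matching": True},
    {"worker_id": "celery@worker-1", "version": "2024.10.5", "version_matching": False},
]

# Older response shape: a bare count.
{"count": 2}
```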

View File

@ -1,10 +1,11 @@
"""authentik admin app config""" """authentik admin app config"""
from prometheus_client import Info from prometheus_client import Gauge, Info
from authentik.blueprints.apps import ManagedAppConfig from authentik.blueprints.apps import ManagedAppConfig
PROM_INFO = Info("authentik_version", "Currently running authentik version") PROM_INFO = Info("authentik_version", "Currently running authentik version")
GAUGE_WORKERS = Gauge("authentik_admin_workers", "Currently connected workers")
class AuthentikAdminConfig(ManagedAppConfig): class AuthentikAdminConfig(ManagedAppConfig):

View File

@ -1,22 +0,0 @@
"""authentik admin models"""
from django.db import models
from django.utils.translation import gettext_lazy as _
class VersionHistory(models.Model):
id = models.BigAutoField(primary_key=True)
timestamp = models.DateTimeField()
version = models.TextField()
build = models.TextField()
class Meta:
managed = False
db_table = "authentik_version_history"
ordering = ("-timestamp",)
verbose_name = _("Version history")
verbose_name_plural = _("Version history")
default_permissions = []
def __str__(self):
return f"{self.version}.{self.build} ({self.timestamp})"

View File

@ -1,35 +1,14 @@
"""admin signals""" """admin signals"""
from django.dispatch import receiver from django.dispatch import receiver
from packaging.version import parse
from prometheus_client import Gauge
from authentik import get_full_version from authentik.admin.apps import GAUGE_WORKERS
from authentik.root.celery import CELERY_APP from authentik.root.celery import CELERY_APP
from authentik.root.monitoring import monitoring_set from authentik.root.monitoring import monitoring_set
GAUGE_WORKERS = Gauge(
"authentik_admin_workers",
"Currently connected workers, their versions and if they are the same version as authentik",
["version", "version_matched"],
)
_version = parse(get_full_version())
@receiver(monitoring_set) @receiver(monitoring_set)
def monitoring_set_workers(sender, **kwargs): def monitoring_set_workers(sender, **kwargs):
"""Set worker gauge""" """Set worker gauge"""
raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5) count = len(CELERY_APP.control.ping(timeout=0.5))
worker_version_count = {} GAUGE_WORKERS.set(count)
for worker in raw:
key = list(worker.keys())[0]
version = worker[key].get("version")
version_matching = False
if version:
version_matching = parse(version) == _version
worker_version_count.setdefault(version, {"count": 0, "matching": version_matching})
worker_version_count[version]["count"] += 1
for version, stats in worker_version_count.items():
GAUGE_WORKERS.labels(version, stats["matching"]).set(stats["count"])
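On the newer (2024.12) side the worker gauge is labeled by worker version and whether that version matches the server, so Prometheus sees a per-version breakdown instead of a single number. A minimal standalone sketch of the same labeled-gauge pattern (not authentik code, example values only):

```python
from prometheus_client import CollectorRegistry, Gauge, generate_latest

registry = CollectorRegistry()
workers = Gauge(
    "authentik_admin_workers",
    "Currently connected workers, their versions and if they are the same version as authentik",
    ["version", "version_matched"],
    registry=registry,
)
workers.labels("2024.12.3", "True").set(2)  # e.g. two workers on the matching version
print(generate_latest(registry).decode())
```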

View File

@ -1,8 +1,10 @@
"""authentik admin tasks""" """authentik admin tasks"""
import re
from django.core.cache import cache from django.core.cache import cache
from django.core.validators import URLValidator
from django.db import DatabaseError, InternalError, ProgrammingError from django.db import DatabaseError, InternalError, ProgrammingError
from django.utils.translation import gettext_lazy as _
from packaging.version import parse from packaging.version import parse
from requests import RequestException from requests import RequestException
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
@ -19,6 +21,8 @@ LOGGER = get_logger()
VERSION_NULL = "0.0.0" VERSION_NULL = "0.0.0"
VERSION_CACHE_KEY = "authentik_latest_version" VERSION_CACHE_KEY = "authentik_latest_version"
VERSION_CACHE_TIMEOUT = 8 * 60 * 60 # 8 hours VERSION_CACHE_TIMEOUT = 8 * 60 * 60 # 8 hours
# Chop off the first ^ because we want to search the entire string
URL_FINDER = URLValidator.regex.pattern[1:]
LOCAL_VERSION = parse(__version__) LOCAL_VERSION = parse(__version__)
@ -74,16 +78,10 @@ def update_latest_version(self: SystemTask):
context__new_version=upstream_version, context__new_version=upstream_version,
).exists(): ).exists():
return return
Event.new( event_dict = {"new_version": upstream_version}
EventAction.UPDATE_AVAILABLE, if match := re.search(URL_FINDER, data.get("stable", {}).get("changelog", "")):
message=_( event_dict["message"] = f"Changelog: {match.group()}"
"New version {version} available!".format( Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
version=upstream_version,
)
),
new_version=upstream_version,
changelog=data.get("stable", {}).get("changelog_url"),
).save()
except (RequestException, IndexError) as exc: except (RequestException, IndexError) as exc:
cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT) cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
self.set_error(exc) self.set_error(exc)

View File

@ -34,7 +34,7 @@ class TestAdminAPI(TestCase):
response = self.client.get(reverse("authentik_api:admin_workers")) response = self.client.get(reverse("authentik_api:admin_workers"))
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
body = loads(response.content) body = loads(response.content)
self.assertEqual(len(body), 0) self.assertEqual(body["count"], 0)
def test_metrics(self): def test_metrics(self):
"""Test metrics API""" """Test metrics API"""

View File

@ -17,7 +17,6 @@ RESPONSE_VALID = {
"stable": { "stable": {
"version": "99999999.9999999", "version": "99999999.9999999",
"changelog": "See https://goauthentik.io/test", "changelog": "See https://goauthentik.io/test",
"changelog_url": "https://goauthentik.io/test",
"reason": "bugfix", "reason": "bugfix",
}, },
} }
@ -36,7 +35,7 @@ class TestAdminTasks(TestCase):
Event.objects.filter( Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE, action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999", context__new_version="99999999.9999999",
context__message="New version 99999999.9999999 available!", context__message="Changelog: https://goauthentik.io/test",
).exists() ).exists()
) )
# test that a consecutive check doesn't create a duplicate event # test that a consecutive check doesn't create a duplicate event
@ -46,7 +45,7 @@ class TestAdminTasks(TestCase):
Event.objects.filter( Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE, action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999", context__new_version="99999999.9999999",
context__message="New version 99999999.9999999 available!", context__message="Changelog: https://goauthentik.io/test",
) )
), ),
1, 1,

View File

@ -6,7 +6,6 @@ from authentik.admin.api.meta import AppsViewSet, ModelViewSet
from authentik.admin.api.metrics import AdministrationMetricsViewSet from authentik.admin.api.metrics import AdministrationMetricsViewSet
from authentik.admin.api.system import SystemView from authentik.admin.api.system import SystemView
from authentik.admin.api.version import VersionView from authentik.admin.api.version import VersionView
from authentik.admin.api.version_history import VersionHistoryViewSet
from authentik.admin.api.workers import WorkerView from authentik.admin.api.workers import WorkerView
api_urlpatterns = [ api_urlpatterns = [
@ -18,7 +17,6 @@ api_urlpatterns = [
name="admin_metrics", name="admin_metrics",
), ),
path("admin/version/", VersionView.as_view(), name="admin_version"), path("admin/version/", VersionView.as_view(), name="admin_version"),
("admin/version/history", VersionHistoryViewSet, "version_history"),
path("admin/workers/", WorkerView.as_view(), name="admin_workers"), path("admin/workers/", WorkerView.as_view(), name="admin_workers"),
path("admin/system/", SystemView.as_view(), name="admin_system"), path("admin/system/", SystemView.as_view(), name="admin_system"),
] ]

View File

@ -0,0 +1,67 @@
"""API Authorization"""
from django.conf import settings
from django.db.models import Model
from django.db.models.query import QuerySet
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.authentication import get_authorization_header
from rest_framework.filters import BaseFilterBackend
from rest_framework.permissions import BasePermission
from rest_framework.request import Request
from authentik.api.authentication import validate_auth
from authentik.rbac.filters import ObjectFilter
class OwnerFilter(BaseFilterBackend):
"""Filter objects by their owner"""
owner_key = "user"
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
if request.user.is_superuser:
return queryset
return queryset.filter(**{self.owner_key: request.user})
class SecretKeyFilter(DjangoFilterBackend):
"""Allow access to all objects when authenticated with secret key as token.
Replaces both DjangoFilterBackend and ObjectFilter"""
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
auth_header = get_authorization_header(request)
token = validate_auth(auth_header)
if token and token == settings.SECRET_KEY:
return queryset
queryset = ObjectFilter().filter_queryset(request, queryset, view)
return super().filter_queryset(request, queryset, view)
class OwnerPermissions(BasePermission):
"""Authorize requests by an object's owner matching the requesting user"""
owner_key = "user"
def has_permission(self, request: Request, view) -> bool:
"""If the user is authenticated, we allow all requests here. For listing, the
object-level permissions are done by the filter backend"""
return request.user.is_authenticated
def has_object_permission(self, request: Request, view, obj: Model) -> bool:
"""Check if the object's owner matches the currently logged in user"""
if not hasattr(obj, self.owner_key):
return False
owner = getattr(obj, self.owner_key)
if owner != request.user:
return False
return True
class OwnerSuperuserPermissions(OwnerPermissions):
"""Similar to OwnerPermissions, except always allow access for superusers"""
def has_object_permission(self, request: Request, view, obj: Model) -> bool:
if request.user.is_superuser:
return True
return super().has_object_permission(request, view, obj)
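For context, these filter and permission classes are meant to be wired into DRF viewsets. A hypothetical usage sketch follows; the viewset name is an assumption and not part of the diff, and the imports presumably come from authentik.api.authorization as in the older (2024.6) branch.

```python
from rest_framework.viewsets import ModelViewSet

from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions


class ExampleOwnedObjectViewSet(ModelViewSet):
    """Hypothetical viewset combining the classes defined above."""

    # List requests only return objects owned by the requesting user
    # (superusers see everything).
    filter_backends = [OwnerFilter]
    # Object-level requests are only allowed for the owner or a superuser.
    permission_classes = [OwnerSuperuserPermissions]
```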

View File

@ -7,7 +7,7 @@ API Browser - {{ brand.branding_title }}
{% endblock %} {% endblock %}
{% block head %} {% block head %}
<script src="{% versioned_script 'dist/standalone/api-browser/index-%v.js' %}" type="module"></script> {% versioned_script "dist/standalone/api-browser/index-%v.js" %}
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)"> <meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)"> <meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
{% endblock %} {% endblock %}

View File

@ -51,11 +51,9 @@ class BlueprintInstanceSerializer(ModelSerializer):
context = self.instance.context if self.instance else {} context = self.instance.context if self.instance else {}
valid, logs = Importer.from_string(content, context).validate() valid, logs = Importer.from_string(content, context).validate()
if not valid: if not valid:
text_logs = "\n".join([x["event"] for x in logs])
raise ValidationError( raise ValidationError(
[ _("Failed to validate blueprint: {logs}".format_map({"logs": text_logs}))
_("Failed to validate blueprint"),
*[f"- {x.event}" for x in logs],
]
) )
return content return content

View File

@ -23,11 +23,9 @@ class Command(BaseCommand):
for blueprint_path in options.get("blueprints", []): for blueprint_path in options.get("blueprints", []):
content = BlueprintInstance(path=blueprint_path).retrieve() content = BlueprintInstance(path=blueprint_path).retrieve()
importer = Importer.from_string(content) importer = Importer.from_string(content)
valid, logs = importer.validate() valid, _ = importer.validate()
if not valid: if not valid:
self.stderr.write("Blueprint invalid") self.stderr.write("blueprint invalid")
for log in logs:
self.stderr.write(f"\t{log.logger}: {log.event}: {log.attributes}")
sys_exit(1) sys_exit(1)
importer.apply() importer.apply()

View File

@ -1,68 +0,0 @@
"""Test and debug Blueprints"""
import atexit
import readline
from pathlib import Path
from pprint import pformat
from sys import exit as sysexit
from textwrap import indent
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
from yaml import load
from authentik.blueprints.v1.common import BlueprintLoader, EntryInvalidError
from authentik.core.management.commands.shell import get_banner_text
from authentik.lib.utils.errors import exception_to_string
LOGGER = get_logger()
class Command(BaseCommand):
"""Test and debug Blueprints"""
lines = []
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
histfolder = Path("~").expanduser() / Path(".local/share/authentik")
histfolder.mkdir(parents=True, exist_ok=True)
histfile = histfolder / Path("blueprint_shell_history")
readline.parse_and_bind("tab: complete")
readline.parse_and_bind("set editing-mode vi")
try:
readline.read_history_file(str(histfile))
except FileNotFoundError:
pass
atexit.register(readline.write_history_file, str(histfile))
@no_translations
def handle(self, *args, **options):
"""Interactively debug blueprint files"""
self.stdout.write(get_banner_text("Blueprint shell"))
self.stdout.write("Type '.eval' to evaluate previously entered statement(s).")
def do_eval():
yaml_input = "\n".join([line for line in self.lines if line])
data = load(yaml_input, BlueprintLoader)
self.stdout.write(pformat(data))
self.lines = []
while True:
try:
line = input("> ")
if line == ".eval":
do_eval()
else:
self.lines.append(line)
except EntryInvalidError as exc:
self.stdout.write("Failed to evaluate expression:")
self.stdout.write(indent(exception_to_string(exc), prefix=" "))
except EOFError:
break
except KeyboardInterrupt:
self.stdout.write()
sysexit(0)
self.stdout.write()

View File

@ -113,20 +113,17 @@ class Command(BaseCommand):
) )
model_path = f"{model._meta.app_label}.{model._meta.model_name}" model_path = f"{model._meta.app_label}.{model._meta.model_name}"
self.schema["properties"]["entries"]["items"]["oneOf"].append( self.schema["properties"]["entries"]["items"]["oneOf"].append(
self.template_entry(model_path, model, serializer) self.template_entry(model_path, serializer)
) )
def template_entry(self, model_path: str, model: type[Model], serializer: Serializer) -> dict: def template_entry(self, model_path: str, serializer: Serializer) -> dict:
"""Template entry for a single model""" """Template entry for a single model"""
model_schema = self.to_jsonschema(serializer) model_schema = self.to_jsonschema(serializer)
model_schema["required"] = [] model_schema["required"] = []
def_name = f"model_{model_path}" def_name = f"model_{model_path}"
def_path = f"#/$defs/{def_name}" def_path = f"#/$defs/{def_name}"
self.schema["$defs"][def_name] = model_schema self.schema["$defs"][def_name] = model_schema
def_name_perm = f"model_{model_path}_permissions" return {
def_path_perm = f"#/$defs/{def_name_perm}"
self.schema["$defs"][def_name_perm] = self.model_permissions(model)
template = {
"type": "object", "type": "object",
"required": ["model", "identifiers"], "required": ["model", "identifiers"],
"properties": { "properties": {
@ -138,16 +135,10 @@ class Command(BaseCommand):
"default": "present", "default": "present",
}, },
"conditions": {"type": "array", "items": {"type": "boolean"}}, "conditions": {"type": "array", "items": {"type": "boolean"}},
"permissions": {"$ref": def_path_perm},
"attrs": {"$ref": def_path}, "attrs": {"$ref": def_path},
"identifiers": {"$ref": def_path}, "identifiers": {"$ref": def_path},
}, },
} }
# Meta models don't require identifiers, as there's no matching database model to find
if issubclass(model, BaseMetaModel):
del template["properties"]["identifiers"]
template["required"].remove("identifiers")
return template
def field_to_jsonschema(self, field: Field) -> dict: def field_to_jsonschema(self, field: Field) -> dict:
"""Convert a single field to json schema""" """Convert a single field to json schema"""
@ -194,20 +185,3 @@ class Command(BaseCommand):
if required: if required:
result["required"] = required result["required"] = required
return result return result
def model_permissions(self, model: type[Model]) -> dict:
perms = [x[0] for x in model._meta.permissions]
for action in model._meta.default_permissions:
perms.append(f"{action}_{model._meta.model_name}")
return {
"type": "array",
"items": {
"type": "object",
"required": ["permission"],
"properties": {
"permission": {"type": "string", "enum": perms},
"user": {"type": "integer"},
"role": {"type": "string"},
},
},
}

View File

@ -29,7 +29,9 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
if version != 1: if version != 1:
return return
blueprint_file.seek(0) blueprint_file.seek(0)
instance = BlueprintInstance.objects.using(db_alias).filter(path=path).first() instance: BlueprintInstance = (
BlueprintInstance.objects.using(db_alias).filter(path=path).first()
)
rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir"))) rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
meta = None meta = None
if metadata: if metadata:

View File

@ -1,24 +0,0 @@
version: 1
entries:
- model: authentik_core.user
id: user
identifiers:
username: "%(id)s"
attrs:
name: "%(id)s"
- model: authentik_rbac.role
id: role
identifiers:
name: "%(id)s"
- model: authentik_flows.flow
identifiers:
slug: "%(id)s"
attrs:
designation: authentication
name: foo
title: foo
permissions:
- permission: view_flow
user: !KeyOf user
- permission: view_flow
role: !KeyOf role

View File

@ -1,8 +0,0 @@
version: 1
entries:
- model: authentik_rbac.role
identifiers:
name: "%(id)s"
attrs:
permissions:
- authentik_blueprints.view_blueprintinstance

View File

@ -1,9 +0,0 @@
version: 1
entries:
- model: authentik_core.user
identifiers:
username: "%(id)s"
attrs:
name: "%(id)s"
permissions:
- authentik_blueprints.view_blueprintinstance

View File

@ -146,10 +146,6 @@ entries:
] ]
] ]
nested_context: !Context context2 nested_context: !Context context2
at_index_sequence: !AtIndex [!Context sequence, 0]
at_index_sequence_default: !AtIndex [!Context sequence, 100, "non existent"]
at_index_mapping: !AtIndex [!Context mapping, "key2"]
at_index_mapping_default: !AtIndex [!Context mapping, "invalid", "non existent"]
identifiers: identifiers:
name: test name: test
conditions: conditions:

View File

@ -27,8 +27,7 @@ def blueprint_tester(file_name: Path) -> Callable:
base = Path("blueprints/") base = Path("blueprints/")
rel_path = Path(file_name).relative_to(base) rel_path = Path(file_name).relative_to(base)
importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve()) importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve())
validation, logs = importer.validate() self.assertTrue(importer.validate()[0])
self.assertTrue(validation, logs)
self.assertTrue(importer.apply()) self.assertTrue(importer.apply())
return tester return tester

View File

@ -215,10 +215,6 @@ class TestBlueprintsV1(TransactionTestCase):
}, },
"nested_context": "context-nested-value", "nested_context": "context-nested-value",
"env_null": None, "env_null": None,
"at_index_sequence": "foo",
"at_index_sequence_default": "non existent",
"at_index_mapping": 2,
"at_index_mapping_default": "non existent",
} }
).exists() ).exists()
) )

View File

@ -78,5 +78,5 @@ class TestBlueprintsV1API(APITestCase):
self.assertEqual(res.status_code, 400) self.assertEqual(res.status_code, 400)
self.assertJSONEqual( self.assertJSONEqual(
res.content.decode(), res.content.decode(),
{"content": ["Failed to validate blueprint", "- Invalid blueprint version"]}, {"content": ["Failed to validate blueprint: Invalid blueprint version"]},
) )

View File

@ -1,57 +0,0 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase
from guardian.shortcuts import get_perms
from authentik.blueprints.v1.importer import Importer
from authentik.core.models import User
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
from authentik.rbac.models import Role
class TestBlueprintsV1RBAC(TransactionTestCase):
"""Test Blueprints rbac attribute"""
def test_user_permission(self):
"""Test permissions"""
uid = generate_id()
import_yaml = load_fixture("fixtures/rbac_user.yaml", id=uid)
importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
user = User.objects.filter(username=uid).first()
self.assertIsNotNone(user)
self.assertTrue(user.has_perms(["authentik_blueprints.view_blueprintinstance"]))
def test_role_permission(self):
"""Test permissions"""
uid = generate_id()
import_yaml = load_fixture("fixtures/rbac_role.yaml", id=uid)
importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
role = Role.objects.filter(name=uid).first()
self.assertIsNotNone(role)
self.assertEqual(
list(role.group.permissions.all().values_list("codename", flat=True)),
["view_blueprintinstance"],
)
def test_object_permission(self):
"""Test permissions"""
uid = generate_id()
import_yaml = load_fixture("fixtures/rbac_object.yaml", id=uid)
importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
flow = Flow.objects.filter(slug=uid).first()
user = User.objects.filter(username=uid).first()
role = Role.objects.filter(name=uid).first()
self.assertIsNotNone(flow)
self.assertEqual(get_perms(user, flow), ["view_flow"])
self.assertEqual(get_perms(role.group, flow), ["view_flow"])

View File

@ -1,7 +1,7 @@
"""transfer common classes""" """transfer common classes"""
from collections import OrderedDict from collections import OrderedDict
from collections.abc import Generator, Iterable, Mapping from collections.abc import Iterable, Mapping
from copy import copy from copy import copy
from dataclasses import asdict, dataclass, field, is_dataclass from dataclasses import asdict, dataclass, field, is_dataclass
from enum import Enum from enum import Enum
@ -24,10 +24,6 @@ from authentik.lib.sentry import SentryIgnoredException
from authentik.policies.models import PolicyBindingModel from authentik.policies.models import PolicyBindingModel
class UNSET:
"""Used to test whether a key has not been set."""
def get_attrs(obj: SerializerModel) -> dict[str, Any]: def get_attrs(obj: SerializerModel) -> dict[str, Any]:
"""Get object's attributes via their serializer, and convert it to a normal dict""" """Get object's attributes via their serializer, and convert it to a normal dict"""
serializer: Serializer = obj.serializer(obj) serializer: Serializer = obj.serializer(obj)
@ -62,15 +58,6 @@ class BlueprintEntryDesiredState(Enum):
MUST_CREATED = "must_created" MUST_CREATED = "must_created"
@dataclass
class BlueprintEntryPermission:
"""Describe object-level permissions"""
permission: Union[str, "YAMLTag"]
user: Union[int, "YAMLTag", None] = field(default=None)
role: Union[str, "YAMLTag", None] = field(default=None)
@dataclass @dataclass
class BlueprintEntry: class BlueprintEntry:
"""Single entry of a blueprint""" """Single entry of a blueprint"""
@ -82,7 +69,6 @@ class BlueprintEntry:
conditions: list[Any] = field(default_factory=list) conditions: list[Any] = field(default_factory=list)
identifiers: dict[str, Any] = field(default_factory=dict) identifiers: dict[str, Any] = field(default_factory=dict)
attrs: dict[str, Any] | None = field(default_factory=dict) attrs: dict[str, Any] | None = field(default_factory=dict)
permissions: list[BlueprintEntryPermission] = field(default_factory=list)
id: str | None = None id: str | None = None
@ -164,17 +150,6 @@ class BlueprintEntry:
"""Get the blueprint model, with yaml tags resolved if present""" """Get the blueprint model, with yaml tags resolved if present"""
return str(self.tag_resolver(self.model, blueprint)) return str(self.tag_resolver(self.model, blueprint))
def get_permissions(
self, blueprint: "Blueprint"
) -> Generator[BlueprintEntryPermission, None, None]:
"""Get permissions of this entry, with all yaml tags resolved"""
for perm in self.permissions:
yield BlueprintEntryPermission(
permission=self.tag_resolver(perm.permission, blueprint),
user=self.tag_resolver(perm.user, blueprint),
role=self.tag_resolver(perm.role, blueprint),
)
def check_all_conditions_match(self, blueprint: "Blueprint") -> bool: def check_all_conditions_match(self, blueprint: "Blueprint") -> bool:
"""Check all conditions of this entry match (evaluate to True)""" """Check all conditions of this entry match (evaluate to True)"""
return all(self.tag_resolver(self.conditions, blueprint)) return all(self.tag_resolver(self.conditions, blueprint))
@ -202,9 +177,6 @@ class Blueprint:
class YAMLTag: class YAMLTag:
"""Base class for all YAML Tags""" """Base class for all YAML Tags"""
def __repr__(self) -> str:
return str(self.resolve(BlueprintEntry(""), Blueprint()))
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
"""Implement yaml tag logic""" """Implement yaml tag logic"""
raise NotImplementedError raise NotImplementedError
@ -335,10 +307,7 @@ class Find(YAMLTag):
else: else:
model_name = self.model_name model_name = self.model_name
try: model_class = apps.get_model(*model_name.split("."))
model_class = apps.get_model(*model_name.split("."))
except LookupError as exc:
raise EntryInvalidError.from_entry(exc, entry) from exc
query = Q() query = Q()
for cond in self.conditions: for cond in self.conditions:
@ -563,53 +532,6 @@ class Value(EnumeratedItem):
raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc
class AtIndex(YAMLTag):
"""Get value at index of a sequence or mapping"""
obj: YAMLTag | dict | list | tuple
attribute: int | str | YAMLTag
default: Any | UNSET
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.obj = loader.construct_object(node.value[0])
self.attribute = loader.construct_object(node.value[1])
if len(node.value) == 2: # noqa: PLR2004
self.default = UNSET
else:
self.default = loader.construct_object(node.value[2])
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
if isinstance(self.obj, YAMLTag):
obj = self.obj.resolve(entry, blueprint)
else:
obj = self.obj
if isinstance(self.attribute, YAMLTag):
attribute = self.attribute.resolve(entry, blueprint)
else:
attribute = self.attribute
if isinstance(obj, list | tuple):
try:
return obj[attribute]
except TypeError as exc:
raise EntryInvalidError.from_entry(
f"Invalid index for list: {attribute}", entry
) from exc
except IndexError as exc:
if self.default is UNSET:
raise EntryInvalidError.from_entry(
f"Index out of range: {attribute}", entry
) from exc
return self.default
if attribute in obj:
return obj[attribute]
else:
if self.default is UNSET:
raise EntryInvalidError.from_entry(f"Key does not exist: {attribute}", entry)
return self.default
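The !AtIndex tag (present on the newer, 2024.12 side) resolves an element of a sequence or mapping, with an optional third argument used as a fallback when the index or key is missing. A plain-Python sketch of the same lookup semantics, for illustration only and matching the fixture expectations shown earlier:

```python
_UNSET = object()  # stand-in for the UNSET sentinel used by the importer


def at_index(obj, attribute, default=_UNSET):
    """Mimic the !AtIndex lookup: index into lists/tuples, key into mappings."""
    if isinstance(obj, (list, tuple)):
        try:
            return obj[attribute]  # a non-integer index raises TypeError, as above
        except IndexError:
            if default is _UNSET:
                raise
            return default
    if attribute in obj:
        return obj[attribute]
    if default is _UNSET:
        raise KeyError(attribute)
    return default


print(at_index(["foo", "bar"], 0))                       # foo
print(at_index(["foo", "bar"], 100, "non existent"))     # non existent
print(at_index({"key1": 1, "key2": 2}, "key2"))          # 2
print(at_index({"key1": 1}, "invalid", "non existent"))  # non existent
```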
class BlueprintDumper(SafeDumper): class BlueprintDumper(SafeDumper):
"""Dump dataclasses to yaml""" """Dump dataclasses to yaml"""
@ -660,7 +582,6 @@ class BlueprintLoader(SafeLoader):
self.add_constructor("!Enumerate", Enumerate) self.add_constructor("!Enumerate", Enumerate)
self.add_constructor("!Value", Value) self.add_constructor("!Value", Value)
self.add_constructor("!Index", Index) self.add_constructor("!Index", Index)
self.add_constructor("!AtIndex", AtIndex)
class EntryInvalidError(SentryIgnoredException): class EntryInvalidError(SentryIgnoredException):

View File

@ -16,7 +16,6 @@ from django.db.models.query_utils import Q
from django.db.transaction import atomic from django.db.transaction import atomic
from django.db.utils import IntegrityError from django.db.utils import IntegrityError
from guardian.models import UserObjectPermission from guardian.models import UserObjectPermission
from guardian.shortcuts import assign_perm
from rest_framework.exceptions import ValidationError from rest_framework.exceptions import ValidationError
from rest_framework.serializers import BaseSerializer, Serializer from rest_framework.serializers import BaseSerializer, Serializer
from structlog.stdlib import BoundLogger, get_logger from structlog.stdlib import BoundLogger, get_logger
@ -33,11 +32,9 @@ from authentik.blueprints.v1.common import (
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
from authentik.core.models import ( from authentik.core.models import (
AuthenticatedSession, AuthenticatedSession,
GroupSourceConnection,
PropertyMapping, PropertyMapping,
Provider, Provider,
Source, Source,
User,
UserSourceConnection, UserSourceConnection,
) )
from authentik.enterprise.license import LicenseKey from authentik.enterprise.license import LicenseKey
@ -51,35 +48,23 @@ from authentik.enterprise.providers.microsoft_entra.models import (
MicrosoftEntraProviderUser, MicrosoftEntraProviderUser,
) )
from authentik.enterprise.providers.rac.models import ConnectionToken from authentik.enterprise.providers.rac.models import ConnectionToken
from authentik.enterprise.providers.ssf.models import StreamEvent
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
EndpointDevice,
EndpointDeviceConnection,
)
from authentik.events.logs import LogEvent, capture_logs from authentik.events.logs import LogEvent, capture_logs
from authentik.events.models import SystemTask from authentik.events.models import SystemTask
from authentik.events.utils import cleanse_dict from authentik.events.utils import cleanse_dict
from authentik.flows.models import FlowToken, Stage from authentik.flows.models import FlowToken, Stage
from authentik.lib.models import SerializerModel from authentik.lib.models import SerializerModel
from authentik.lib.sentry import SentryIgnoredException from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.reflection import get_apps
from authentik.outposts.models import OutpostServiceConnection from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel from authentik.policies.models import Policy, PolicyBindingModel
from authentik.policies.reputation.models import Reputation from authentik.policies.reputation.models import Reputation
from authentik.providers.oauth2.models import ( from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
AccessToken,
AuthorizationCode,
DeviceToken,
RefreshToken,
)
from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
from authentik.rbac.models import Role
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
from authentik.tenants.models import Tenant from authentik.tenants.models import Tenant
# Context set when the serializer is created in a blueprint context # Context set when the serializer is created in a blueprint context
# Update website/docs/customize/blueprints/v1/models.md when used # Update website/developer-docs/blueprints/v1/models.md when used
SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry" SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"
@ -102,7 +87,6 @@ def excluded_models() -> list[type[Model]]:
Source, Source,
PropertyMapping, PropertyMapping,
UserSourceConnection, UserSourceConnection,
GroupSourceConnection,
Stage, Stage,
OutpostServiceConnection, OutpostServiceConnection,
Policy, Policy,
@ -129,10 +113,6 @@ def excluded_models() -> list[type[Model]]:
GoogleWorkspaceProviderGroup, GoogleWorkspaceProviderGroup,
MicrosoftEntraProviderUser, MicrosoftEntraProviderUser,
MicrosoftEntraProviderGroup, MicrosoftEntraProviderGroup,
EndpointDevice,
EndpointDeviceConnection,
DeviceToken,
StreamEvent,
) )
@ -156,16 +136,6 @@ def transaction_rollback():
pass pass
def rbac_models() -> dict:
models = {}
for app in get_apps():
for model in app.get_models():
if not is_model_allowed(model):
continue
models[model._meta.model_name] = app.label
return models
class Importer: class Importer:
"""Import Blueprint from raw dict or YAML/JSON""" """Import Blueprint from raw dict or YAML/JSON"""
@ -184,10 +154,7 @@ class Importer:
def default_context(self): def default_context(self):
"""Default context""" """Default context"""
return { return {"goauthentik.io/enterprise/licensed": LicenseKey.get_total().is_valid()}
"goauthentik.io/enterprise/licensed": LicenseKey.get_total().status().is_valid,
"goauthentik.io/rbac/models": rbac_models(),
}
@staticmethod @staticmethod
def from_string(yaml_input: str, context: dict | None = None) -> "Importer": def from_string(yaml_input: str, context: dict | None = None) -> "Importer":
@ -247,17 +214,14 @@ class Importer:
return main_query | sub_query return main_query | sub_query
def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer | None: # noqa: PLR0915 def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer | None:
"""Validate a single entry""" """Validate a single entry"""
if not entry.check_all_conditions_match(self._import): if not entry.check_all_conditions_match(self._import):
self.logger.debug("One or more conditions of this entry are not fulfilled, skipping") self.logger.debug("One or more conditions of this entry are not fulfilled, skipping")
return None return None
model_app_label, model_name = entry.get_model(self._import).split(".") model_app_label, model_name = entry.get_model(self._import).split(".")
try: model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
except LookupError as exc:
raise EntryInvalidError.from_entry(exc, entry) from exc
# Don't use isinstance since we don't want to check for inheritance # Don't use isinstance since we don't want to check for inheritance
if not is_model_allowed(model): if not is_model_allowed(model):
raise EntryInvalidError.from_entry(f"Model {model} not allowed", entry) raise EntryInvalidError.from_entry(f"Model {model} not allowed", entry)
@ -301,11 +265,7 @@ class Importer:
serializer_kwargs = {} serializer_kwargs = {}
model_instance = existing_models.first() model_instance = existing_models.first()
if ( if not isinstance(model(), BaseMetaModel) and model_instance:
not isinstance(model(), BaseMetaModel)
and model_instance
and entry.state != BlueprintEntryDesiredState.MUST_CREATED
):
self.logger.debug( self.logger.debug(
"Initialise serializer with instance", "Initialise serializer with instance",
model=model, model=model,
@ -315,12 +275,11 @@ class Importer:
serializer_kwargs["instance"] = model_instance serializer_kwargs["instance"] = model_instance
serializer_kwargs["partial"] = True serializer_kwargs["partial"] = True
elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED: elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED:
msg = (
f"State is set to {BlueprintEntryDesiredState.MUST_CREATED.value} "
"and object exists already",
)
raise EntryInvalidError.from_entry( raise EntryInvalidError.from_entry(
ValidationError({k: msg for k in entry.identifiers.keys()}, "unique"), (
f"State is set to {BlueprintEntryDesiredState.MUST_CREATED} "
"and object exists already",
),
entry, entry,
) )
else: else:
@ -337,7 +296,10 @@ class Importer:
try: try:
full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import)) full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import))
except ValueError as exc: except ValueError as exc:
raise EntryInvalidError.from_entry(exc, entry) from exc raise EntryInvalidError.from_entry(
exc,
entry,
) from exc
always_merger.merge(full_data, updated_identifiers) always_merger.merge(full_data, updated_identifiers)
serializer_kwargs["data"] = full_data serializer_kwargs["data"] = full_data
@ -358,15 +320,6 @@ class Importer:
) from exc ) from exc
return serializer return serializer
def _apply_permissions(self, instance: Model, entry: BlueprintEntry):
"""Apply object-level permissions for an entry"""
for perm in entry.get_permissions(self._import):
if perm.user is not None:
assign_perm(perm.permission, User.objects.get(pk=perm.user), instance)
if perm.role is not None:
role = Role.objects.get(pk=perm.role)
role.assign_permission(perm.permission, obj=instance)
def apply(self) -> bool: def apply(self) -> bool:
"""Apply (create/update) models yaml, in database transaction""" """Apply (create/update) models yaml, in database transaction"""
try: try:
@ -431,7 +384,6 @@ class Importer:
if "pk" in entry.identifiers: if "pk" in entry.identifiers:
self.__pk_map[entry.identifiers["pk"]] = instance.pk self.__pk_map[entry.identifiers["pk"]] = instance.pk
entry._state = BlueprintEntryState(instance) entry._state = BlueprintEntryState(instance)
self._apply_permissions(instance, entry)
elif state == BlueprintEntryDesiredState.ABSENT: elif state == BlueprintEntryDesiredState.ABSENT:
instance: Model | None = serializer.instance instance: Model | None = serializer.instance
if instance.pk: if instance.pk:
@ -448,7 +400,7 @@ class Importer:
orig_import = deepcopy(self._import) orig_import = deepcopy(self._import)
if self._import.version != 1: if self._import.version != 1:
self.logger.warning("Invalid blueprint version") self.logger.warning("Invalid blueprint version")
return False, [LogEvent("Invalid blueprint version", log_level="warning", logger=None)] return False, [{"event": "Invalid blueprint version"}]
with ( with (
transaction_rollback(), transaction_rollback(),
capture_logs() as logs, capture_logs() as logs,

View File

@ -159,7 +159,7 @@ def blueprints_discovery(self: SystemTask, path: str | None = None):
check_blueprint_v1_file(blueprint) check_blueprint_v1_file(blueprint)
count += 1 count += 1
self.set_status( self.set_status(
TaskStatus.SUCCESSFUL, _("Successfully imported {count} files.".format(count=count)) TaskStatus.SUCCESSFUL, _("Successfully imported %(count)d files." % {"count": count})
) )

View File

@ -14,17 +14,17 @@ from rest_framework.response import Response
from rest_framework.validators import UniqueValidator from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet from rest_framework.viewsets import ModelViewSet
from authentik.api.authorization import SecretKeyFilter
from authentik.brands.models import Brand from authentik.brands.models import Brand
from authentik.core.api.used_by import UsedByMixin from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer, PassiveSerializer from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.rbac.filters import SecretKeyFilter
from authentik.tenants.utils import get_current_tenant from authentik.tenants.utils import get_current_tenant
class FooterLinkSerializer(PassiveSerializer): class FooterLinkSerializer(PassiveSerializer):
"""Links returned in Config API""" """Links returned in Config API"""
href = CharField(read_only=True, allow_null=True) href = CharField(read_only=True)
name = CharField(read_only=True) name = CharField(read_only=True)
@ -55,7 +55,6 @@ class BrandSerializer(ModelSerializer):
"flow_unenrollment", "flow_unenrollment",
"flow_user_settings", "flow_user_settings",
"flow_device_code", "flow_device_code",
"default_application",
"web_certificate", "web_certificate",
"attributes", "attributes",
] ]
@ -84,8 +83,8 @@ class CurrentBrandSerializer(PassiveSerializer):
matched_domain = CharField(source="domain") matched_domain = CharField(source="domain")
branding_title = CharField() branding_title = CharField()
branding_logo = CharField(source="branding_logo_url") branding_logo = CharField()
branding_favicon = CharField(source="branding_favicon_url") branding_favicon = CharField()
ui_footer_links = ListField( ui_footer_links = ListField(
child=FooterLinkSerializer(), child=FooterLinkSerializer(),
read_only=True, read_only=True,


@ -9,6 +9,3 @@ class AuthentikBrandsConfig(AppConfig):
name = "authentik.brands" name = "authentik.brands"
label = "authentik_brands" label = "authentik_brands"
verbose_name = "authentik Brands" verbose_name = "authentik Brands"
mountpoints = {
"authentik.brands.urls_root": "",
}


@ -4,7 +4,7 @@ from collections.abc import Callable
from django.http.request import HttpRequest from django.http.request import HttpRequest
from django.http.response import HttpResponse from django.http.response import HttpResponse
from django.utils.translation import override from django.utils.translation import activate
from authentik.brands.utils import get_brand_for_request from authentik.brands.utils import get_brand_for_request
@ -18,14 +18,10 @@ class BrandMiddleware:
self.get_response = get_response self.get_response = get_response
def __call__(self, request: HttpRequest) -> HttpResponse: def __call__(self, request: HttpRequest) -> HttpResponse:
locale_to_set = None
if not hasattr(request, "brand"): if not hasattr(request, "brand"):
brand = get_brand_for_request(request) brand = get_brand_for_request(request)
request.brand = brand request.brand = brand
locale = brand.default_locale locale = brand.default_locale
if locale != "": if locale != "":
locale_to_set = locale activate(locale)
if locale_to_set:
with override(locale_to_set):
return self.get_response(request)
return self.get_response(request) return self.get_response(request)
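The hunk above swaps Django's `override()` context manager for a bare `activate()` call. As a rough illustration of the behavioural difference (a standalone sketch, not authentik code; the `settings.configure()` call only exists so the snippet runs on its own):

```python
import django
from django.conf import settings

# Minimal standalone configuration so the translation helpers work here.
settings.configure(USE_I18N=True, LANGUAGE_CODE="en-us")
django.setup()

from django.utils.translation import activate, get_language, override

activate("de")               # switches this thread's language until changed again
print(get_language())        # -> "de"

with override("fr"):         # scoped switch, restored when the block exits
    print(get_language())    # -> "fr"

print(get_language())        # -> "de"; override() cleaned up after itself
```

Scoping the locale per request is presumably why the newer middleware collects `locale_to_set` and wraps `get_response()` in `override()`: a bare `activate()` can leak a brand's default locale into later requests served by the same worker thread.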


@ -1,26 +0,0 @@
# Generated by Django 5.0.6 on 2024-07-04 20:32
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_brands", "0006_brand_authentik_b_domain_b9b24a_idx_and_more"),
("authentik_core", "0035_alter_group_options_and_more"),
]
operations = [
migrations.AddField(
model_name="brand",
name="default_application",
field=models.ForeignKey(
default=None,
help_text="When set, external users will be redirected to this application after authenticating.",
null=True,
on_delete=django.db.models.deletion.SET_DEFAULT,
to="authentik_core.application",
),
),
]


@ -3,14 +3,12 @@
from uuid import uuid4 from uuid import uuid4
from django.db import models from django.db import models
from django.http import HttpRequest
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from rest_framework.serializers import Serializer from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
from authentik.crypto.models import CertificateKeyPair from authentik.crypto.models import CertificateKeyPair
from authentik.flows.models import Flow from authentik.flows.models import Flow
from authentik.lib.config import CONFIG
from authentik.lib.models import SerializerModel from authentik.lib.models import SerializerModel
LOGGER = get_logger() LOGGER = get_logger()
@ -53,16 +51,6 @@ class Brand(SerializerModel):
Flow, null=True, on_delete=models.SET_NULL, related_name="brand_device_code" Flow, null=True, on_delete=models.SET_NULL, related_name="brand_device_code"
) )
default_application = models.ForeignKey(
"authentik_core.Application",
null=True,
default=None,
on_delete=models.SET_DEFAULT,
help_text=_(
"When set, external users will be redirected to this application after authenticating."
),
)
web_certificate = models.ForeignKey( web_certificate = models.ForeignKey(
CertificateKeyPair, CertificateKeyPair,
null=True, null=True,
@ -72,18 +60,6 @@ class Brand(SerializerModel):
) )
attributes = models.JSONField(default=dict, blank=True) attributes = models.JSONField(default=dict, blank=True)
def branding_logo_url(self) -> str:
"""Get branding_logo with the correct prefix"""
if self.branding_logo.startswith("/static"):
return CONFIG.get("web.path", "/")[:-1] + self.branding_logo
return self.branding_logo
def branding_favicon_url(self) -> str:
"""Get branding_favicon with the correct prefix"""
if self.branding_favicon.startswith("/static"):
return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon
return self.branding_favicon
@property @property
def serializer(self) -> Serializer: def serializer(self) -> Serializer:
from authentik.brands.api import BrandSerializer from authentik.brands.api import BrandSerializer
@ -112,13 +88,3 @@ class Brand(SerializerModel):
models.Index(fields=["domain"]), models.Index(fields=["domain"]),
models.Index(fields=["default"]), models.Index(fields=["default"]),
] ]
class WebfingerProvider(models.Model):
"""Provider which supports webfinger discovery"""
class Meta:
abstract = True
def webfinger(self, resource: str, request: HttpRequest) -> dict:
raise NotImplementedError()


@ -5,11 +5,7 @@ from rest_framework.test import APITestCase
from authentik.brands.api import Themes from authentik.brands.api import Themes
from authentik.brands.models import Brand from authentik.brands.models import Brand
from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user, create_test_brand from authentik.core.tests.utils import create_test_admin_user, create_test_brand
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.models import OAuth2Provider
from authentik.providers.saml.models import SAMLProvider
class TestBrands(APITestCase): class TestBrands(APITestCase):
@ -79,45 +75,3 @@ class TestBrands(APITestCase):
reverse("authentik_api:brand-list"), data={"domain": "bar", "default": True} reverse("authentik_api:brand-list"), data={"domain": "bar", "default": True}
) )
self.assertEqual(response.status_code, 400) self.assertEqual(response.status_code, 400)
def test_webfinger_no_app(self):
"""Test Webfinger"""
create_test_brand()
self.assertJSONEqual(
self.client.get(reverse("authentik_brands:webfinger")).content.decode(), {}
)
def test_webfinger_not_supported(self):
"""Test Webfinger"""
brand = create_test_brand()
provider = SAMLProvider.objects.create(
name=generate_id(),
)
app = Application.objects.create(name=generate_id(), slug=generate_id(), provider=provider)
brand.default_application = app
brand.save()
self.assertJSONEqual(
self.client.get(reverse("authentik_brands:webfinger")).content.decode(), {}
)
def test_webfinger_oidc(self):
"""Test Webfinger"""
brand = create_test_brand()
provider = OAuth2Provider.objects.create(
name=generate_id(),
)
app = Application.objects.create(name=generate_id(), slug=generate_id(), provider=provider)
brand.default_application = app
brand.save()
self.assertJSONEqual(
self.client.get(reverse("authentik_brands:webfinger")).content.decode(),
{
"links": [
{
"href": f"http://testserver/application/o/{app.slug}/",
"rel": "http://openid.net/specs/connect/1.0/issuer",
}
],
"subject": None,
},
)


@ -1,9 +0,0 @@
"""authentik brand root URLs"""
from django.urls import path
from authentik.brands.views.webfinger import WebFingerView
urlpatterns = [
path(".well-known/webfinger", WebFingerView.as_view(), name="webfinger"),
]


@ -5,7 +5,7 @@ from typing import Any
from django.db.models import F, Q from django.db.models import F, Q
from django.db.models import Value as V from django.db.models import Value as V
from django.http.request import HttpRequest from django.http.request import HttpRequest
from sentry_sdk import get_current_span from sentry_sdk.hub import Hub
from authentik import get_full_version from authentik import get_full_version
from authentik.brands.models import Brand from authentik.brands.models import Brand
@ -33,7 +33,7 @@ def context_processor(request: HttpRequest) -> dict[str, Any]:
brand = getattr(request, "brand", DEFAULT_BRAND) brand = getattr(request, "brand", DEFAULT_BRAND)
tenant = getattr(request, "tenant", Tenant()) tenant = getattr(request, "tenant", Tenant())
trace = "" trace = ""
span = get_current_span() span = Hub.current.scope.span
if span: if span:
trace = span.to_traceparent() trace = span.to_traceparent()
return { return {
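This hunk moves between the two ways sentry-sdk exposes the active span: `Hub.current.scope.span` is the older Hub-based API, while `get_current_span()` is the newer top-level helper. A minimal sketch of the newer form, assuming only that sentry-sdk is installed (not authentik code):

```python
import sentry_sdk

def current_traceparent() -> str:
    """Return a W3C traceparent header value for the active span, if any."""
    span = sentry_sdk.get_current_span()  # None when no transaction/span is active
    return span.to_traceparent() if span else ""

print(current_traceparent())  # "" unless called inside an instrumented transaction
```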


@ -1,29 +0,0 @@
from typing import Any
from django.http import HttpRequest, HttpResponse, JsonResponse
from django.views import View
from authentik.brands.models import Brand, WebfingerProvider
from authentik.core.models import Application
class WebFingerView(View):
"""Webfinger endpoint"""
def get(self, request: HttpRequest) -> HttpResponse:
brand: Brand = request.brand
if not brand.default_application:
return JsonResponse({})
application: Application = brand.default_application
provider = application.get_provider()
if not provider or not isinstance(provider, WebfingerProvider):
return JsonResponse({})
webfinger_data = provider.webfinger(request.GET.get("resource"), request)
return JsonResponse(webfinger_data)
def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
response = super().dispatch(request, *args, **kwargs)
# RFC7033 spec
response["Access-Control-Allow-Origin"] = "*"
response["Content-Type"] = "application/jrd+json"
return response
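For context, RFC 7033 WebFinger responses are JSON Resource Descriptor (JRD) documents; the view above builds one from the OIDC provider behind the brand's default application. A sketch of the kind of payload involved, with a made-up host, slug and account (not taken from authentik):

```python
import json

# Shape of a JRD document advertising an OIDC issuer, per RFC 7033 / OIDC discovery.
jrd = {
    "subject": "acct:alice@authentik.example.com",   # echoes the ?resource= query
    "links": [
        {
            "rel": "http://openid.net/specs/connect/1.0/issuer",
            "href": "https://authentik.example.com/application/o/demo/",
        }
    ],
}

# The dispatch() override above also sets Content-Type: application/jrd+json
# and Access-Control-Allow-Origin: *, which is what the spec expects.
print(json.dumps(jrd, indent=2))
```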


@ -1,58 +0,0 @@
"""Application Roles API Viewset"""
from django.http import HttpRequest
from django.utils.translation import gettext_lazy as _
from rest_framework.exceptions import ValidationError
from rest_framework.viewsets import ModelViewSet
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import (
Application,
ApplicationEntitlement,
)
class ApplicationEntitlementSerializer(ModelSerializer):
"""ApplicationEntitlement Serializer"""
def validate_app(self, app: Application) -> Application:
"""Ensure user has permission to view"""
request: HttpRequest = self.context.get("request")
if not request and SERIALIZER_CONTEXT_BLUEPRINT in self.context:
return app
user = request.user
if user.has_perm("view_application", app) or user.has_perm(
"authentik_core.view_application"
):
return app
raise ValidationError(_("User does not have access to application."), code="invalid")
class Meta:
model = ApplicationEntitlement
fields = [
"pbm_uuid",
"name",
"app",
"attributes",
]
class ApplicationEntitlementViewSet(UsedByMixin, ModelViewSet):
"""ApplicationEntitlement Viewset"""
queryset = ApplicationEntitlement.objects.all()
serializer_class = ApplicationEntitlementSerializer
search_fields = [
"pbm_uuid",
"name",
"app",
"attributes",
]
filterset_fields = [
"pbm_uuid",
"name",
"app",
]
ordering = ["name"]


@ -7,7 +7,9 @@ from datetime import timedelta
from django.core.cache import cache from django.core.cache import cache
from django.db.models import QuerySet from django.db.models import QuerySet
from django.db.models.functions import ExtractHour from django.db.models.functions import ExtractHour
from django.http import HttpRequest
from django.shortcuts import get_object_or_404 from django.shortcuts import get_object_or_404
from django.urls import reverse
from drf_spectacular.types import OpenApiTypes from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
from guardian.shortcuts import get_objects_for_user from guardian.shortcuts import get_objects_for_user
@ -65,10 +67,16 @@ class ApplicationSerializer(ModelSerializer):
def get_launch_url(self, app: Application) -> str | None: def get_launch_url(self, app: Application) -> str | None:
"""Allow formatting of launch URL""" """Allow formatting of launch URL"""
user = None rel_url = reverse(
"authentik_core:application-launch",
kwargs={
"application_slug": app.slug,
},
)
if "request" in self.context: if "request" in self.context:
user = self.context["request"].user request: HttpRequest = self.context["request"]
return app.get_launch_url(user) return request.build_absolute_uri(rel_url)
return rel_url
def __init__(self, *args, **kwargs) -> None: def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
@ -103,12 +111,7 @@ class ApplicationSerializer(ModelSerializer):
class ApplicationViewSet(UsedByMixin, ModelViewSet): class ApplicationViewSet(UsedByMixin, ModelViewSet):
"""Application Viewset""" """Application Viewset"""
queryset = ( queryset = Application.objects.all().prefetch_related("provider").prefetch_related("policies")
Application.objects.all()
.with_provider()
.prefetch_related("policies")
.prefetch_related("backchannel_providers")
)
serializer_class = ApplicationSerializer serializer_class = ApplicationSerializer
search_fields = [ search_fields = [
"name", "name",
@ -152,9 +155,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
applications.append(application) applications.append(application)
return applications return applications
def _filter_applications_with_launch_url( def _filter_applications_with_launch_url(self, pagined_apps: Iterator[Application]) -> list[Application]:
self, pagined_apps: Iterator[Application]
) -> list[Application]:
applications = [] applications = []
for app in pagined_apps: for app in pagined_apps:
if app.get_launch_url(): if app.get_launch_url():
@ -235,9 +236,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
if superuser_full_list and request.user.is_superuser: if superuser_full_list and request.user.is_superuser:
return super().list(request) return super().list(request)
only_with_launch_url = str( only_with_launch_url = str(request.query_params.get("only_with_launch_url", "false")).lower()
request.query_params.get("only_with_launch_url", "false")
).lower()
queryset = self._filter_queryset_for_list(self.get_queryset()) queryset = self._filter_queryset_for_list(self.get_queryset())
paginator: Pagination = self.paginator paginator: Pagination = self.paginator
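The commits in this compare add (and then revert) the `only_with_launch_url` filter and switch `launch_url` to a full URL. From an API client's point of view it looks roughly like this; the host, token and printed fields are placeholders in a sketch, not authentik tooling:

```python
import requests

resp = requests.get(
    "https://authentik.example.com/api/v3/core/applications/",
    params={"only_with_launch_url": "true"},      # drop apps that have no launch URL
    headers={"Authorization": "Bearer <api-token>"},
    timeout=10,
)
resp.raise_for_status()
for app in resp.json()["results"]:                # paginated response body
    print(app["slug"], app["launch_url"])
```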


@ -2,12 +2,16 @@
from typing import TypedDict from typing import TypedDict
from django_filters.rest_framework import DjangoFilterBackend
from guardian.utils import get_anonymous_user
from rest_framework import mixins from rest_framework import mixins
from rest_framework.fields import SerializerMethodField from rest_framework.fields import SerializerMethodField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.request import Request from rest_framework.request import Request
from rest_framework.viewsets import GenericViewSet from rest_framework.viewsets import GenericViewSet
from ua_parser import user_agent_parser from ua_parser import user_agent_parser
from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.core.api.used_by import UsedByMixin from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer from authentik.core.api.utils import ModelSerializer
from authentik.core.models import AuthenticatedSession from authentik.core.models import AuthenticatedSession
@ -106,4 +110,11 @@ class AuthenticatedSessionViewSet(
search_fields = ["user__username", "last_ip", "last_user_agent"] search_fields = ["user__username", "last_ip", "last_user_agent"]
filterset_fields = ["user__username", "last_ip", "last_user_agent"] filterset_fields = ["user__username", "last_ip", "last_user_agent"]
ordering = ["user__username"] ordering = ["user__username"]
owner_field = "user" permission_classes = [OwnerSuperuserPermissions]
filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
def get_queryset(self):
user = self.request.user if self.request else get_anonymous_user()
if user.is_superuser:
return super().get_queryset()
return super().get_queryset().filter(user=user.pk)


@ -1,55 +1,30 @@
"""Authenticator Devices API Views""" """Authenticator Devices API Views"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema from drf_spectacular.utils import OpenApiParameter, extend_schema
from guardian.shortcuts import get_objects_for_user from rest_framework.fields import BooleanField, CharField, IntegerField, SerializerMethodField
from rest_framework.fields import ( from rest_framework.permissions import IsAdminUser, IsAuthenticated
BooleanField,
CharField,
DateTimeField,
SerializerMethodField,
)
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request from rest_framework.request import Request
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.viewsets import ViewSet from rest_framework.viewsets import ViewSet
from authentik.core.api.utils import MetaNameSerializer from authentik.core.api.utils import MetaNameSerializer
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
from authentik.stages.authenticator import device_classes, devices_for_user from authentik.stages.authenticator import device_classes, devices_for_user
from authentik.stages.authenticator.models import Device from authentik.stages.authenticator.models import Device
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
class DeviceSerializer(MetaNameSerializer): class DeviceSerializer(MetaNameSerializer):
"""Serializer for Duo authenticator devices""" """Serializer for Duo authenticator devices"""
pk = CharField() pk = IntegerField()
name = CharField() name = CharField()
type = SerializerMethodField() type = SerializerMethodField()
confirmed = BooleanField() confirmed = BooleanField()
created = DateTimeField(read_only=True)
last_updated = DateTimeField(read_only=True)
last_used = DateTimeField(read_only=True, allow_null=True)
extra_description = SerializerMethodField()
def get_type(self, instance: Device) -> str: def get_type(self, instance: Device) -> str:
"""Get type of device""" """Get type of device"""
return instance._meta.label return instance._meta.label
def get_extra_description(self, instance: Device) -> str:
"""Get extra description"""
if isinstance(instance, WebAuthnDevice):
return (
instance.device_type.description
if instance.device_type
else _("Extra description not available")
)
if isinstance(instance, EndpointDevice):
return instance.data.get("deviceSignals", {}).get("deviceModel")
return ""
class DeviceViewSet(ViewSet): class DeviceViewSet(ViewSet):
"""Viewset for authenticator devices""" """Viewset for authenticator devices"""
@ -68,14 +43,12 @@ class AdminDeviceViewSet(ViewSet):
"""Viewset for authenticator devices""" """Viewset for authenticator devices"""
serializer_class = DeviceSerializer serializer_class = DeviceSerializer
permission_classes = [] permission_classes = [IsAdminUser]
def get_devices(self, **kwargs): def get_devices(self, **kwargs):
"""Get all devices in all child classes""" """Get all devices in all child classes"""
for model in device_classes(): for model in device_classes():
device_set = get_objects_for_user( device_set = model.objects.filter(**kwargs)
self.request.user, f"{model._meta.app_label}.view_{model._meta.model_name}", model
).filter(**kwargs)
yield from device_set yield from device_set
@extend_schema( @extend_schema(


@ -2,15 +2,8 @@
from json import dumps from json import dumps
from django_filters.filters import AllValuesMultipleFilter, BooleanFilter
from django_filters.filterset import FilterSet
from drf_spectacular.types import OpenApiTypes from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import ( from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
OpenApiParameter,
OpenApiResponse,
extend_schema,
extend_schema_field,
)
from guardian.shortcuts import get_objects_for_user from guardian.shortcuts import get_objects_for_user
from rest_framework import mixins from rest_framework import mixins
from rest_framework.decorators import action from rest_framework.decorators import action
@ -30,10 +23,8 @@ from authentik.core.api.utils import (
PassiveSerializer, PassiveSerializer,
) )
from authentik.core.expression.evaluator import PropertyMappingEvaluator from authentik.core.expression.evaluator import PropertyMappingEvaluator
from authentik.core.expression.exceptions import PropertyMappingExpressionException
from authentik.core.models import Group, PropertyMapping, User from authentik.core.models import Group, PropertyMapping, User
from authentik.events.utils import sanitize_item from authentik.events.utils import sanitize_item
from authentik.lib.utils.errors import exception_to_string
from authentik.policies.api.exec import PolicyTestSerializer from authentik.policies.api.exec import PolicyTestSerializer
from authentik.rbac.decorators import permission_required from authentik.rbac.decorators import permission_required
@ -76,18 +67,6 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri
] ]
class PropertyMappingFilterSet(FilterSet):
"""Filter for PropertyMapping"""
managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))
managed__isnull = BooleanFilter(field_name="managed", lookup_expr="isnull")
class Meta:
model = PropertyMapping
fields = ["name", "managed"]
class PropertyMappingViewSet( class PropertyMappingViewSet(
TypesMixin, TypesMixin,
mixins.RetrieveModelMixin, mixins.RetrieveModelMixin,
@ -108,9 +87,11 @@ class PropertyMappingViewSet(
queryset = PropertyMapping.objects.select_subclasses() queryset = PropertyMapping.objects.select_subclasses()
serializer_class = PropertyMappingSerializer serializer_class = PropertyMappingSerializer
filterset_class = PropertyMappingFilterSet search_fields = [
"name",
]
filterset_fields = {"managed": ["isnull"]}
ordering = ["name"] ordering = ["name"]
search_fields = ["name"]
@permission_required("authentik_core.view_propertymapping") @permission_required("authentik_core.view_propertymapping")
@extend_schema( @extend_schema(
@ -164,15 +145,12 @@ class PropertyMappingViewSet(
response_data = {"successful": True, "result": ""} response_data = {"successful": True, "result": ""}
try: try:
result = mapping.evaluate(dry_run=True, **context) result = mapping.evaluate(**context)
response_data["result"] = dumps( response_data["result"] = dumps(
sanitize_item(result), indent=(4 if format_result else None) sanitize_item(result), indent=(4 if format_result else None)
) )
except PropertyMappingExpressionException as exc:
response_data["result"] = exception_to_string(exc.exc)
response_data["successful"] = False
except Exception as exc: except Exception as exc:
response_data["result"] = exception_to_string(exc) response_data["result"] = str(exc)
response_data["successful"] = False response_data["successful"] = False
response = PropertyMappingTestResultSerializer(response_data) response = PropertyMappingTestResultSerializer(response_data)
return Response(response.data) return Response(response.data)


@ -38,7 +38,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
"name", "name",
"authentication_flow", "authentication_flow",
"authorization_flow", "authorization_flow",
"invalidation_flow",
"property_mappings", "property_mappings",
"component", "component",
"assigned_application_slug", "assigned_application_slug",
@ -51,7 +50,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
] ]
extra_kwargs = { extra_kwargs = {
"authorization_flow": {"required": True, "allow_null": False}, "authorization_flow": {"required": True, "allow_null": False},
"invalidation_flow": {"required": True, "allow_null": False},
} }


@ -2,21 +2,24 @@
from collections.abc import Iterable from collections.abc import Iterable
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import OpenApiResponse, extend_schema from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework import mixins from rest_framework import mixins
from rest_framework.decorators import action from rest_framework.decorators import action
from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.parsers import MultiPartParser from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request from rest_framework.request import Request
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet from rest_framework.viewsets import GenericViewSet
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.object_types import TypesMixin from authentik.core.api.object_types import TypesMixin
from authentik.core.api.used_by import UsedByMixin from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import MetaNameSerializer, ModelSerializer from authentik.core.api.utils import MetaNameSerializer, ModelSerializer
from authentik.core.models import GroupSourceConnection, Source, UserSourceConnection from authentik.core.models import Source, UserSourceConnection
from authentik.core.types import UserSettingSerializer from authentik.core.types import UserSettingSerializer
from authentik.lib.utils.file import ( from authentik.lib.utils.file import (
FilePathSerializer, FilePathSerializer,
@ -57,8 +60,6 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
"enabled", "enabled",
"authentication_flow", "authentication_flow",
"enrollment_flow", "enrollment_flow",
"user_property_mappings",
"group_property_mappings",
"component", "component",
"verbose_name", "verbose_name",
"verbose_name_plural", "verbose_name_plural",
@ -85,7 +86,7 @@ class SourceViewSet(
serializer_class = SourceSerializer serializer_class = SourceSerializer
lookup_field = "slug" lookup_field = "slug"
search_fields = ["slug", "name"] search_fields = ["slug", "name"]
filterset_fields = ["slug", "name", "managed", "pbm_uuid"] filterset_fields = ["slug", "name", "managed"]
def get_queryset(self): # pragma: no cover def get_queryset(self): # pragma: no cover
return Source.objects.select_subclasses() return Source.objects.select_subclasses()
@ -156,9 +157,9 @@ class SourceViewSet(
class UserSourceConnectionSerializer(SourceSerializer): class UserSourceConnectionSerializer(SourceSerializer):
"""User source connection""" """OAuth Source Serializer"""
source_obj = SourceSerializer(read_only=True, source="source") source = SourceSerializer(read_only=True)
class Meta: class Meta:
model = UserSourceConnection model = UserSourceConnection
@ -166,10 +167,10 @@ class UserSourceConnectionSerializer(SourceSerializer):
"pk", "pk",
"user", "user",
"source", "source",
"source_obj",
"created", "created",
] ]
extra_kwargs = { extra_kwargs = {
"user": {"read_only": True},
"created": {"read_only": True}, "created": {"read_only": True},
} }
@ -186,45 +187,7 @@ class UserSourceConnectionViewSet(
queryset = UserSourceConnection.objects.all() queryset = UserSourceConnection.objects.all()
serializer_class = UserSourceConnectionSerializer serializer_class = UserSourceConnectionSerializer
filterset_fields = ["user", "source__slug"] permission_classes = [OwnerSuperuserPermissions]
search_fields = ["source__slug"] filterset_fields = ["user"]
ordering = ["source__slug", "pk"] filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
owner_field = "user" ordering = ["pk"]
class GroupSourceConnectionSerializer(SourceSerializer):
"""Group Source Connection"""
source_obj = SourceSerializer(read_only=True)
class Meta:
model = GroupSourceConnection
fields = [
"pk",
"group",
"source",
"source_obj",
"identifier",
"created",
]
extra_kwargs = {
"created": {"read_only": True},
}
class GroupSourceConnectionViewSet(
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
UsedByMixin,
mixins.ListModelMixin,
GenericViewSet,
):
"""Group-source connection Viewset"""
queryset = GroupSourceConnection.objects.all()
serializer_class = GroupSourceConnectionSerializer
filterset_fields = ["group", "source__slug"]
search_fields = ["source__slug"]
ordering = ["source__slug", "pk"]
owner_field = "user"


@ -3,15 +3,18 @@
from typing import Any from typing import Any
from django.utils.timezone import now from django.utils.timezone import now
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import OpenApiResponse, extend_schema, inline_serializer from drf_spectacular.utils import OpenApiResponse, extend_schema, inline_serializer
from guardian.shortcuts import assign_perm, get_anonymous_user from guardian.shortcuts import assign_perm, get_anonymous_user
from rest_framework.decorators import action from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField from rest_framework.fields import CharField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.request import Request from rest_framework.request import Request
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet from rest_framework.viewsets import ModelViewSet
from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.blueprints.api import ManagedSerializer from authentik.blueprints.api import ManagedSerializer
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.used_by import UsedByMixin from authentik.core.api.used_by import UsedByMixin
@ -135,8 +138,8 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
"managed", "managed",
] ]
ordering = ["identifier", "expires"] ordering = ["identifier", "expires"]
owner_field = "user" permission_classes = [OwnerSuperuserPermissions]
rbac_allow_create_without_perm = True filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
def get_queryset(self): def get_queryset(self):
user = self.request.user if self.request else get_anonymous_user() user = self.request.user if self.request else get_anonymous_user()


@ -1,12 +1,10 @@
"""transactional application and provider creation""" """transactional application and provider creation"""
from django.apps import apps from django.apps import apps
from django.db.models import Model
from django.utils.translation import gettext as _
from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema, extend_schema_field from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema, extend_schema_field
from rest_framework.exceptions import PermissionDenied, ValidationError from rest_framework.exceptions import ValidationError
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, ListField from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, ListField
from rest_framework.permissions import IsAuthenticated from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request from rest_framework.request import Request
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.views import APIView from rest_framework.views import APIView
@ -22,9 +20,8 @@ from authentik.blueprints.v1.common import (
from authentik.blueprints.v1.importer import Importer from authentik.blueprints.v1.importer import Importer
from authentik.core.api.applications import ApplicationSerializer from authentik.core.api.applications import ApplicationSerializer
from authentik.core.api.utils import PassiveSerializer from authentik.core.api.utils import PassiveSerializer
from authentik.core.models import Application, Provider from authentik.core.models import Provider
from authentik.lib.utils.reflection import all_subclasses from authentik.lib.utils.reflection import all_subclasses
from authentik.policies.api.bindings import PolicyBindingSerializer
def get_provider_serializer_mapping(): def get_provider_serializer_mapping():
@ -48,20 +45,6 @@ class TransactionProviderField(DictField):
"""Dictionary field which can hold provider creation data""" """Dictionary field which can hold provider creation data"""
class TransactionPolicyBindingSerializer(PolicyBindingSerializer):
"""PolicyBindingSerializer which does not require target as target is set implicitly"""
def validate(self, attrs):
# As the PolicyBindingSerializer checks that the correct things can be bound to a target
# but we don't have a target here as that's set by the blueprint, pass in an empty app
# which will have the correct allowed combination of group/user/policy.
attrs["target"] = Application()
return super().validate(attrs)
class Meta(PolicyBindingSerializer.Meta):
fields = [x for x in PolicyBindingSerializer.Meta.fields if x != "target"]
class TransactionApplicationSerializer(PassiveSerializer): class TransactionApplicationSerializer(PassiveSerializer):
"""Serializer for creating a provider and an application in one transaction""" """Serializer for creating a provider and an application in one transaction"""
@ -69,8 +52,6 @@ class TransactionApplicationSerializer(PassiveSerializer):
provider_model = ChoiceField(choices=list(get_provider_serializer_mapping().keys())) provider_model = ChoiceField(choices=list(get_provider_serializer_mapping().keys()))
provider = TransactionProviderField() provider = TransactionProviderField()
policy_bindings = TransactionPolicyBindingSerializer(many=True, required=False)
_provider_model: type[Provider] = None _provider_model: type[Provider] = None
def validate_provider_model(self, fq_model_name: str) -> str: def validate_provider_model(self, fq_model_name: str) -> str:
@ -115,19 +96,6 @@ class TransactionApplicationSerializer(PassiveSerializer):
id="app", id="app",
) )
) )
for binding in attrs.get("policy_bindings", []):
binding["target"] = KeyOf(None, ScalarNode(tag="", value="app"))
for key, value in binding.items():
if not isinstance(value, Model):
continue
binding[key] = value.pk
blueprint.entries.append(
BlueprintEntry(
model="authentik_policies.policybinding",
state=BlueprintEntryDesiredState.MUST_CREATED,
identifiers=binding,
)
)
importer = Importer(blueprint, {}) importer = Importer(blueprint, {})
try: try:
valid, _ = importer.validate(raise_validation_errors=True) valid, _ = importer.validate(raise_validation_errors=True)
@ -152,7 +120,8 @@ class TransactionApplicationResponseSerializer(PassiveSerializer):
class TransactionalApplicationView(APIView): class TransactionalApplicationView(APIView):
"""Create provider and application and attach them in a single transaction""" """Create provider and application and attach them in a single transaction"""
permission_classes = [IsAuthenticated] # TODO: Migrate to a more specific permission
permission_classes = [IsAdminUser]
@extend_schema( @extend_schema(
request=TransactionApplicationSerializer(), request=TransactionApplicationSerializer(),
@ -164,23 +133,8 @@ class TransactionalApplicationView(APIView):
"""Convert data into a blueprint, validate it and apply it""" """Convert data into a blueprint, validate it and apply it"""
data = TransactionApplicationSerializer(data=request.data) data = TransactionApplicationSerializer(data=request.data)
data.is_valid(raise_exception=True) data.is_valid(raise_exception=True)
blueprint: Blueprint = data.validated_data
for entry in blueprint.entries: importer = Importer(data.validated_data, {})
full_model = entry.get_model(blueprint)
app, __, model = full_model.partition(".")
if not request.user.has_perm(f"{app}.add_{model}"):
raise PermissionDenied(
{
entry.id: _(
"User lacks permission to create {model}".format_map(
{
"model": full_model,
}
)
)
}
)
importer = Importer(blueprint, {})
applied = importer.apply() applied = importer.apply()
response = {"applied": False, "logs": []} response = {"applied": False, "logs": []}
response["applied"] = applied response["applied"] = applied
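The transactional endpoint wraps the submitted application and provider in an in-memory blueprint and applies it atomically. A rough sketch of what a client request might look like, assuming the usual `/api/v3/core/transactional/applications/` route; every identifier below is a placeholder, and the `policy_bindings` field only exists on the newer side of this compare:

```python
import requests

payload = {
    "app": {"name": "Demo app", "slug": "demo-app"},
    "provider_model": "authentik_providers_oauth2.oauth2provider",
    "provider": {
        "name": "Demo provider",
        "authorization_flow": "<flow-uuid>",   # placeholder, not a real flow
    },
}
# Assumed route and method; check the deployed API schema if they differ.
resp = requests.put(
    "https://authentik.example.com/api/v3/core/transactional/applications/",
    json=payload,
    headers={"Authorization": "Bearer <api-token>"},
    timeout=10,
)
print(resp.status_code, resp.json())
```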

View File

@ -14,7 +14,6 @@ from rest_framework.request import Request
from rest_framework.response import Response from rest_framework.response import Response
from authentik.core.api.utils import PassiveSerializer from authentik.core.api.utils import PassiveSerializer
from authentik.rbac.filters import ObjectFilter
class DeleteAction(Enum): class DeleteAction(Enum):
@ -54,7 +53,7 @@ class UsedByMixin:
@extend_schema( @extend_schema(
responses={200: UsedBySerializer(many=True)}, responses={200: UsedBySerializer(many=True)},
) )
@action(detail=True, pagination_class=None, filter_backends=[ObjectFilter]) @action(detail=True, pagination_class=None, filter_backends=[])
def used_by(self, request: Request, *args, **kwargs) -> Response: def used_by(self, request: Request, *args, **kwargs) -> Response:
"""Get a list of all objects that use this object""" """Get a list of all objects that use this object"""
model: Model = self.get_object() model: Model = self.get_object()

View File

@ -5,7 +5,6 @@ from json import loads
from typing import Any from typing import Any
from django.contrib.auth import update_session_auth_hash from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.models import Permission
from django.contrib.sessions.backends.cache import KEY_PREFIX from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache from django.core.cache import cache
from django.db.models.functions import ExtractHour from django.db.models.functions import ExtractHour
@ -34,21 +33,15 @@ from drf_spectacular.utils import (
) )
from guardian.shortcuts import get_objects_for_user from guardian.shortcuts import get_objects_for_user
from rest_framework.decorators import action from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError from rest_framework.fields import CharField, IntegerField, ListField, SerializerMethodField
from rest_framework.fields import (
BooleanField,
CharField,
ChoiceField,
DateTimeField,
IntegerField,
ListField,
SerializerMethodField,
)
from rest_framework.request import Request from rest_framework.request import Request
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.serializers import ( from rest_framework.serializers import (
BooleanField,
DateTimeField,
ListSerializer, ListSerializer,
PrimaryKeyRelatedField, PrimaryKeyRelatedField,
ValidationError,
) )
from rest_framework.validators import UniqueValidator from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet from rest_framework.viewsets import ModelViewSet
@ -85,7 +78,6 @@ from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner
from authentik.flows.views.executor import QS_KEY_TOKEN from authentik.flows.views.executor import QS_KEY_TOKEN
from authentik.lib.avatars import get_avatar from authentik.lib.avatars import get_avatar
from authentik.rbac.decorators import permission_required from authentik.rbac.decorators import permission_required
from authentik.rbac.models import get_permission_choices
from authentik.stages.email.models import EmailStage from authentik.stages.email.models import EmailStage
from authentik.stages.email.tasks import send_mails from authentik.stages.email.tasks import send_mails
from authentik.stages.email.utils import TemplateEmailMessage from authentik.stages.email.utils import TemplateEmailMessage
@ -149,19 +141,12 @@ class UserSerializer(ModelSerializer):
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
if SERIALIZER_CONTEXT_BLUEPRINT in self.context: if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
self.fields["password"] = CharField(required=False, allow_null=True) self.fields["password"] = CharField(required=False, allow_null=True)
self.fields["permissions"] = ListField(
required=False, child=ChoiceField(choices=get_permission_choices())
)
def create(self, validated_data: dict) -> User: def create(self, validated_data: dict) -> User:
"""If this serializer is used in the blueprint context, we allow for """If this serializer is used in the blueprint context, we allow for
directly setting a password. However should be done via the `set_password` directly setting a password. However should be done via the `set_password`
method instead of directly setting it like rest_framework.""" method instead of directly setting it like rest_framework."""
password = validated_data.pop("password", None) password = validated_data.pop("password", None)
permissions = Permission.objects.filter(
codename__in=[x.split(".")[1] for x in validated_data.pop("permissions", [])]
)
validated_data["user_permissions"] = permissions
instance: User = super().create(validated_data) instance: User = super().create(validated_data)
self._set_password(instance, password) self._set_password(instance, password)
return instance return instance
@ -170,10 +155,6 @@ class UserSerializer(ModelSerializer):
"""Same as `create` above, set the password directly if we're in a blueprint """Same as `create` above, set the password directly if we're in a blueprint
context""" context"""
password = validated_data.pop("password", None) password = validated_data.pop("password", None)
permissions = Permission.objects.filter(
codename__in=[x.split(".")[1] for x in validated_data.pop("permissions", [])]
)
validated_data["user_permissions"] = permissions
instance = super().update(instance, validated_data) instance = super().update(instance, validated_data)
self._set_password(instance, password) self._set_password(instance, password)
return instance return instance
@ -236,11 +217,9 @@ class UserSerializer(ModelSerializer):
"path", "path",
"type", "type",
"uuid", "uuid",
"password_change_date",
] ]
extra_kwargs = { extra_kwargs = {
"name": {"allow_blank": True}, "name": {"allow_blank": True},
"password_change_date": {"read_only": True},
} }
@ -429,7 +408,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
queryset = User.objects.none() queryset = User.objects.none()
ordering = ["username"] ordering = ["username"]
serializer_class = UserSerializer serializer_class = UserSerializer
search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"] search_fields = ["username", "name", "is_active", "email", "uuid"]
filterset_class = UsersFilter filterset_class = UsersFilter
def get_queryset(self): def get_queryset(self):
@ -587,7 +566,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
"""Set password for user""" """Set password for user"""
user: User = self.get_object() user: User = self.get_object()
try: try:
user.set_password(request.data.get("password"), request=request) user.set_password(request.data.get("password"))
user.save() user.save()
except (ValidationError, IntegrityError) as exc: except (ValidationError, IntegrityError) as exc:
LOGGER.debug("Failed to set password", exc=exc) LOGGER.debug("Failed to set password", exc=exc)
@ -668,12 +647,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
@permission_required("authentik_core.impersonate") @permission_required("authentik_core.impersonate")
@extend_schema( @extend_schema(
request=inline_serializer( request=OpenApiTypes.NONE,
"ImpersonationSerializer",
{
"reason": CharField(required=True),
},
),
responses={ responses={
"204": OpenApiResponse(description="Successfully started impersonation"), "204": OpenApiResponse(description="Successfully started impersonation"),
"401": OpenApiResponse(description="Access denied"), "401": OpenApiResponse(description="Access denied"),
@ -685,27 +659,18 @@ class UserViewSet(UsedByMixin, ModelViewSet):
if not request.tenant.impersonation: if not request.tenant.impersonation:
LOGGER.debug("User attempted to impersonate", user=request.user) LOGGER.debug("User attempted to impersonate", user=request.user)
return Response(status=401) return Response(status=401)
user_to_be = self.get_object() if not request.user.has_perm("impersonate"):
reason = request.data.get("reason", "")
# Check both object-level perms and global perms
if not request.user.has_perm(
"authentik_core.impersonate", user_to_be
) and not request.user.has_perm("authentik_core.impersonate"):
LOGGER.debug("User attempted to impersonate without permissions", user=request.user) LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
return Response(status=401) return Response(status=401)
user_to_be = self.get_object()
if user_to_be.pk == self.request.user.pk: if user_to_be.pk == self.request.user.pk:
LOGGER.debug("User attempted to impersonate themselves", user=request.user) LOGGER.debug("User attempted to impersonate themselves", user=request.user)
return Response(status=401) return Response(status=401)
if not reason and request.tenant.impersonation_require_reason:
LOGGER.debug(
"User attempted to impersonate without providing a reason", user=request.user
)
return Response(status=401)
request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER] = request.user request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER] = request.user
request.session[SESSION_KEY_IMPERSONATE_USER] = user_to_be request.session[SESSION_KEY_IMPERSONATE_USER] = user_to_be
Event.new(EventAction.IMPERSONATION_STARTED, reason=reason).from_http(request, user_to_be) Event.new(EventAction.IMPERSONATION_STARTED).from_http(request, user_to_be)
return Response(status=201) return Response(status=201)
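The impersonation hunk above is the difference between requiring a reason (newer side) and accepting an empty body (older side). From a client, starting an impersonation session looks roughly like this; host, user pk and token are placeholders in a sketch, not part of the diff:

```python
import requests

resp = requests.post(
    "https://authentik.example.com/api/v3/core/users/42/impersonate/",
    json={"reason": "debugging a login issue"},   # ignored by the older API side
    headers={"Authorization": "Bearer <api-token>"},
    timeout=10,
)
print(resp.status_code)   # 201 on success, 401 when impersonation is not allowed
```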


@ -44,12 +44,13 @@ class TokenBackend(InbuiltBackend):
self, request: HttpRequest, username: str | None, password: str | None, **kwargs: Any self, request: HttpRequest, username: str | None, password: str | None, **kwargs: Any
) -> User | None: ) -> User | None:
try: try:
user = User._default_manager.get_by_natural_key(username) user = User._default_manager.get_by_natural_key(username)
except User.DoesNotExist: except User.DoesNotExist:
# Run the default password hasher once to reduce the timing # Run the default password hasher once to reduce the timing
# difference between an existing and a nonexistent user (#20760). # difference between an existing and a nonexistent user (#20760).
User().set_password(password, request=request) User().set_password(password)
return None return None
tokens = Token.filter_not_expired( tokens = Token.filter_not_expired(


@ -58,7 +58,6 @@ class PropertyMappingEvaluator(BaseEvaluator):
self._context["user"] = user self._context["user"] = user
if request: if request:
req.http_request = request req.http_request = request
self._context["http_request"] = request
req.context.update(**kwargs) req.context.update(**kwargs)
self._context["request"] = req self._context["request"] = req
self._context.update(**kwargs) self._context.update(**kwargs)


@ -1,32 +0,0 @@
"""Change user type"""
from authentik.core.models import User, UserTypes
from authentik.tenants.management import TenantCommand
class Command(TenantCommand):
"""Change user type"""
def add_arguments(self, parser):
parser.add_argument("--type", type=str, required=True)
parser.add_argument("--all", action="store_true", default=False)
parser.add_argument("usernames", nargs="*", type=str)
def handle_per_tenant(self, **options):
print(options)
new_type = UserTypes(options["type"])
qs = (
User.objects.exclude_anonymous()
.exclude(type=UserTypes.SERVICE_ACCOUNT)
.exclude(type=UserTypes.INTERNAL_SERVICE_ACCOUNT)
)
if options["usernames"] and options["all"]:
self.stderr.write("--all and usernames specified, only one can be specified")
return
if not options["usernames"] and not options["all"]:
self.stderr.write("--all or usernames must be specified")
return
if options["usernames"] and not options["all"]:
qs = qs.filter(username__in=options["usernames"])
updated = qs.update(type=new_type)
self.stdout.write(f"Updated {updated} users.")


@ -5,7 +5,6 @@ from typing import TextIO
from daphne.management.commands.runserver import Command as RunServer from daphne.management.commands.runserver import Command as RunServer
from daphne.server import Server from daphne.server import Server
from authentik.lib.debug import start_debug_server
from authentik.root.signals import post_startup, pre_startup, startup from authentik.root.signals import post_startup, pre_startup, startup
@ -14,7 +13,6 @@ class SignalServer(Server):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
start_debug_server()
def ready_callable(): def ready_callable():
pre_startup.send(sender=self) pre_startup.send(sender=self)


@ -4,7 +4,6 @@ import code
import platform import platform
import sys import sys
import traceback import traceback
from pprint import pprint
from django.apps import apps from django.apps import apps
from django.core.management.base import BaseCommand from django.core.management.base import BaseCommand
@ -17,9 +16,7 @@ from authentik.events.middleware import should_log_model
from authentik.events.models import Event, EventAction from authentik.events.models import Event, EventAction
from authentik.events.utils import model_to_dict from authentik.events.utils import model_to_dict
BANNER_TEXT = f"""### authentik shell ({get_full_version()})
def get_banner_text(shell_type="shell") -> str:
return f"""### authentik {shell_type} ({get_full_version()})
### Node {platform.node()} | Arch {platform.machine()} | Python {platform.python_version()} """ ### Node {platform.node()} | Arch {platform.machine()} | Python {platform.python_version()} """
@ -37,9 +34,7 @@ class Command(BaseCommand):
def get_namespace(self): def get_namespace(self):
"""Prepare namespace with all models""" """Prepare namespace with all models"""
namespace = { namespace = {}
"pprint": pprint,
}
# Gather Django models and constants from each app # Gather Django models and constants from each app
for app in apps.get_app_configs(): for app in apps.get_app_configs():
@ -116,4 +111,4 @@ class Command(BaseCommand):
readline.parse_and_bind("tab: complete") readline.parse_and_bind("tab: complete")
# Run interactive shell # Run interactive shell
code.interact(banner=get_banner_text(), local=namespace) code.interact(banner=BANNER_TEXT, local=namespace)

View File

@ -9,7 +9,6 @@ from django.db import close_old_connections
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
from authentik.lib.config import CONFIG from authentik.lib.config import CONFIG
from authentik.lib.debug import start_debug_server
from authentik.root.celery import CELERY_APP from authentik.root.celery import CELERY_APP
LOGGER = get_logger() LOGGER = get_logger()
@ -29,7 +28,10 @@ class Command(BaseCommand):
def handle(self, **options): def handle(self, **options):
LOGGER.debug("Celery options", **options) LOGGER.debug("Celery options", **options)
close_old_connections() close_old_connections()
start_debug_server() if CONFIG.get_bool("remote_debug"):
import debugpy
debugpy.listen(("0.0.0.0", 6900)) # nosec
worker: Worker = CELERY_APP.Worker( worker: Worker = CELERY_APP.Worker(
no_color=False, no_color=False,
quiet=True, quiet=True,


@ -5,7 +5,7 @@ from contextvars import ContextVar
from uuid import uuid4 from uuid import uuid4
from django.http import HttpRequest, HttpResponse from django.http import HttpRequest, HttpResponse
from django.utils.translation import override from django.utils.translation import activate
from sentry_sdk.api import set_tag from sentry_sdk.api import set_tag
from structlog.contextvars import STRUCTLOG_KEY_PREFIX from structlog.contextvars import STRUCTLOG_KEY_PREFIX
@ -31,20 +31,16 @@ class ImpersonateMiddleware:
def __call__(self, request: HttpRequest) -> HttpResponse: def __call__(self, request: HttpRequest) -> HttpResponse:
# No permission checks are done here, they need to be checked before # No permission checks are done here, they need to be checked before
# SESSION_KEY_IMPERSONATE_USER is set. # SESSION_KEY_IMPERSONATE_USER is set.
locale_to_set = None
if request.user.is_authenticated: if request.user.is_authenticated:
locale = request.user.locale(request) locale = request.user.locale(request)
if locale != "": if locale != "":
locale_to_set = locale activate(locale)
if SESSION_KEY_IMPERSONATE_USER in request.session: if SESSION_KEY_IMPERSONATE_USER in request.session:
request.user = request.session[SESSION_KEY_IMPERSONATE_USER] request.user = request.session[SESSION_KEY_IMPERSONATE_USER]
# Ensure that the user is active, otherwise nothing will work # Ensure that the user is active, otherwise nothing will work
request.user.is_active = True request.user.is_active = True
if locale_to_set:
with override(locale_to_set):
return self.get_response(request)
return self.get_response(request) return self.get_response(request)


@ -7,13 +7,12 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def backport_is_backchannel(apps: Apps, schema_editor: BaseDatabaseSchemaEditor): def backport_is_backchannel(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
db_alias = schema_editor.connection.alias
from authentik.providers.ldap.models import LDAPProvider from authentik.providers.ldap.models import LDAPProvider
from authentik.providers.scim.models import SCIMProvider from authentik.providers.scim.models import SCIMProvider
for model in [LDAPProvider, SCIMProvider]: for model in [LDAPProvider, SCIMProvider]:
try: try:
for obj in model.objects.using(db_alias).only("is_backchannel"): for obj in model.objects.only("is_backchannel"):
obj.is_backchannel = True obj.is_backchannel = True
obj.save() obj.save()
except (DatabaseError, InternalError, ProgrammingError): except (DatabaseError, InternalError, ProgrammingError):


@ -1,43 +0,0 @@
# Generated by Django 5.0.2 on 2024-02-29 11:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0035_alter_group_options_and_more"),
]
operations = [
migrations.AddField(
model_name="source",
name="group_property_mappings",
field=models.ManyToManyField(
blank=True,
default=None,
related_name="source_grouppropertymappings_set",
to="authentik_core.propertymapping",
),
),
migrations.AddField(
model_name="source",
name="user_property_mappings",
field=models.ManyToManyField(
blank=True,
default=None,
related_name="source_userpropertymappings_set",
to="authentik_core.propertymapping",
),
),
migrations.AlterField(
model_name="source",
name="property_mappings",
field=models.ManyToManyField(
blank=True,
default=None,
related_name="source_set",
to="authentik_core.propertymapping",
),
),
]

Some files were not shown because too many files have changed in this diff.