Compare commits
	
		
			1 Commits
		
	
	
		
			celery-2-d
			...
			tests/e2e/
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 4b0d641a51 | 
@ -1,5 +1,5 @@
 | 
			
		||||
[bumpversion]
 | 
			
		||||
current_version = 2025.6.3
 | 
			
		||||
current_version = 2025.4.1
 | 
			
		||||
tag = True
 | 
			
		||||
commit = True
 | 
			
		||||
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
 | 
			
		||||
@ -21,8 +21,6 @@ optional_value = final
 | 
			
		||||
 | 
			
		||||
[bumpversion:file:package.json]
 | 
			
		||||
 | 
			
		||||
[bumpversion:file:package-lock.json]
 | 
			
		||||
 | 
			
		||||
[bumpversion:file:docker-compose.yml]
 | 
			
		||||
 | 
			
		||||
[bumpversion:file:schema.yml]
 | 
			
		||||
@ -33,4 +31,6 @@ optional_value = final
 | 
			
		||||
 | 
			
		||||
[bumpversion:file:internal/constants/constants.go]
 | 
			
		||||
 | 
			
		||||
[bumpversion:file:web/src/common/constants.ts]
 | 
			
		||||
 | 
			
		||||
[bumpversion:file:lifecycle/aws/template.yaml]
 | 
			
		||||
 | 
			
		||||
@ -5,10 +5,8 @@ dist/**
 | 
			
		||||
build/**
 | 
			
		||||
build_docs/**
 | 
			
		||||
*Dockerfile
 | 
			
		||||
**/*Dockerfile
 | 
			
		||||
blueprints/local
 | 
			
		||||
.git
 | 
			
		||||
!gen-ts-api/node_modules
 | 
			
		||||
!gen-ts-api/dist/**
 | 
			
		||||
!gen-go-api/
 | 
			
		||||
.venv
 | 
			
		||||
 | 
			
		||||
@ -7,9 +7,6 @@ charset = utf-8
 | 
			
		||||
trim_trailing_whitespace = true
 | 
			
		||||
insert_final_newline = true
 | 
			
		||||
 | 
			
		||||
[*.toml]
 | 
			
		||||
indent_size = 2
 | 
			
		||||
 | 
			
		||||
[*.html]
 | 
			
		||||
indent_size = 2
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										2
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/actions/setup/action.yml
									
									
									
									
										vendored
									
									
								
							@ -36,7 +36,7 @@ runs:
 | 
			
		||||
      with:
 | 
			
		||||
        go-version-file: "go.mod"
 | 
			
		||||
    - name: Setup docker cache
 | 
			
		||||
      uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
 | 
			
		||||
      uses: ScribeMD/docker-cache@0.5.0
 | 
			
		||||
      with:
 | 
			
		||||
        key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
 | 
			
		||||
    - name: Setup dependencies
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										21
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										21
									
								
								.github/dependabot.yml
									
									
									
									
										vendored
									
									
								
							@ -23,13 +23,7 @@ updates:
 | 
			
		||||
  - package-ecosystem: npm
 | 
			
		||||
    directories:
 | 
			
		||||
      - "/web"
 | 
			
		||||
      - "/web/packages/sfe"
 | 
			
		||||
      - "/web/packages/core"
 | 
			
		||||
      - "/web/packages/esbuild-plugin-live-reload"
 | 
			
		||||
      - "/packages/prettier-config"
 | 
			
		||||
      - "/packages/tsconfig"
 | 
			
		||||
      - "/packages/docusaurus-config"
 | 
			
		||||
      - "/packages/eslint-config"
 | 
			
		||||
      - "/web/sfe"
 | 
			
		||||
    schedule:
 | 
			
		||||
      interval: daily
 | 
			
		||||
      time: "04:00"
 | 
			
		||||
@ -74,9 +68,6 @@ updates:
 | 
			
		||||
      wdio:
 | 
			
		||||
        patterns:
 | 
			
		||||
          - "@wdio/*"
 | 
			
		||||
      goauthentik:
 | 
			
		||||
        patterns:
 | 
			
		||||
          - "@goauthentik/*"
 | 
			
		||||
  - package-ecosystem: npm
 | 
			
		||||
    directory: "/website"
 | 
			
		||||
    schedule:
 | 
			
		||||
@ -97,16 +88,6 @@ updates:
 | 
			
		||||
          - "swc-*"
 | 
			
		||||
          - "lightningcss*"
 | 
			
		||||
          - "@rspack/binding*"
 | 
			
		||||
      goauthentik:
 | 
			
		||||
        patterns:
 | 
			
		||||
          - "@goauthentik/*"
 | 
			
		||||
      eslint:
 | 
			
		||||
        patterns:
 | 
			
		||||
          - "@eslint/*"
 | 
			
		||||
          - "@typescript-eslint/*"
 | 
			
		||||
          - "eslint-*"
 | 
			
		||||
          - "eslint"
 | 
			
		||||
          - "typescript-eslint"
 | 
			
		||||
  - package-ecosystem: npm
 | 
			
		||||
    directory: "/lifecycle/aws"
 | 
			
		||||
    schedule:
 | 
			
		||||
 | 
			
		||||
@ -38,8 +38,6 @@ jobs:
 | 
			
		||||
      # Needed for attestation
 | 
			
		||||
      id-token: write
 | 
			
		||||
      attestations: write
 | 
			
		||||
      # Needed for checkout
 | 
			
		||||
      contents: read
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4
 | 
			
		||||
      - uses: docker/setup-qemu-action@v3.6.0
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										1
									
								
								.github/workflows/api-ts-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/api-ts-publish.yml
									
									
									
									
										vendored
									
									
								
							@ -53,7 +53,6 @@ jobs:
 | 
			
		||||
          signoff: true
 | 
			
		||||
          # ID from https://api.github.com/users/authentik-automation[bot]
 | 
			
		||||
          author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
 | 
			
		||||
          labels: dependencies
 | 
			
		||||
      - uses: peter-evans/enable-pull-request-automerge@v3
 | 
			
		||||
        with:
 | 
			
		||||
          token: ${{ steps.generate_token.outputs.token }}
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										3
									
								
								.github/workflows/ci-main-daily.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										3
									
								
								.github/workflows/ci-main-daily.yml
									
									
									
									
										vendored
									
									
								
							@ -9,15 +9,14 @@ on:
 | 
			
		||||
 | 
			
		||||
jobs:
 | 
			
		||||
  test-container:
 | 
			
		||||
    if: ${{ github.repository != 'goauthentik/authentik-internal' }}
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    strategy:
 | 
			
		||||
      fail-fast: false
 | 
			
		||||
      matrix:
 | 
			
		||||
        version:
 | 
			
		||||
          - docs
 | 
			
		||||
          - version-2025-4
 | 
			
		||||
          - version-2025-2
 | 
			
		||||
          - version-2024-12
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4
 | 
			
		||||
      - run: |
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										8
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										8
									
								
								.github/workflows/ci-main.yml
									
									
									
									
										vendored
									
									
								
							@ -62,7 +62,6 @@ jobs:
 | 
			
		||||
        psql:
 | 
			
		||||
          - 15-alpine
 | 
			
		||||
          - 16-alpine
 | 
			
		||||
          - 17-alpine
 | 
			
		||||
        run_id: [1, 2, 3, 4, 5]
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4
 | 
			
		||||
@ -117,7 +116,6 @@ jobs:
 | 
			
		||||
        psql:
 | 
			
		||||
          - 15-alpine
 | 
			
		||||
          - 16-alpine
 | 
			
		||||
          - 17-alpine
 | 
			
		||||
        run_id: [1, 2, 3, 4, 5]
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4
 | 
			
		||||
@ -202,7 +200,7 @@ jobs:
 | 
			
		||||
        uses: actions/cache@v4
 | 
			
		||||
        with:
 | 
			
		||||
          path: web/dist
 | 
			
		||||
          key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
 | 
			
		||||
          key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
 | 
			
		||||
      - name: prepare web ui
 | 
			
		||||
        if: steps.cache-web.outputs.cache-hit != 'true'
 | 
			
		||||
        working-directory: web
 | 
			
		||||
@ -247,13 +245,11 @@ jobs:
 | 
			
		||||
      # Needed for attestation
 | 
			
		||||
      id-token: write
 | 
			
		||||
      attestations: write
 | 
			
		||||
      # Needed for checkout
 | 
			
		||||
      contents: read
 | 
			
		||||
    needs: ci-core-mark
 | 
			
		||||
    uses: ./.github/workflows/_reusable-docker-build.yaml
 | 
			
		||||
    secrets: inherit
 | 
			
		||||
    with:
 | 
			
		||||
      image_name: ${{ github.repository == 'goauthentik/authentik-internal' && 'ghcr.io/goauthentik/internal-server' || 'ghcr.io/goauthentik/dev-server' }}
 | 
			
		||||
      image_name: ghcr.io/goauthentik/dev-server
 | 
			
		||||
      release: false
 | 
			
		||||
  pr-comment:
 | 
			
		||||
    needs:
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										1
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/ci-outpost.yml
									
									
									
									
										vendored
									
									
								
							@ -59,7 +59,6 @@ jobs:
 | 
			
		||||
        with:
 | 
			
		||||
          jobs: ${{ toJSON(needs) }}
 | 
			
		||||
  build-container:
 | 
			
		||||
    if: ${{ github.repository != 'goauthentik/authentik-internal' }}
 | 
			
		||||
    timeout-minutes: 120
 | 
			
		||||
    needs:
 | 
			
		||||
      - ci-outpost-mark
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										52
									
								
								.github/workflows/ci-website.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										52
									
								
								.github/workflows/ci-website.yml
									
									
									
									
										vendored
									
									
								
							@ -49,7 +49,6 @@ jobs:
 | 
			
		||||
      matrix:
 | 
			
		||||
        job:
 | 
			
		||||
          - build
 | 
			
		||||
          - build:integrations
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4
 | 
			
		||||
      - uses: actions/setup-node@v4
 | 
			
		||||
@ -62,65 +61,14 @@ jobs:
 | 
			
		||||
      - name: build
 | 
			
		||||
        working-directory: website/
 | 
			
		||||
        run: npm run ${{ matrix.job }}
 | 
			
		||||
  build-container:
 | 
			
		||||
    if: ${{ github.repository != 'goauthentik/authentik-internal' }}
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    permissions:
 | 
			
		||||
      # Needed to upload container images to ghcr.io
 | 
			
		||||
      packages: write
 | 
			
		||||
      # Needed for attestation
 | 
			
		||||
      id-token: write
 | 
			
		||||
      attestations: write
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4
 | 
			
		||||
        with:
 | 
			
		||||
          ref: ${{ github.event.pull_request.head.sha }}
 | 
			
		||||
      - name: Set up QEMU
 | 
			
		||||
        uses: docker/setup-qemu-action@v3.6.0
 | 
			
		||||
      - name: Set up Docker Buildx
 | 
			
		||||
        uses: docker/setup-buildx-action@v3
 | 
			
		||||
      - name: prepare variables
 | 
			
		||||
        uses: ./.github/actions/docker-push-variables
 | 
			
		||||
        id: ev
 | 
			
		||||
        env:
 | 
			
		||||
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
 | 
			
		||||
        with:
 | 
			
		||||
          image-name: ghcr.io/goauthentik/dev-docs
 | 
			
		||||
      - name: Login to Container Registry
 | 
			
		||||
        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
 | 
			
		||||
        uses: docker/login-action@v3
 | 
			
		||||
        with:
 | 
			
		||||
          registry: ghcr.io
 | 
			
		||||
          username: ${{ github.repository_owner }}
 | 
			
		||||
          password: ${{ secrets.GITHUB_TOKEN }}
 | 
			
		||||
      - name: Build Docker Image
 | 
			
		||||
        id: push
 | 
			
		||||
        uses: docker/build-push-action@v6
 | 
			
		||||
        with:
 | 
			
		||||
          tags: ${{ steps.ev.outputs.imageTags }}
 | 
			
		||||
          file: website/Dockerfile
 | 
			
		||||
          push: ${{ steps.ev.outputs.shouldPush == 'true' }}
 | 
			
		||||
          platforms: linux/amd64,linux/arm64
 | 
			
		||||
          context: .
 | 
			
		||||
          cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache
 | 
			
		||||
          cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }}
 | 
			
		||||
      - uses: actions/attest-build-provenance@v2
 | 
			
		||||
        id: attest
 | 
			
		||||
        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
 | 
			
		||||
        with:
 | 
			
		||||
          subject-name: ${{ steps.ev.outputs.attestImageNames }}
 | 
			
		||||
          subject-digest: ${{ steps.push.outputs.digest }}
 | 
			
		||||
          push-to-registry: true
 | 
			
		||||
  ci-website-mark:
 | 
			
		||||
    if: always()
 | 
			
		||||
    needs:
 | 
			
		||||
      - lint
 | 
			
		||||
      - test
 | 
			
		||||
      - build
 | 
			
		||||
      - build-container
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: re-actors/alls-green@release/v1
 | 
			
		||||
        with:
 | 
			
		||||
          jobs: ${{ toJSON(needs) }}
 | 
			
		||||
          allowed-skips: ${{ github.repository == 'goauthentik/authentik-internal' && 'build-container' || '[]' }}
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										2
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							@ -2,7 +2,7 @@ name: "CodeQL"
 | 
			
		||||
 | 
			
		||||
on:
 | 
			
		||||
  push:
 | 
			
		||||
    branches: [main, next, version*]
 | 
			
		||||
    branches: [main, "*", next, version*]
 | 
			
		||||
  pull_request:
 | 
			
		||||
    branches: [main]
 | 
			
		||||
  schedule:
 | 
			
		||||
 | 
			
		||||
@ -37,7 +37,6 @@ jobs:
 | 
			
		||||
          signoff: true
 | 
			
		||||
          # ID from https://api.github.com/users/authentik-automation[bot]
 | 
			
		||||
          author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
 | 
			
		||||
          labels: dependencies
 | 
			
		||||
      - uses: peter-evans/enable-pull-request-automerge@v3
 | 
			
		||||
        with:
 | 
			
		||||
          token: ${{ steps.generate_token.outputs.token }}
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										1
									
								
								.github/workflows/image-compress.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.github/workflows/image-compress.yml
									
									
									
									
										vendored
									
									
								
							@ -53,7 +53,6 @@ jobs:
 | 
			
		||||
          body: ${{ steps.compress.outputs.markdown }}
 | 
			
		||||
          delete-branch: true
 | 
			
		||||
          signoff: true
 | 
			
		||||
          labels: dependencies
 | 
			
		||||
      - uses: peter-evans/enable-pull-request-automerge@v3
 | 
			
		||||
        if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
 | 
			
		||||
        with:
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										16
									
								
								.github/workflows/packages-npm-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										16
									
								
								.github/workflows/packages-npm-publish.yml
									
									
									
									
										vendored
									
									
								
							@ -7,7 +7,6 @@ on:
 | 
			
		||||
      - packages/eslint-config/**
 | 
			
		||||
      - packages/prettier-config/**
 | 
			
		||||
      - packages/tsconfig/**
 | 
			
		||||
      - web/packages/esbuild-plugin-live-reload/**
 | 
			
		||||
  workflow_dispatch:
 | 
			
		||||
jobs:
 | 
			
		||||
  publish:
 | 
			
		||||
@ -17,28 +16,27 @@ jobs:
 | 
			
		||||
      fail-fast: false
 | 
			
		||||
      matrix:
 | 
			
		||||
        package:
 | 
			
		||||
          - packages/docusaurus-config
 | 
			
		||||
          - packages/eslint-config
 | 
			
		||||
          - packages/prettier-config
 | 
			
		||||
          - packages/tsconfig
 | 
			
		||||
          - web/packages/esbuild-plugin-live-reload
 | 
			
		||||
          - docusaurus-config
 | 
			
		||||
          - eslint-config
 | 
			
		||||
          - prettier-config
 | 
			
		||||
          - tsconfig
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4
 | 
			
		||||
        with:
 | 
			
		||||
          fetch-depth: 2
 | 
			
		||||
      - uses: actions/setup-node@v4
 | 
			
		||||
        with:
 | 
			
		||||
          node-version-file: ${{ matrix.package }}/package.json
 | 
			
		||||
          node-version-file: packages/${{ matrix.package }}/package.json
 | 
			
		||||
          registry-url: "https://registry.npmjs.org"
 | 
			
		||||
      - name: Get changed files
 | 
			
		||||
        id: changed-files
 | 
			
		||||
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
 | 
			
		||||
        with:
 | 
			
		||||
          files: |
 | 
			
		||||
            ${{ matrix.package }}/package.json
 | 
			
		||||
            packages/${{ matrix.package }}/package.json
 | 
			
		||||
      - name: Publish package
 | 
			
		||||
        if: steps.changed-files.outputs.any_changed == 'true'
 | 
			
		||||
        working-directory: ${{ matrix.package }}
 | 
			
		||||
        working-directory: packages/${{ matrix.package}}
 | 
			
		||||
        run: |
 | 
			
		||||
          npm ci
 | 
			
		||||
          npm run build
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										45
									
								
								.github/workflows/release-publish.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										45
									
								
								.github/workflows/release-publish.yml
									
									
									
									
										vendored
									
									
								
							@ -20,49 +20,6 @@ jobs:
 | 
			
		||||
      release: true
 | 
			
		||||
      registry_dockerhub: true
 | 
			
		||||
      registry_ghcr: true
 | 
			
		||||
  build-docs:
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    permissions:
 | 
			
		||||
      # Needed to upload container images to ghcr.io
 | 
			
		||||
      packages: write
 | 
			
		||||
      # Needed for attestation
 | 
			
		||||
      id-token: write
 | 
			
		||||
      attestations: write
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4
 | 
			
		||||
      - name: Set up QEMU
 | 
			
		||||
        uses: docker/setup-qemu-action@v3.6.0
 | 
			
		||||
      - name: Set up Docker Buildx
 | 
			
		||||
        uses: docker/setup-buildx-action@v3
 | 
			
		||||
      - name: prepare variables
 | 
			
		||||
        uses: ./.github/actions/docker-push-variables
 | 
			
		||||
        id: ev
 | 
			
		||||
        env:
 | 
			
		||||
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
 | 
			
		||||
        with:
 | 
			
		||||
          image-name: ghcr.io/goauthentik/docs
 | 
			
		||||
      - name: Login to GitHub Container Registry
 | 
			
		||||
        uses: docker/login-action@v3
 | 
			
		||||
        with:
 | 
			
		||||
          registry: ghcr.io
 | 
			
		||||
          username: ${{ github.repository_owner }}
 | 
			
		||||
          password: ${{ secrets.GITHUB_TOKEN }}
 | 
			
		||||
      - name: Build Docker Image
 | 
			
		||||
        id: push
 | 
			
		||||
        uses: docker/build-push-action@v6
 | 
			
		||||
        with:
 | 
			
		||||
          tags: ${{ steps.ev.outputs.imageTags }}
 | 
			
		||||
          file: website/Dockerfile
 | 
			
		||||
          push: true
 | 
			
		||||
          platforms: linux/amd64,linux/arm64
 | 
			
		||||
          context: .
 | 
			
		||||
      - uses: actions/attest-build-provenance@v2
 | 
			
		||||
        id: attest
 | 
			
		||||
        if: true
 | 
			
		||||
        with:
 | 
			
		||||
          subject-name: ${{ steps.ev.outputs.attestImageNames }}
 | 
			
		||||
          subject-digest: ${{ steps.push.outputs.digest }}
 | 
			
		||||
          push-to-registry: true
 | 
			
		||||
  build-outpost:
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    permissions:
 | 
			
		||||
@ -236,6 +193,6 @@ jobs:
 | 
			
		||||
          SENTRY_ORG: authentik-security-inc
 | 
			
		||||
          SENTRY_PROJECT: authentik
 | 
			
		||||
        with:
 | 
			
		||||
          release: authentik@${{ steps.ev.outputs.version }}
 | 
			
		||||
          version: authentik@${{ steps.ev.outputs.version }}
 | 
			
		||||
          sourcemaps: "./web/dist"
 | 
			
		||||
          url_prefix: "~/static/dist"
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										21
									
								
								.github/workflows/repo-mirror-cleanup.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										21
									
								
								.github/workflows/repo-mirror-cleanup.yml
									
									
									
									
										vendored
									
									
								
							@ -1,21 +0,0 @@
 | 
			
		||||
name: "authentik-repo-mirror-cleanup"
 | 
			
		||||
 | 
			
		||||
on:
 | 
			
		||||
  workflow_dispatch:
 | 
			
		||||
 | 
			
		||||
jobs:
 | 
			
		||||
  to_internal:
 | 
			
		||||
    if: ${{ github.repository != 'goauthentik/authentik-internal' }}
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4
 | 
			
		||||
        with:
 | 
			
		||||
          fetch-depth: 0
 | 
			
		||||
      - if: ${{ env.MIRROR_KEY != '' }}
 | 
			
		||||
        uses: BeryJu/repository-mirroring-action@5cf300935bc2e068f73ea69bcc411a8a997208eb
 | 
			
		||||
        with:
 | 
			
		||||
          target_repo_url: git@github.com:goauthentik/authentik-internal.git
 | 
			
		||||
          ssh_private_key: ${{ secrets.GH_MIRROR_KEY }}
 | 
			
		||||
          args: --tags --force --prune
 | 
			
		||||
        env:
 | 
			
		||||
          MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }}
 | 
			
		||||
							
								
								
									
										9
									
								
								.github/workflows/repo-mirror.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										9
									
								
								.github/workflows/repo-mirror.yml
									
									
									
									
										vendored
									
									
								
							@ -11,10 +11,11 @@ jobs:
 | 
			
		||||
        with:
 | 
			
		||||
          fetch-depth: 0
 | 
			
		||||
      - if: ${{ env.MIRROR_KEY != '' }}
 | 
			
		||||
        uses: BeryJu/repository-mirroring-action@5cf300935bc2e068f73ea69bcc411a8a997208eb
 | 
			
		||||
        uses: pixta-dev/repository-mirroring-action@v1
 | 
			
		||||
        with:
 | 
			
		||||
          target_repo_url: git@github.com:goauthentik/authentik-internal.git
 | 
			
		||||
          ssh_private_key: ${{ secrets.GH_MIRROR_KEY }}
 | 
			
		||||
          args: --tags --force
 | 
			
		||||
          target_repo_url:
 | 
			
		||||
            git@github.com:goauthentik/authentik-internal.git
 | 
			
		||||
          ssh_private_key:
 | 
			
		||||
            ${{ secrets.GH_MIRROR_KEY }}
 | 
			
		||||
        env:
 | 
			
		||||
          MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }}
 | 
			
		||||
 | 
			
		||||
@ -16,7 +16,6 @@ env:
 | 
			
		||||
 | 
			
		||||
jobs:
 | 
			
		||||
  compile:
 | 
			
		||||
    if: ${{ github.repository != 'goauthentik/authentik-internal' }}
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    steps:
 | 
			
		||||
      - id: generate_token
 | 
			
		||||
@ -53,6 +52,3 @@ jobs:
 | 
			
		||||
          body: "core, web: update translations"
 | 
			
		||||
          delete-branch: true
 | 
			
		||||
          signoff: true
 | 
			
		||||
          labels: dependencies
 | 
			
		||||
          # ID from https://api.github.com/users/authentik-automation[bot]
 | 
			
		||||
          author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										15
									
								
								.github/workflows/translation-rename.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										15
									
								
								.github/workflows/translation-rename.yml
									
									
									
									
										vendored
									
									
								
							@ -15,7 +15,6 @@ jobs:
 | 
			
		||||
    runs-on: ubuntu-latest
 | 
			
		||||
    if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
 | 
			
		||||
    steps:
 | 
			
		||||
      - uses: actions/checkout@v4
 | 
			
		||||
      - id: generate_token
 | 
			
		||||
        uses: tibdex/github-app-token@v2
 | 
			
		||||
        with:
 | 
			
		||||
@ -26,13 +25,23 @@ jobs:
 | 
			
		||||
        env:
 | 
			
		||||
          GH_TOKEN: ${{ steps.generate_token.outputs.token }}
 | 
			
		||||
        run: |
 | 
			
		||||
          title=$(gh pr view ${{ github.event.pull_request.number }} --json  "title" -q ".title")
 | 
			
		||||
          title=$(curl -q -L \
 | 
			
		||||
            -H "Accept: application/vnd.github+json" \
 | 
			
		||||
            -H "Authorization: Bearer ${GH_TOKEN}" \
 | 
			
		||||
            -H "X-GitHub-Api-Version: 2022-11-28" \
 | 
			
		||||
            https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title)
 | 
			
		||||
          echo "title=${title}" >> "$GITHUB_OUTPUT"
 | 
			
		||||
      - name: Rename
 | 
			
		||||
        env:
 | 
			
		||||
          GH_TOKEN: ${{ steps.generate_token.outputs.token }}
 | 
			
		||||
        run: |
 | 
			
		||||
          gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies
 | 
			
		||||
          curl -L \
 | 
			
		||||
            -X PATCH \
 | 
			
		||||
            -H "Accept: application/vnd.github+json" \
 | 
			
		||||
            -H "Authorization: Bearer ${GH_TOKEN}" \
 | 
			
		||||
            -H "X-GitHub-Api-Version: 2022-11-28" \
 | 
			
		||||
            https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \
 | 
			
		||||
            -d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}"
 | 
			
		||||
      - uses: peter-evans/enable-pull-request-automerge@v3
 | 
			
		||||
        with:
 | 
			
		||||
          token: ${{ steps.generate_token.outputs.token }}
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										5
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										5
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							@ -100,6 +100,9 @@ ipython_config.py
 | 
			
		||||
# pyenv
 | 
			
		||||
.python-version
 | 
			
		||||
 | 
			
		||||
# celery beat schedule file
 | 
			
		||||
celerybeat-schedule
 | 
			
		||||
 | 
			
		||||
# SageMath parsed files
 | 
			
		||||
*.sage.py
 | 
			
		||||
 | 
			
		||||
@ -163,6 +166,8 @@ dmypy.json
 | 
			
		||||
 | 
			
		||||
# pyenv
 | 
			
		||||
 | 
			
		||||
# celery beat schedule file
 | 
			
		||||
 | 
			
		||||
# SageMath parsed files
 | 
			
		||||
 | 
			
		||||
# Environments
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										4
									
								
								.vscode/settings.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.vscode/settings.json
									
									
									
									
										vendored
									
									
								
							@ -6,15 +6,13 @@
 | 
			
		||||
        "!Context scalar",
 | 
			
		||||
        "!Enumerate sequence",
 | 
			
		||||
        "!Env scalar",
 | 
			
		||||
        "!Env sequence",
 | 
			
		||||
        "!Find sequence",
 | 
			
		||||
        "!Format sequence",
 | 
			
		||||
        "!If sequence",
 | 
			
		||||
        "!Index scalar",
 | 
			
		||||
        "!KeyOf scalar",
 | 
			
		||||
        "!Value scalar",
 | 
			
		||||
        "!AtIndex scalar",
 | 
			
		||||
        "!ParseJSON scalar"
 | 
			
		||||
        "!AtIndex scalar"
 | 
			
		||||
    ],
 | 
			
		||||
    "typescript.preferences.importModuleSpecifier": "non-relative",
 | 
			
		||||
    "typescript.preferences.importModuleSpecifierEnding": "index",
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										52
									
								
								Dockerfile
									
									
									
									
									
								
							
							
						
						
									
										52
									
								
								Dockerfile
									
									
									
									
									
								
							@ -1,7 +1,26 @@
 | 
			
		||||
# syntax=docker/dockerfile:1
 | 
			
		||||
 | 
			
		||||
# Stage 1: Build webui
 | 
			
		||||
FROM --platform=${BUILDPLATFORM} docker.io/library/node:24-slim AS node-builder
 | 
			
		||||
# Stage 1: Build website
 | 
			
		||||
FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder
 | 
			
		||||
 | 
			
		||||
ENV NODE_ENV=production
 | 
			
		||||
 | 
			
		||||
WORKDIR /work/website
 | 
			
		||||
 | 
			
		||||
RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \
 | 
			
		||||
    --mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \
 | 
			
		||||
    --mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \
 | 
			
		||||
    npm ci --include=dev
 | 
			
		||||
 | 
			
		||||
COPY ./website /work/website/
 | 
			
		||||
COPY ./blueprints /work/blueprints/
 | 
			
		||||
COPY ./schema.yml /work/
 | 
			
		||||
COPY ./SECURITY.md /work/
 | 
			
		||||
 | 
			
		||||
RUN npm run build-bundled
 | 
			
		||||
 | 
			
		||||
# Stage 2: Build webui
 | 
			
		||||
FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder
 | 
			
		||||
 | 
			
		||||
ARG GIT_BUILD_HASH
 | 
			
		||||
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
 | 
			
		||||
@ -13,7 +32,7 @@ RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
 | 
			
		||||
    --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
 | 
			
		||||
    --mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \
 | 
			
		||||
    --mount=type=bind,target=/work/web/scripts,src=./web/scripts \
 | 
			
		||||
    --mount=type=cache,id=npm-ak,sharing=shared,target=/root/.npm \
 | 
			
		||||
    --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \
 | 
			
		||||
    npm ci --include=dev
 | 
			
		||||
 | 
			
		||||
COPY ./package.json /work
 | 
			
		||||
@ -24,7 +43,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
 | 
			
		||||
RUN npm run build && \
 | 
			
		||||
    npm run build:sfe
 | 
			
		||||
 | 
			
		||||
# Stage 2: Build go proxy
 | 
			
		||||
# Stage 3: Build go proxy
 | 
			
		||||
FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder
 | 
			
		||||
 | 
			
		||||
ARG TARGETOS
 | 
			
		||||
@ -49,8 +68,8 @@ RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
 | 
			
		||||
COPY ./cmd /go/src/goauthentik.io/cmd
 | 
			
		||||
COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib
 | 
			
		||||
COPY ./web/static.go /go/src/goauthentik.io/web/static.go
 | 
			
		||||
COPY --from=node-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
 | 
			
		||||
COPY --from=node-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt
 | 
			
		||||
COPY --from=web-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
 | 
			
		||||
COPY --from=web-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt
 | 
			
		||||
COPY ./internal /go/src/goauthentik.io/internal
 | 
			
		||||
COPY ./go.mod /go/src/goauthentik.io/go.mod
 | 
			
		||||
COPY ./go.sum /go/src/goauthentik.io/go.sum
 | 
			
		||||
@ -61,7 +80,7 @@ RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
 | 
			
		||||
    CGO_ENABLED=1 GOFIPS140=latest GOARM="${TARGETVARIANT#v}" \
 | 
			
		||||
    go build -o /go/authentik ./cmd/server
 | 
			
		||||
 | 
			
		||||
# Stage 3: MaxMind GeoIP
 | 
			
		||||
# Stage 4: MaxMind GeoIP
 | 
			
		||||
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip
 | 
			
		||||
 | 
			
		||||
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
 | 
			
		||||
@ -74,10 +93,10 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
 | 
			
		||||
    mkdir -p /usr/share/GeoIP && \
 | 
			
		||||
    /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
 | 
			
		||||
 | 
			
		||||
# Stage 4: Download uv
 | 
			
		||||
FROM ghcr.io/astral-sh/uv:0.7.17 AS uv
 | 
			
		||||
# Stage 5: Base python image
 | 
			
		||||
FROM ghcr.io/goauthentik/fips-python:3.13.5-slim-bookworm-fips AS python-base
 | 
			
		||||
# Stage 5: Download uv
 | 
			
		||||
FROM ghcr.io/astral-sh/uv:0.7.4 AS uv
 | 
			
		||||
# Stage 6: Base python image
 | 
			
		||||
FROM ghcr.io/goauthentik/fips-python:3.13.3-slim-bookworm-fips AS python-base
 | 
			
		||||
 | 
			
		||||
ENV VENV_PATH="/ak-root/.venv" \
 | 
			
		||||
    PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
 | 
			
		||||
@ -90,7 +109,7 @@ WORKDIR /ak-root/
 | 
			
		||||
 | 
			
		||||
COPY --from=uv /uv /uvx /bin/
 | 
			
		||||
 | 
			
		||||
# Stage 6: Python dependencies
 | 
			
		||||
# Stage 7: Python dependencies
 | 
			
		||||
FROM python-base AS python-deps
 | 
			
		||||
 | 
			
		||||
ARG TARGETARCH
 | 
			
		||||
@ -122,11 +141,10 @@ ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec"
 | 
			
		||||
 | 
			
		||||
RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \
 | 
			
		||||
    --mount=type=bind,target=uv.lock,src=uv.lock \
 | 
			
		||||
    --mount=type=bind,target=packages,src=packages \
 | 
			
		||||
    --mount=type=cache,target=/root/.cache/uv \
 | 
			
		||||
    uv sync --frozen --no-install-project --no-dev
 | 
			
		||||
 | 
			
		||||
# Stage 7: Run
 | 
			
		||||
# Stage 8: Run
 | 
			
		||||
FROM python-base AS final-image
 | 
			
		||||
 | 
			
		||||
ARG VERSION
 | 
			
		||||
@ -168,10 +186,10 @@ COPY ./blueprints /blueprints
 | 
			
		||||
COPY ./lifecycle/ /lifecycle
 | 
			
		||||
COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
 | 
			
		||||
COPY --from=go-builder /go/authentik /bin/authentik
 | 
			
		||||
COPY ./packages/ /ak-root/packages
 | 
			
		||||
COPY --from=python-deps /ak-root/.venv /ak-root/.venv
 | 
			
		||||
COPY --from=node-builder /work/web/dist/ /web/dist/
 | 
			
		||||
COPY --from=node-builder /work/web/authentik/ /web/authentik/
 | 
			
		||||
COPY --from=web-builder /work/web/dist/ /web/dist/
 | 
			
		||||
COPY --from=web-builder /work/web/authentik/ /web/authentik/
 | 
			
		||||
COPY --from=website-builder /work/website/build/ /website/help/
 | 
			
		||||
COPY --from=geoip /usr/share/GeoIP /geoip
 | 
			
		||||
 | 
			
		||||
USER 1000
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										61
									
								
								Makefile
									
									
									
									
									
								
							
							
						
						
									
										61
									
								
								Makefile
									
									
									
									
									
								
							@ -1,17 +1,16 @@
 | 
			
		||||
.PHONY: gen dev-reset all clean test web website
 | 
			
		||||
 | 
			
		||||
SHELL := /usr/bin/env bash
 | 
			
		||||
.SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail
 | 
			
		||||
.SHELLFLAGS += ${SHELLFLAGS} -e
 | 
			
		||||
PWD = $(shell pwd)
 | 
			
		||||
UID = $(shell id -u)
 | 
			
		||||
GID = $(shell id -g)
 | 
			
		||||
NPM_VERSION = $(shell python -m scripts.generate_semver)
 | 
			
		||||
PY_SOURCES = authentik packages tests scripts lifecycle .github
 | 
			
		||||
PY_SOURCES = authentik tests scripts lifecycle .github
 | 
			
		||||
DOCKER_IMAGE ?= "authentik:test"
 | 
			
		||||
 | 
			
		||||
GEN_API_TS = gen-ts-api
 | 
			
		||||
GEN_API_PY = gen-py-api
 | 
			
		||||
GEN_API_GO = gen-go-api
 | 
			
		||||
GEN_API_TS = "gen-ts-api"
 | 
			
		||||
GEN_API_PY = "gen-py-api"
 | 
			
		||||
GEN_API_GO = "gen-go-api"
 | 
			
		||||
 | 
			
		||||
pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null)
 | 
			
		||||
pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null)
 | 
			
		||||
@ -86,10 +85,6 @@ dev-create-db:
 | 
			
		||||
 | 
			
		||||
dev-reset: dev-drop-db dev-create-db migrate  ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.
 | 
			
		||||
 | 
			
		||||
update-test-mmdb:  ## Update test GeoIP and ASN Databases
 | 
			
		||||
	curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-ASN-Test.mmdb -o ${PWD}/tests/GeoLite2-ASN-Test.mmdb
 | 
			
		||||
	curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-City-Test.mmdb -o ${PWD}/tests/GeoLite2-City-Test.mmdb
 | 
			
		||||
 | 
			
		||||
#########################
 | 
			
		||||
## API Schema
 | 
			
		||||
#########################
 | 
			
		||||
@ -98,7 +93,7 @@ gen-build:  ## Extract the schema from the database
 | 
			
		||||
	AUTHENTIK_DEBUG=true \
 | 
			
		||||
		AUTHENTIK_TENANTS__ENABLED=true \
 | 
			
		||||
		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
 | 
			
		||||
		uv run ak make_blueprint_schema --file blueprints/schema.json
 | 
			
		||||
		uv run ak make_blueprint_schema > blueprints/schema.json
 | 
			
		||||
	AUTHENTIK_DEBUG=true \
 | 
			
		||||
		AUTHENTIK_TENANTS__ENABLED=true \
 | 
			
		||||
		AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
 | 
			
		||||
@ -122,19 +117,14 @@ gen-diff:  ## (Release) generate the changelog diff between the current schema a
 | 
			
		||||
	npx prettier --write diff.md
 | 
			
		||||
 | 
			
		||||
gen-clean-ts:  ## Remove generated API client for Typescript
 | 
			
		||||
	rm -rf ${PWD}/${GEN_API_TS}/
 | 
			
		||||
	rm -rf ${PWD}/web/node_modules/@goauthentik/api/
 | 
			
		||||
	rm -rf ./${GEN_API_TS}/
 | 
			
		||||
	rm -rf ./web/node_modules/@goauthentik/api/
 | 
			
		||||
 | 
			
		||||
gen-clean-go:  ## Remove generated API client for Go
 | 
			
		||||
	mkdir -p ${PWD}/${GEN_API_GO}
 | 
			
		||||
ifneq ($(wildcard ${PWD}/${GEN_API_GO}/.*),)
 | 
			
		||||
	make -C ${PWD}/${GEN_API_GO} clean
 | 
			
		||||
else
 | 
			
		||||
	rm -rf ${PWD}/${GEN_API_GO}
 | 
			
		||||
endif
 | 
			
		||||
	rm -rf ./${GEN_API_GO}/
 | 
			
		||||
 | 
			
		||||
gen-clean-py:  ## Remove generated API client for Python
 | 
			
		||||
	rm -rf ${PWD}/${GEN_API_PY}/
 | 
			
		||||
	rm -rf ./${GEN_API_PY}/
 | 
			
		||||
 | 
			
		||||
gen-clean: gen-clean-ts gen-clean-go gen-clean-py  ## Remove generated API clients
 | 
			
		||||
 | 
			
		||||
@ -150,9 +140,9 @@ gen-client-ts: gen-clean-ts  ## Build and install the authentik API for Typescri
 | 
			
		||||
		--additional-properties=npmVersion=${NPM_VERSION} \
 | 
			
		||||
		--git-repo-id authentik \
 | 
			
		||||
		--git-user-id goauthentik
 | 
			
		||||
 | 
			
		||||
	cd ${PWD}/${GEN_API_TS} && npm link
 | 
			
		||||
	cd ${PWD}/web && npm link @goauthentik/api
 | 
			
		||||
	mkdir -p web/node_modules/@goauthentik/api
 | 
			
		||||
	cd ./${GEN_API_TS} && npm i
 | 
			
		||||
	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api
 | 
			
		||||
 | 
			
		||||
gen-client-py: gen-clean-py ## Build and install the authentik API for Python
 | 
			
		||||
	docker run \
 | 
			
		||||
@ -166,17 +156,24 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python
 | 
			
		||||
		--additional-properties=packageVersion=${NPM_VERSION} \
 | 
			
		||||
		--git-repo-id authentik \
 | 
			
		||||
		--git-user-id goauthentik
 | 
			
		||||
	pip install ./${GEN_API_PY}
 | 
			
		||||
 | 
			
		||||
gen-client-go: gen-clean-go  ## Build and install the authentik API for Golang
 | 
			
		||||
	mkdir -p ${PWD}/${GEN_API_GO}
 | 
			
		||||
ifeq ($(wildcard ${PWD}/${GEN_API_GO}/.*),)
 | 
			
		||||
	git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO}
 | 
			
		||||
else
 | 
			
		||||
	cd ${PWD}/${GEN_API_GO} && git pull
 | 
			
		||||
endif
 | 
			
		||||
	cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO}
 | 
			
		||||
	make -C ${PWD}/${GEN_API_GO} build
 | 
			
		||||
	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates
 | 
			
		||||
	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml
 | 
			
		||||
	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache
 | 
			
		||||
	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache
 | 
			
		||||
	cp schema.yml ./${GEN_API_GO}/
 | 
			
		||||
	docker run \
 | 
			
		||||
		--rm -v ${PWD}/${GEN_API_GO}:/local \
 | 
			
		||||
		--user ${UID}:${GID} \
 | 
			
		||||
		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
 | 
			
		||||
		-i /local/schema.yml \
 | 
			
		||||
		-g go \
 | 
			
		||||
		-o /local/ \
 | 
			
		||||
		-c /local/config.yaml
 | 
			
		||||
	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO}
 | 
			
		||||
	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/
 | 
			
		||||
 | 
			
		||||
gen-dev-config:  ## Generate a local development config file
 | 
			
		||||
	uv run scripts/generate_config.py
 | 
			
		||||
@ -247,7 +244,7 @@ docker:  ## Build a docker image of the current source tree
 | 
			
		||||
	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
 | 
			
		||||
 | 
			
		||||
test-docker:
 | 
			
		||||
	BUILD=true ${PWD}/scripts/test_docker.sh
 | 
			
		||||
	BUILD=true ./scripts/test_docker.sh
 | 
			
		||||
 | 
			
		||||
#########################
 | 
			
		||||
## CI
 | 
			
		||||
 | 
			
		||||
@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
 | 
			
		||||
 | 
			
		||||
| Version   | Supported |
 | 
			
		||||
| --------- | --------- |
 | 
			
		||||
| 2025.2.x  | ✅        |
 | 
			
		||||
| 2025.4.x  | ✅        |
 | 
			
		||||
| 2025.6.x  | ✅        |
 | 
			
		||||
 | 
			
		||||
## Reporting a Vulnerability
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@ -2,7 +2,7 @@
 | 
			
		||||
 | 
			
		||||
from os import environ
 | 
			
		||||
 | 
			
		||||
__version__ = "2025.6.3"
 | 
			
		||||
__version__ = "2025.4.1"
 | 
			
		||||
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										79
									
								
								authentik/admin/api/metrics.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										79
									
								
								authentik/admin/api/metrics.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,79 @@
 | 
			
		||||
"""authentik administration metrics"""
 | 
			
		||||
 | 
			
		||||
from datetime import timedelta
 | 
			
		||||
 | 
			
		||||
from django.db.models.functions import ExtractHour
 | 
			
		||||
from drf_spectacular.utils import extend_schema, extend_schema_field
 | 
			
		||||
from guardian.shortcuts import get_objects_for_user
 | 
			
		||||
from rest_framework.fields import IntegerField, SerializerMethodField
 | 
			
		||||
from rest_framework.permissions import IsAuthenticated
 | 
			
		||||
from rest_framework.request import Request
 | 
			
		||||
from rest_framework.response import Response
 | 
			
		||||
from rest_framework.views import APIView
 | 
			
		||||
 | 
			
		||||
from authentik.core.api.utils import PassiveSerializer
 | 
			
		||||
from authentik.events.models import EventAction
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class CoordinateSerializer(PassiveSerializer):
 | 
			
		||||
    """Coordinates for diagrams"""
 | 
			
		||||
 | 
			
		||||
    x_cord = IntegerField(read_only=True)
 | 
			
		||||
    y_cord = IntegerField(read_only=True)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class LoginMetricsSerializer(PassiveSerializer):
 | 
			
		||||
    """Login Metrics per 1h"""
 | 
			
		||||
 | 
			
		||||
    logins = SerializerMethodField()
 | 
			
		||||
    logins_failed = SerializerMethodField()
 | 
			
		||||
    authorizations = SerializerMethodField()
 | 
			
		||||
 | 
			
		||||
    @extend_schema_field(CoordinateSerializer(many=True))
 | 
			
		||||
    def get_logins(self, _):
 | 
			
		||||
        """Get successful logins per 8 hours for the last 7 days"""
 | 
			
		||||
        user = self.context["user"]
 | 
			
		||||
        return (
 | 
			
		||||
            get_objects_for_user(user, "authentik_events.view_event").filter(
 | 
			
		||||
                action=EventAction.LOGIN
 | 
			
		||||
            )
 | 
			
		||||
            # 3 data points per day, so 8 hour spans
 | 
			
		||||
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    @extend_schema_field(CoordinateSerializer(many=True))
 | 
			
		||||
    def get_logins_failed(self, _):
 | 
			
		||||
        """Get failed logins per 8 hours for the last 7 days"""
 | 
			
		||||
        user = self.context["user"]
 | 
			
		||||
        return (
 | 
			
		||||
            get_objects_for_user(user, "authentik_events.view_event").filter(
 | 
			
		||||
                action=EventAction.LOGIN_FAILED
 | 
			
		||||
            )
 | 
			
		||||
            # 3 data points per day, so 8 hour spans
 | 
			
		||||
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    @extend_schema_field(CoordinateSerializer(many=True))
 | 
			
		||||
    def get_authorizations(self, _):
 | 
			
		||||
        """Get successful authorizations per 8 hours for the last 7 days"""
 | 
			
		||||
        user = self.context["user"]
 | 
			
		||||
        return (
 | 
			
		||||
            get_objects_for_user(user, "authentik_events.view_event").filter(
 | 
			
		||||
                action=EventAction.AUTHORIZE_APPLICATION
 | 
			
		||||
            )
 | 
			
		||||
            # 3 data points per day, so 8 hour spans
 | 
			
		||||
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class AdministrationMetricsViewSet(APIView):
 | 
			
		||||
    """Login Metrics per 1h"""
 | 
			
		||||
 | 
			
		||||
    permission_classes = [IsAuthenticated]
 | 
			
		||||
 | 
			
		||||
    @extend_schema(responses={200: LoginMetricsSerializer(many=False)})
 | 
			
		||||
    def get(self, request: Request) -> Response:
 | 
			
		||||
        """Login Metrics per 1h"""
 | 
			
		||||
        serializer = LoginMetricsSerializer(True)
 | 
			
		||||
        serializer.context["user"] = request.user
 | 
			
		||||
        return Response(serializer.data)
 | 
			
		||||
@ -1,7 +1,6 @@
 | 
			
		||||
"""authentik administration overview"""
 | 
			
		||||
 | 
			
		||||
from django.core.cache import cache
 | 
			
		||||
from django_tenants.utils import get_public_schema_name
 | 
			
		||||
from drf_spectacular.utils import extend_schema
 | 
			
		||||
from packaging.version import parse
 | 
			
		||||
from rest_framework.fields import SerializerMethodField
 | 
			
		||||
@ -14,7 +13,6 @@ from authentik import __version__, get_build_hash
 | 
			
		||||
from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version
 | 
			
		||||
from authentik.core.api.utils import PassiveSerializer
 | 
			
		||||
from authentik.outposts.models import Outpost
 | 
			
		||||
from authentik.tenants.utils import get_current_tenant
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class VersionSerializer(PassiveSerializer):
 | 
			
		||||
@ -37,11 +35,9 @@ class VersionSerializer(PassiveSerializer):
 | 
			
		||||
 | 
			
		||||
    def get_version_latest(self, _) -> str:
 | 
			
		||||
        """Get latest version from cache"""
 | 
			
		||||
        if get_current_tenant().schema_name == get_public_schema_name():
 | 
			
		||||
            return __version__
 | 
			
		||||
        version_in_cache = cache.get(VERSION_CACHE_KEY)
 | 
			
		||||
        if not version_in_cache:  # pragma: no cover
 | 
			
		||||
            update_latest_version.send()
 | 
			
		||||
            update_latest_version.delay()
 | 
			
		||||
            return __version__
 | 
			
		||||
        return version_in_cache
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										57
									
								
								authentik/admin/api/workers.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										57
									
								
								authentik/admin/api/workers.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,57 @@
 | 
			
		||||
"""authentik administration overview"""
 | 
			
		||||
 | 
			
		||||
from socket import gethostname
 | 
			
		||||
 | 
			
		||||
from django.conf import settings
 | 
			
		||||
from drf_spectacular.utils import extend_schema, inline_serializer
 | 
			
		||||
from packaging.version import parse
 | 
			
		||||
from rest_framework.fields import BooleanField, CharField
 | 
			
		||||
from rest_framework.request import Request
 | 
			
		||||
from rest_framework.response import Response
 | 
			
		||||
from rest_framework.views import APIView
 | 
			
		||||
 | 
			
		||||
from authentik import get_full_version
 | 
			
		||||
from authentik.rbac.permissions import HasPermission
 | 
			
		||||
from authentik.root.celery import CELERY_APP
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class WorkerView(APIView):
 | 
			
		||||
    """Get currently connected worker count."""
 | 
			
		||||
 | 
			
		||||
    permission_classes = [HasPermission("authentik_rbac.view_system_info")]
 | 
			
		||||
 | 
			
		||||
    @extend_schema(
 | 
			
		||||
        responses=inline_serializer(
 | 
			
		||||
            "Worker",
 | 
			
		||||
            fields={
 | 
			
		||||
                "worker_id": CharField(),
 | 
			
		||||
                "version": CharField(),
 | 
			
		||||
                "version_matching": BooleanField(),
 | 
			
		||||
            },
 | 
			
		||||
            many=True,
 | 
			
		||||
        )
 | 
			
		||||
    )
 | 
			
		||||
    def get(self, request: Request) -> Response:
 | 
			
		||||
        """Get currently connected worker count."""
 | 
			
		||||
        raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5)
 | 
			
		||||
        our_version = parse(get_full_version())
 | 
			
		||||
        response = []
 | 
			
		||||
        for worker in raw:
 | 
			
		||||
            key = list(worker.keys())[0]
 | 
			
		||||
            version = worker[key].get("version")
 | 
			
		||||
            version_matching = False
 | 
			
		||||
            if version:
 | 
			
		||||
                version_matching = parse(version) == our_version
 | 
			
		||||
            response.append(
 | 
			
		||||
                {"worker_id": key, "version": version, "version_matching": version_matching}
 | 
			
		||||
            )
 | 
			
		||||
        # In debug we run with `task_always_eager`, so tasks are ran on the main process
 | 
			
		||||
        if settings.DEBUG:  # pragma: no cover
 | 
			
		||||
            response.append(
 | 
			
		||||
                {
 | 
			
		||||
                    "worker_id": f"authentik-debug@{gethostname()}",
 | 
			
		||||
                    "version": get_full_version(),
 | 
			
		||||
                    "version_matching": True,
 | 
			
		||||
                }
 | 
			
		||||
            )
 | 
			
		||||
        return Response(response)
 | 
			
		||||
@ -3,9 +3,6 @@
 | 
			
		||||
from prometheus_client import Info
 | 
			
		||||
 | 
			
		||||
from authentik.blueprints.apps import ManagedAppConfig
 | 
			
		||||
from authentik.lib.config import CONFIG
 | 
			
		||||
from authentik.lib.utils.time import fqdn_rand
 | 
			
		||||
from authentik.tasks.schedules.lib import ScheduleSpec
 | 
			
		||||
 | 
			
		||||
PROM_INFO = Info("authentik_version", "Currently running authentik version")
 | 
			
		||||
 | 
			
		||||
@ -17,31 +14,3 @@ class AuthentikAdminConfig(ManagedAppConfig):
 | 
			
		||||
    label = "authentik_admin"
 | 
			
		||||
    verbose_name = "authentik Admin"
 | 
			
		||||
    default = True
 | 
			
		||||
 | 
			
		||||
    @ManagedAppConfig.reconcile_global
 | 
			
		||||
    def clear_update_notifications(self):
 | 
			
		||||
        """Clear update notifications on startup if the notification was for the version
 | 
			
		||||
        we're running now."""
 | 
			
		||||
        from packaging.version import parse
 | 
			
		||||
 | 
			
		||||
        from authentik.admin.tasks import LOCAL_VERSION
 | 
			
		||||
        from authentik.events.models import EventAction, Notification
 | 
			
		||||
 | 
			
		||||
        for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE):
 | 
			
		||||
            if "new_version" not in notification.event.context:
 | 
			
		||||
                continue
 | 
			
		||||
            notification_version = notification.event.context["new_version"]
 | 
			
		||||
            if LOCAL_VERSION >= parse(notification_version):
 | 
			
		||||
                notification.delete()
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def global_schedule_specs(self) -> list[ScheduleSpec]:
 | 
			
		||||
        from authentik.admin.tasks import update_latest_version
 | 
			
		||||
 | 
			
		||||
        return [
 | 
			
		||||
            ScheduleSpec(
 | 
			
		||||
                actor=update_latest_version,
 | 
			
		||||
                crontab=f"{fqdn_rand('admin_latest_version')} * * * *",
 | 
			
		||||
                paused=CONFIG.get_bool("disable_update_check"),
 | 
			
		||||
            ),
 | 
			
		||||
        ]
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										13
									
								
								authentik/admin/settings.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								authentik/admin/settings.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,13 @@
 | 
			
		||||
"""authentik admin settings"""
 | 
			
		||||
 | 
			
		||||
from celery.schedules import crontab
 | 
			
		||||
 | 
			
		||||
from authentik.lib.utils.time import fqdn_rand
 | 
			
		||||
 | 
			
		||||
CELERY_BEAT_SCHEDULE = {
 | 
			
		||||
    "admin_latest_version": {
 | 
			
		||||
        "task": "authentik.admin.tasks.update_latest_version",
 | 
			
		||||
        "schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"),
 | 
			
		||||
        "options": {"queue": "authentik_scheduled"},
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
							
								
								
									
										35
									
								
								authentik/admin/signals.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										35
									
								
								authentik/admin/signals.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,35 @@
 | 
			
		||||
"""admin signals"""
 | 
			
		||||
 | 
			
		||||
from django.dispatch import receiver
 | 
			
		||||
from packaging.version import parse
 | 
			
		||||
from prometheus_client import Gauge
 | 
			
		||||
 | 
			
		||||
from authentik import get_full_version
 | 
			
		||||
from authentik.root.celery import CELERY_APP
 | 
			
		||||
from authentik.root.monitoring import monitoring_set
 | 
			
		||||
 | 
			
		||||
GAUGE_WORKERS = Gauge(
 | 
			
		||||
    "authentik_admin_workers",
 | 
			
		||||
    "Currently connected workers, their versions and if they are the same version as authentik",
 | 
			
		||||
    ["version", "version_matched"],
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_version = parse(get_full_version())
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@receiver(monitoring_set)
 | 
			
		||||
def monitoring_set_workers(sender, **kwargs):
 | 
			
		||||
    """Set worker gauge"""
 | 
			
		||||
    raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5)
 | 
			
		||||
    worker_version_count = {}
 | 
			
		||||
    for worker in raw:
 | 
			
		||||
        key = list(worker.keys())[0]
 | 
			
		||||
        version = worker[key].get("version")
 | 
			
		||||
        version_matching = False
 | 
			
		||||
        if version:
 | 
			
		||||
            version_matching = parse(version) == _version
 | 
			
		||||
        worker_version_count.setdefault(version, {"count": 0, "matching": version_matching})
 | 
			
		||||
        worker_version_count[version]["count"] += 1
 | 
			
		||||
    for version, stats in worker_version_count.items():
 | 
			
		||||
        GAUGE_WORKERS.labels(version, stats["matching"]).set(stats["count"])
 | 
			
		||||
@ -1,19 +1,19 @@
 | 
			
		||||
"""authentik admin tasks"""
 | 
			
		||||
 | 
			
		||||
from django.core.cache import cache
 | 
			
		||||
from django.db import DatabaseError, InternalError, ProgrammingError
 | 
			
		||||
from django.utils.translation import gettext_lazy as _
 | 
			
		||||
from django_dramatiq_postgres.middleware import CurrentTask
 | 
			
		||||
from dramatiq import actor
 | 
			
		||||
from packaging.version import parse
 | 
			
		||||
from requests import RequestException
 | 
			
		||||
from structlog.stdlib import get_logger
 | 
			
		||||
 | 
			
		||||
from authentik import __version__, get_build_hash
 | 
			
		||||
from authentik.admin.apps import PROM_INFO
 | 
			
		||||
from authentik.events.models import Event, EventAction
 | 
			
		||||
from authentik.events.models import Event, EventAction, Notification
 | 
			
		||||
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
 | 
			
		||||
from authentik.lib.config import CONFIG
 | 
			
		||||
from authentik.lib.utils.http import get_http_session
 | 
			
		||||
from authentik.tasks.models import Task
 | 
			
		||||
from authentik.root.celery import CELERY_APP
 | 
			
		||||
 | 
			
		||||
LOGGER = get_logger()
 | 
			
		||||
VERSION_NULL = "0.0.0"
 | 
			
		||||
@ -33,12 +33,27 @@ def _set_prom_info():
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(description=_("Update latest version info."))
 | 
			
		||||
def update_latest_version():
 | 
			
		||||
    self: Task = CurrentTask.get_task()
 | 
			
		||||
@CELERY_APP.task(
 | 
			
		||||
    throws=(DatabaseError, ProgrammingError, InternalError),
 | 
			
		||||
)
 | 
			
		||||
def clear_update_notifications():
 | 
			
		||||
    """Clear update notifications on startup if the notification was for the version
 | 
			
		||||
    we're running now."""
 | 
			
		||||
    for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE):
 | 
			
		||||
        if "new_version" not in notification.event.context:
 | 
			
		||||
            continue
 | 
			
		||||
        notification_version = notification.event.context["new_version"]
 | 
			
		||||
        if LOCAL_VERSION >= parse(notification_version):
 | 
			
		||||
            notification.delete()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@CELERY_APP.task(bind=True, base=SystemTask)
 | 
			
		||||
@prefill_task
 | 
			
		||||
def update_latest_version(self: SystemTask):
 | 
			
		||||
    """Update latest version info"""
 | 
			
		||||
    if CONFIG.get_bool("disable_update_check"):
 | 
			
		||||
        cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
 | 
			
		||||
        self.info("Version check disabled.")
 | 
			
		||||
        self.set_status(TaskStatus.WARNING, "Version check disabled.")
 | 
			
		||||
        return
 | 
			
		||||
    try:
 | 
			
		||||
        response = get_http_session().get(
 | 
			
		||||
@ -48,7 +63,7 @@ def update_latest_version():
 | 
			
		||||
        data = response.json()
 | 
			
		||||
        upstream_version = data.get("stable", {}).get("version")
 | 
			
		||||
        cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT)
 | 
			
		||||
        self.info("Successfully updated latest Version")
 | 
			
		||||
        self.set_status(TaskStatus.SUCCESSFUL, "Successfully updated latest Version")
 | 
			
		||||
        _set_prom_info()
 | 
			
		||||
        # Check if upstream version is newer than what we're running,
 | 
			
		||||
        # and if no event exists yet, create one.
 | 
			
		||||
@ -71,7 +86,7 @@ def update_latest_version():
 | 
			
		||||
            ).save()
 | 
			
		||||
    except (RequestException, IndexError) as exc:
 | 
			
		||||
        cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
 | 
			
		||||
        raise exc
 | 
			
		||||
        self.set_error(exc)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_set_prom_info()
 | 
			
		||||
 | 
			
		||||
@ -29,6 +29,18 @@ class TestAdminAPI(TestCase):
 | 
			
		||||
        body = loads(response.content)
 | 
			
		||||
        self.assertEqual(body["version_current"], __version__)
 | 
			
		||||
 | 
			
		||||
    def test_workers(self):
 | 
			
		||||
        """Test Workers API"""
 | 
			
		||||
        response = self.client.get(reverse("authentik_api:admin_workers"))
 | 
			
		||||
        self.assertEqual(response.status_code, 200)
 | 
			
		||||
        body = loads(response.content)
 | 
			
		||||
        self.assertEqual(len(body), 0)
 | 
			
		||||
 | 
			
		||||
    def test_metrics(self):
 | 
			
		||||
        """Test metrics API"""
 | 
			
		||||
        response = self.client.get(reverse("authentik_api:admin_metrics"))
 | 
			
		||||
        self.assertEqual(response.status_code, 200)
 | 
			
		||||
 | 
			
		||||
    def test_apps(self):
 | 
			
		||||
        """Test apps API"""
 | 
			
		||||
        response = self.client.get(reverse("authentik_api:apps-list"))
 | 
			
		||||
 | 
			
		||||
@ -1,12 +1,12 @@
 | 
			
		||||
"""test admin tasks"""
 | 
			
		||||
 | 
			
		||||
from django.apps import apps
 | 
			
		||||
from django.core.cache import cache
 | 
			
		||||
from django.test import TestCase
 | 
			
		||||
from requests_mock import Mocker
 | 
			
		||||
 | 
			
		||||
from authentik.admin.tasks import (
 | 
			
		||||
    VERSION_CACHE_KEY,
 | 
			
		||||
    clear_update_notifications,
 | 
			
		||||
    update_latest_version,
 | 
			
		||||
)
 | 
			
		||||
from authentik.events.models import Event, EventAction
 | 
			
		||||
@ -30,7 +30,7 @@ class TestAdminTasks(TestCase):
 | 
			
		||||
        """Test Update checker with valid response"""
 | 
			
		||||
        with Mocker() as mocker, CONFIG.patch("disable_update_check", False):
 | 
			
		||||
            mocker.get("https://version.goauthentik.io/version.json", json=RESPONSE_VALID)
 | 
			
		||||
            update_latest_version.send()
 | 
			
		||||
            update_latest_version.delay().get()
 | 
			
		||||
            self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999")
 | 
			
		||||
            self.assertTrue(
 | 
			
		||||
                Event.objects.filter(
 | 
			
		||||
@ -40,7 +40,7 @@ class TestAdminTasks(TestCase):
 | 
			
		||||
                ).exists()
 | 
			
		||||
            )
 | 
			
		||||
            # test that a consecutive check doesn't create a duplicate event
 | 
			
		||||
            update_latest_version.send()
 | 
			
		||||
            update_latest_version.delay().get()
 | 
			
		||||
            self.assertEqual(
 | 
			
		||||
                len(
 | 
			
		||||
                    Event.objects.filter(
 | 
			
		||||
@ -56,7 +56,7 @@ class TestAdminTasks(TestCase):
 | 
			
		||||
        """Test Update checker with invalid response"""
 | 
			
		||||
        with Mocker() as mocker:
 | 
			
		||||
            mocker.get("https://version.goauthentik.io/version.json", status_code=400)
 | 
			
		||||
            update_latest_version.send()
 | 
			
		||||
            update_latest_version.delay().get()
 | 
			
		||||
            self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
 | 
			
		||||
            self.assertFalse(
 | 
			
		||||
                Event.objects.filter(
 | 
			
		||||
@ -67,19 +67,17 @@ class TestAdminTasks(TestCase):
 | 
			
		||||
    def test_version_disabled(self):
 | 
			
		||||
        """Test Update checker while its disabled"""
 | 
			
		||||
        with CONFIG.patch("disable_update_check", True):
 | 
			
		||||
            update_latest_version.send()
 | 
			
		||||
            update_latest_version.delay().get()
 | 
			
		||||
            self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
 | 
			
		||||
 | 
			
		||||
    def test_clear_update_notifications(self):
 | 
			
		||||
        """Test clear of previous notification"""
 | 
			
		||||
        admin_config = apps.get_app_config("authentik_admin")
 | 
			
		||||
        Event.objects.create(
 | 
			
		||||
            action=EventAction.UPDATE_AVAILABLE,
 | 
			
		||||
            context={"new_version": "99999999.9999999.9999999"},
 | 
			
		||||
            action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"}
 | 
			
		||||
        )
 | 
			
		||||
        Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"})
 | 
			
		||||
        Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={})
 | 
			
		||||
        admin_config.clear_update_notifications()
 | 
			
		||||
        clear_update_notifications()
 | 
			
		||||
        self.assertFalse(
 | 
			
		||||
            Event.objects.filter(
 | 
			
		||||
                action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1"
 | 
			
		||||
 | 
			
		||||
@ -3,14 +3,22 @@
 | 
			
		||||
from django.urls import path
 | 
			
		||||
 | 
			
		||||
from authentik.admin.api.meta import AppsViewSet, ModelViewSet
 | 
			
		||||
from authentik.admin.api.metrics import AdministrationMetricsViewSet
 | 
			
		||||
from authentik.admin.api.system import SystemView
 | 
			
		||||
from authentik.admin.api.version import VersionView
 | 
			
		||||
from authentik.admin.api.version_history import VersionHistoryViewSet
 | 
			
		||||
from authentik.admin.api.workers import WorkerView
 | 
			
		||||
 | 
			
		||||
api_urlpatterns = [
 | 
			
		||||
    ("admin/apps", AppsViewSet, "apps"),
 | 
			
		||||
    ("admin/models", ModelViewSet, "models"),
 | 
			
		||||
    path(
 | 
			
		||||
        "admin/metrics/",
 | 
			
		||||
        AdministrationMetricsViewSet.as_view(),
 | 
			
		||||
        name="admin_metrics",
 | 
			
		||||
    ),
 | 
			
		||||
    path("admin/version/", VersionView.as_view(), name="admin_version"),
 | 
			
		||||
    ("admin/version/history", VersionHistoryViewSet, "version_history"),
 | 
			
		||||
    path("admin/workers/", WorkerView.as_view(), name="admin_workers"),
 | 
			
		||||
    path("admin/system/", SystemView.as_view(), name="admin_system"),
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
@ -1,13 +1,12 @@
 | 
			
		||||
"""authentik API AppConfig"""
 | 
			
		||||
 | 
			
		||||
from authentik.blueprints.apps import ManagedAppConfig
 | 
			
		||||
from django.apps import AppConfig
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class AuthentikAPIConfig(ManagedAppConfig):
 | 
			
		||||
class AuthentikAPIConfig(AppConfig):
 | 
			
		||||
    """authentik API Config"""
 | 
			
		||||
 | 
			
		||||
    name = "authentik.api"
 | 
			
		||||
    label = "authentik_api"
 | 
			
		||||
    mountpoint = "api/"
 | 
			
		||||
    verbose_name = "authentik API"
 | 
			
		||||
    default = True
 | 
			
		||||
 | 
			
		||||
@ -1,12 +1,9 @@
 | 
			
		||||
"""API Authentication"""
 | 
			
		||||
 | 
			
		||||
from hmac import compare_digest
 | 
			
		||||
from pathlib import Path
 | 
			
		||||
from tempfile import gettempdir
 | 
			
		||||
from typing import Any
 | 
			
		||||
 | 
			
		||||
from django.conf import settings
 | 
			
		||||
from django.contrib.auth.models import AnonymousUser
 | 
			
		||||
from drf_spectacular.extensions import OpenApiAuthenticationExtension
 | 
			
		||||
from rest_framework.authentication import BaseAuthentication, get_authorization_header
 | 
			
		||||
from rest_framework.exceptions import AuthenticationFailed
 | 
			
		||||
@ -14,17 +11,11 @@ from rest_framework.request import Request
 | 
			
		||||
from structlog.stdlib import get_logger
 | 
			
		||||
 | 
			
		||||
from authentik.core.middleware import CTX_AUTH_VIA
 | 
			
		||||
from authentik.core.models import Token, TokenIntents, User, UserTypes
 | 
			
		||||
from authentik.core.models import Token, TokenIntents, User
 | 
			
		||||
from authentik.outposts.models import Outpost
 | 
			
		||||
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
 | 
			
		||||
 | 
			
		||||
LOGGER = get_logger()
 | 
			
		||||
_tmp = Path(gettempdir())
 | 
			
		||||
try:
 | 
			
		||||
    with open(_tmp / "authentik-core-ipc.key") as _f:
 | 
			
		||||
        ipc_key = _f.read()
 | 
			
		||||
except OSError:
 | 
			
		||||
    ipc_key = None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def validate_auth(header: bytes) -> str | None:
 | 
			
		||||
@ -82,11 +73,6 @@ def auth_user_lookup(raw_header: bytes) -> User | None:
 | 
			
		||||
    if user:
 | 
			
		||||
        CTX_AUTH_VIA.set("secret_key")
 | 
			
		||||
        return user
 | 
			
		||||
    # then try to auth via secret key (for embedded outpost/etc)
 | 
			
		||||
    user = token_ipc(auth_credentials)
 | 
			
		||||
    if user:
 | 
			
		||||
        CTX_AUTH_VIA.set("ipc")
 | 
			
		||||
        return user
 | 
			
		||||
    raise AuthenticationFailed("Token invalid/expired")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@ -104,43 +90,6 @@ def token_secret_key(value: str) -> User | None:
 | 
			
		||||
    return outpost.user
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class IPCUser(AnonymousUser):
 | 
			
		||||
    """'Virtual' user for IPC communication between authentik core and the authentik router"""
 | 
			
		||||
 | 
			
		||||
    username = "authentik:system"
 | 
			
		||||
    is_active = True
 | 
			
		||||
    is_superuser = True
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def type(self):
 | 
			
		||||
        return UserTypes.INTERNAL_SERVICE_ACCOUNT
 | 
			
		||||
 | 
			
		||||
    def has_perm(self, perm, obj=None):
 | 
			
		||||
        return True
 | 
			
		||||
 | 
			
		||||
    def has_perms(self, perm_list, obj=None):
 | 
			
		||||
        return True
 | 
			
		||||
 | 
			
		||||
    def has_module_perms(self, module):
 | 
			
		||||
        return True
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def is_anonymous(self):
 | 
			
		||||
        return False
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def is_authenticated(self):
 | 
			
		||||
        return True
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def token_ipc(value: str) -> User | None:
 | 
			
		||||
    """Check if the token is the secret key
 | 
			
		||||
    and return the service account for the managed outpost"""
 | 
			
		||||
    if not ipc_key or not compare_digest(value, ipc_key):
 | 
			
		||||
        return None
 | 
			
		||||
    return IPCUser()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TokenAuthentication(BaseAuthentication):
 | 
			
		||||
    """Token-based authentication using HTTP Bearer authentication"""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@ -39,7 +39,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
 | 
			
		||||
        """Ensure the path (if set) specified is retrievable"""
 | 
			
		||||
        if path == "" or path.startswith(OCI_PREFIX):
 | 
			
		||||
            return path
 | 
			
		||||
        files: list[dict] = blueprints_find_dict.send().get_result(block=True)
 | 
			
		||||
        files: list[dict] = blueprints_find_dict.delay().get()
 | 
			
		||||
        if path not in [file["path"] for file in files]:
 | 
			
		||||
            raise ValidationError(_("Blueprint file does not exist"))
 | 
			
		||||
        return path
 | 
			
		||||
@ -115,7 +115,7 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
 | 
			
		||||
    @action(detail=False, pagination_class=None, filter_backends=[])
 | 
			
		||||
    def available(self, request: Request) -> Response:
 | 
			
		||||
        """Get blueprints"""
 | 
			
		||||
        files: list[dict] = blueprints_find_dict.send().get_result(block=True)
 | 
			
		||||
        files: list[dict] = blueprints_find_dict.delay().get()
 | 
			
		||||
        return Response(files)
 | 
			
		||||
 | 
			
		||||
    @permission_required("authentik_blueprints.view_blueprintinstance")
 | 
			
		||||
@ -129,5 +129,5 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
 | 
			
		||||
    def apply(self, request: Request, *args, **kwargs) -> Response:
 | 
			
		||||
        """Apply a blueprint"""
 | 
			
		||||
        blueprint = self.get_object()
 | 
			
		||||
        apply_blueprint.send_with_options(args=(blueprint.pk,), rel_obj=blueprint)
 | 
			
		||||
        apply_blueprint.delay(str(blueprint.pk)).get()
 | 
			
		||||
        return self.retrieve(request, *args, **kwargs)
 | 
			
		||||
 | 
			
		||||
@ -6,12 +6,9 @@ from inspect import ismethod
 | 
			
		||||
 | 
			
		||||
from django.apps import AppConfig
 | 
			
		||||
from django.db import DatabaseError, InternalError, ProgrammingError
 | 
			
		||||
from dramatiq.broker import get_broker
 | 
			
		||||
from structlog.stdlib import BoundLogger, get_logger
 | 
			
		||||
 | 
			
		||||
from authentik.lib.utils.time import fqdn_rand
 | 
			
		||||
from authentik.root.signals import startup
 | 
			
		||||
from authentik.tasks.schedules.lib import ScheduleSpec
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ManagedAppConfig(AppConfig):
 | 
			
		||||
@ -37,7 +34,7 @@ class ManagedAppConfig(AppConfig):
 | 
			
		||||
 | 
			
		||||
    def import_related(self):
 | 
			
		||||
        """Automatically import related modules which rely on just being imported
 | 
			
		||||
        to register themselves (mainly django signals and tasks)"""
 | 
			
		||||
        to register themselves (mainly django signals and celery tasks)"""
 | 
			
		||||
 | 
			
		||||
        def import_relative(rel_module: str):
 | 
			
		||||
            try:
 | 
			
		||||
@ -83,16 +80,6 @@ class ManagedAppConfig(AppConfig):
 | 
			
		||||
        func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_GLOBAL_CATEGORY
 | 
			
		||||
        return func
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
 | 
			
		||||
        """Get a list of schedule specs that must exist in each tenant"""
 | 
			
		||||
        return []
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def global_schedule_specs(self) -> list[ScheduleSpec]:
 | 
			
		||||
        """Get a list of schedule specs that must exist in the default tenant"""
 | 
			
		||||
        return []
 | 
			
		||||
 | 
			
		||||
    def _reconcile_tenant(self) -> None:
 | 
			
		||||
        """reconcile ourselves for tenanted methods"""
 | 
			
		||||
        from authentik.tenants.models import Tenant
 | 
			
		||||
@ -113,12 +100,8 @@ class ManagedAppConfig(AppConfig):
 | 
			
		||||
        """
 | 
			
		||||
        from django_tenants.utils import get_public_schema_name, schema_context
 | 
			
		||||
 | 
			
		||||
        try:
 | 
			
		||||
            with schema_context(get_public_schema_name()):
 | 
			
		||||
                self._reconcile(self.RECONCILE_GLOBAL_CATEGORY)
 | 
			
		||||
        except (DatabaseError, ProgrammingError, InternalError) as exc:
 | 
			
		||||
            self.logger.debug("Failed to access database to run reconcile", exc=exc)
 | 
			
		||||
            return
 | 
			
		||||
        with schema_context(get_public_schema_name()):
 | 
			
		||||
            self._reconcile(self.RECONCILE_GLOBAL_CATEGORY)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class AuthentikBlueprintsConfig(ManagedAppConfig):
 | 
			
		||||
@ -129,29 +112,19 @@ class AuthentikBlueprintsConfig(ManagedAppConfig):
 | 
			
		||||
    verbose_name = "authentik Blueprints"
 | 
			
		||||
    default = True
 | 
			
		||||
 | 
			
		||||
    @ManagedAppConfig.reconcile_global
 | 
			
		||||
    def load_blueprints_v1_tasks(self):
 | 
			
		||||
        """Load v1 tasks"""
 | 
			
		||||
        self.import_module("authentik.blueprints.v1.tasks")
 | 
			
		||||
 | 
			
		||||
    @ManagedAppConfig.reconcile_tenant
 | 
			
		||||
    def blueprints_discovery(self):
 | 
			
		||||
        """Run blueprint discovery"""
 | 
			
		||||
        from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints
 | 
			
		||||
 | 
			
		||||
        blueprints_discovery.delay()
 | 
			
		||||
        clear_failed_blueprints.delay()
 | 
			
		||||
 | 
			
		||||
    def import_models(self):
 | 
			
		||||
        super().import_models()
 | 
			
		||||
        self.import_module("authentik.blueprints.v1.meta.apply_blueprint")
 | 
			
		||||
 | 
			
		||||
    @ManagedAppConfig.reconcile_global
 | 
			
		||||
    def tasks_middlewares(self):
 | 
			
		||||
        from authentik.blueprints.v1.tasks import BlueprintWatcherMiddleware
 | 
			
		||||
 | 
			
		||||
        get_broker().add_middleware(BlueprintWatcherMiddleware())
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
 | 
			
		||||
        from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints
 | 
			
		||||
 | 
			
		||||
        return [
 | 
			
		||||
            ScheduleSpec(
 | 
			
		||||
                actor=blueprints_discovery,
 | 
			
		||||
                crontab=f"{fqdn_rand('blueprints_v1_discover')} * * * *",
 | 
			
		||||
                send_on_startup=True,
 | 
			
		||||
            ),
 | 
			
		||||
            ScheduleSpec(
 | 
			
		||||
                actor=clear_failed_blueprints,
 | 
			
		||||
                crontab=f"{fqdn_rand('blueprints_v1_cleanup')} * * * *",
 | 
			
		||||
                send_on_startup=True,
 | 
			
		||||
            ),
 | 
			
		||||
        ]
 | 
			
		||||
 | 
			
		||||
@ -72,33 +72,20 @@ class Command(BaseCommand):
 | 
			
		||||
                    "additionalProperties": True,
 | 
			
		||||
                },
 | 
			
		||||
                "entries": {
 | 
			
		||||
                    "anyOf": [
 | 
			
		||||
                        {
 | 
			
		||||
                            "type": "array",
 | 
			
		||||
                            "items": {"$ref": "#/$defs/blueprint_entry"},
 | 
			
		||||
                        },
 | 
			
		||||
                        {
 | 
			
		||||
                            "type": "object",
 | 
			
		||||
                            "additionalProperties": {
 | 
			
		||||
                                "type": "array",
 | 
			
		||||
                                "items": {"$ref": "#/$defs/blueprint_entry"},
 | 
			
		||||
                            },
 | 
			
		||||
                        },
 | 
			
		||||
                    ],
 | 
			
		||||
                    "type": "array",
 | 
			
		||||
                    "items": {
 | 
			
		||||
                        "oneOf": [],
 | 
			
		||||
                    },
 | 
			
		||||
                },
 | 
			
		||||
            },
 | 
			
		||||
            "$defs": {"blueprint_entry": {"oneOf": []}},
 | 
			
		||||
            "$defs": {},
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
    def add_arguments(self, parser):
 | 
			
		||||
        parser.add_argument("--file", type=str)
 | 
			
		||||
 | 
			
		||||
    @no_translations
 | 
			
		||||
    def handle(self, *args, file: str, **options):
 | 
			
		||||
    def handle(self, *args, **options):
 | 
			
		||||
        """Generate JSON Schema for blueprints"""
 | 
			
		||||
        self.build()
 | 
			
		||||
        with open(file, "w") as _schema:
 | 
			
		||||
            _schema.write(dumps(self.schema, indent=4, default=Command.json_default))
 | 
			
		||||
        self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default))
 | 
			
		||||
 | 
			
		||||
    @staticmethod
 | 
			
		||||
    def json_default(value: Any) -> Any:
 | 
			
		||||
@ -125,7 +112,7 @@ class Command(BaseCommand):
 | 
			
		||||
                }
 | 
			
		||||
            )
 | 
			
		||||
            model_path = f"{model._meta.app_label}.{model._meta.model_name}"
 | 
			
		||||
            self.schema["$defs"]["blueprint_entry"]["oneOf"].append(
 | 
			
		||||
            self.schema["properties"]["entries"]["items"]["oneOf"].append(
 | 
			
		||||
                self.template_entry(model_path, model, serializer)
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
@ -147,7 +134,7 @@ class Command(BaseCommand):
 | 
			
		||||
                "id": {"type": "string"},
 | 
			
		||||
                "state": {
 | 
			
		||||
                    "type": "string",
 | 
			
		||||
                    "enum": sorted([s.value for s in BlueprintEntryDesiredState]),
 | 
			
		||||
                    "enum": [s.value for s in BlueprintEntryDesiredState],
 | 
			
		||||
                    "default": "present",
 | 
			
		||||
                },
 | 
			
		||||
                "conditions": {"type": "array", "items": {"type": "boolean"}},
 | 
			
		||||
@ -218,7 +205,7 @@ class Command(BaseCommand):
 | 
			
		||||
                "type": "object",
 | 
			
		||||
                "required": ["permission"],
 | 
			
		||||
                "properties": {
 | 
			
		||||
                    "permission": {"type": "string", "enum": sorted(perms)},
 | 
			
		||||
                    "permission": {"type": "string", "enum": perms},
 | 
			
		||||
                    "user": {"type": "integer"},
 | 
			
		||||
                    "role": {"type": "string"},
 | 
			
		||||
                },
 | 
			
		||||
 | 
			
		||||
@ -3,7 +3,6 @@
 | 
			
		||||
from pathlib import Path
 | 
			
		||||
from uuid import uuid4
 | 
			
		||||
 | 
			
		||||
from django.contrib.contenttypes.fields import GenericRelation
 | 
			
		||||
from django.contrib.postgres.fields import ArrayField
 | 
			
		||||
from django.db import models
 | 
			
		||||
from django.utils.translation import gettext_lazy as _
 | 
			
		||||
@ -72,13 +71,6 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
 | 
			
		||||
    enabled = models.BooleanField(default=True)
 | 
			
		||||
    managed_models = ArrayField(models.TextField(), default=list)
 | 
			
		||||
 | 
			
		||||
    # Manual link to tasks instead of using TasksModel because of loop imports
 | 
			
		||||
    tasks = GenericRelation(
 | 
			
		||||
        "authentik_tasks.Task",
 | 
			
		||||
        content_type_field="rel_obj_content_type",
 | 
			
		||||
        object_id_field="rel_obj_id",
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    class Meta:
 | 
			
		||||
        verbose_name = _("Blueprint Instance")
 | 
			
		||||
        verbose_name_plural = _("Blueprint Instances")
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										18
									
								
								authentik/blueprints/settings.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										18
									
								
								authentik/blueprints/settings.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,18 @@
 | 
			
		||||
"""blueprint Settings"""
 | 
			
		||||
 | 
			
		||||
from celery.schedules import crontab
 | 
			
		||||
 | 
			
		||||
from authentik.lib.utils.time import fqdn_rand
 | 
			
		||||
 | 
			
		||||
CELERY_BEAT_SCHEDULE = {
 | 
			
		||||
    "blueprints_v1_discover": {
 | 
			
		||||
        "task": "authentik.blueprints.v1.tasks.blueprints_discovery",
 | 
			
		||||
        "schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"),
 | 
			
		||||
        "options": {"queue": "authentik_scheduled"},
 | 
			
		||||
    },
 | 
			
		||||
    "blueprints_v1_cleanup": {
 | 
			
		||||
        "task": "authentik.blueprints.v1.tasks.clear_failed_blueprints",
 | 
			
		||||
        "schedule": crontab(minute=fqdn_rand("blueprints_v1_cleanup"), hour="*"),
 | 
			
		||||
        "options": {"queue": "authentik_scheduled"},
 | 
			
		||||
    },
 | 
			
		||||
}
 | 
			
		||||
@ -1,2 +0,0 @@
 | 
			
		||||
# Import all v1 tasks for auto task discovery
 | 
			
		||||
from authentik.blueprints.v1.tasks import *  # noqa: F403
 | 
			
		||||
@ -1,11 +1,10 @@
 | 
			
		||||
version: 1
 | 
			
		||||
entries:
 | 
			
		||||
  foo:
 | 
			
		||||
      - identifiers:
 | 
			
		||||
            name: "%(id)s"
 | 
			
		||||
            slug: "%(id)s"
 | 
			
		||||
        model: authentik_flows.flow
 | 
			
		||||
        state: present
 | 
			
		||||
        attrs:
 | 
			
		||||
            designation: stage_configuration
 | 
			
		||||
            title: foo
 | 
			
		||||
    - identifiers:
 | 
			
		||||
          name: "%(id)s"
 | 
			
		||||
          slug: "%(id)s"
 | 
			
		||||
      model: authentik_flows.flow
 | 
			
		||||
      state: present
 | 
			
		||||
      attrs:
 | 
			
		||||
          designation: stage_configuration
 | 
			
		||||
          title: foo
 | 
			
		||||
 | 
			
		||||
@ -37,7 +37,6 @@ entries:
 | 
			
		||||
    - attrs:
 | 
			
		||||
          attributes:
 | 
			
		||||
              env_null: !Env [bar-baz, null]
 | 
			
		||||
              json_parse: !ParseJSON '{"foo": "bar"}'
 | 
			
		||||
              policy_pk1:
 | 
			
		||||
                  !Format [
 | 
			
		||||
                      "%s-%s",
 | 
			
		||||
 | 
			
		||||
@ -1,14 +0,0 @@
 | 
			
		||||
from django.test import TestCase
 | 
			
		||||
 | 
			
		||||
from authentik.blueprints.apps import ManagedAppConfig
 | 
			
		||||
from authentik.enterprise.apps import EnterpriseConfig
 | 
			
		||||
from authentik.lib.utils.reflection import get_apps
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TestManagedAppConfig(TestCase):
 | 
			
		||||
    def test_apps_use_managed_app_config(self):
 | 
			
		||||
        for app in get_apps():
 | 
			
		||||
            if app.name.startswith("authentik.enterprise"):
 | 
			
		||||
                self.assertIn(EnterpriseConfig, app.__class__.__bases__)
 | 
			
		||||
            else:
 | 
			
		||||
                self.assertIn(ManagedAppConfig, app.__class__.__bases__)
 | 
			
		||||
@ -35,6 +35,6 @@ def blueprint_tester(file_name: Path) -> Callable:
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
for blueprint_file in Path("blueprints/").glob("**/*.yaml"):
 | 
			
		||||
    if "local" in str(blueprint_file) or "testing" in str(blueprint_file):
 | 
			
		||||
    if "local" in str(blueprint_file):
 | 
			
		||||
        continue
 | 
			
		||||
    setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file))
 | 
			
		||||
 | 
			
		||||
@ -5,6 +5,7 @@ from collections.abc import Callable
 | 
			
		||||
from django.apps import apps
 | 
			
		||||
from django.test import TestCase
 | 
			
		||||
 | 
			
		||||
from authentik.blueprints.v1.importer import is_model_allowed
 | 
			
		||||
from authentik.lib.models import SerializerModel
 | 
			
		||||
from authentik.providers.oauth2.models import RefreshToken
 | 
			
		||||
 | 
			
		||||
@ -21,13 +22,10 @@ def serializer_tester_factory(test_model: type[SerializerModel]) -> Callable:
 | 
			
		||||
            return
 | 
			
		||||
        model_class = test_model()
 | 
			
		||||
        self.assertTrue(isinstance(model_class, SerializerModel))
 | 
			
		||||
        # Models that have subclasses don't have to have a serializer
 | 
			
		||||
        if len(test_model.__subclasses__()) > 0:
 | 
			
		||||
            return
 | 
			
		||||
        self.assertIsNotNone(model_class.serializer)
 | 
			
		||||
        if model_class.serializer.Meta().model == RefreshToken:
 | 
			
		||||
            return
 | 
			
		||||
        self.assertTrue(issubclass(test_model, model_class.serializer.Meta().model))
 | 
			
		||||
        self.assertEqual(model_class.serializer.Meta().model, test_model)
 | 
			
		||||
 | 
			
		||||
    return tester
 | 
			
		||||
 | 
			
		||||
@ -36,6 +34,6 @@ for app in apps.get_app_configs():
 | 
			
		||||
    if not app.label.startswith("authentik"):
 | 
			
		||||
        continue
 | 
			
		||||
    for model in app.get_models():
 | 
			
		||||
        if not issubclass(model, SerializerModel):
 | 
			
		||||
        if not is_model_allowed(model):
 | 
			
		||||
            continue
 | 
			
		||||
        setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model))
 | 
			
		||||
 | 
			
		||||
@ -215,7 +215,6 @@ class TestBlueprintsV1(TransactionTestCase):
 | 
			
		||||
                    },
 | 
			
		||||
                    "nested_context": "context-nested-value",
 | 
			
		||||
                    "env_null": None,
 | 
			
		||||
                    "json_parse": {"foo": "bar"},
 | 
			
		||||
                    "at_index_sequence": "foo",
 | 
			
		||||
                    "at_index_sequence_default": "non existent",
 | 
			
		||||
                    "at_index_mapping": 2,
 | 
			
		||||
 | 
			
		||||
@ -54,7 +54,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
 | 
			
		||||
            file.seek(0)
 | 
			
		||||
            file_hash = sha512(file.read().encode()).hexdigest()
 | 
			
		||||
            file.flush()
 | 
			
		||||
            blueprints_discovery.send()
 | 
			
		||||
            blueprints_discovery()
 | 
			
		||||
            instance = BlueprintInstance.objects.filter(name=blueprint_id).first()
 | 
			
		||||
            self.assertEqual(instance.last_applied_hash, file_hash)
 | 
			
		||||
            self.assertEqual(
 | 
			
		||||
@ -82,7 +82,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
 | 
			
		||||
                )
 | 
			
		||||
            )
 | 
			
		||||
            file.flush()
 | 
			
		||||
            blueprints_discovery.send()
 | 
			
		||||
            blueprints_discovery()
 | 
			
		||||
            blueprint = BlueprintInstance.objects.filter(name="foo").first()
 | 
			
		||||
            self.assertEqual(
 | 
			
		||||
                blueprint.last_applied_hash,
 | 
			
		||||
@ -107,7 +107,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
 | 
			
		||||
                )
 | 
			
		||||
            )
 | 
			
		||||
            file.flush()
 | 
			
		||||
            blueprints_discovery.send()
 | 
			
		||||
            blueprints_discovery()
 | 
			
		||||
            blueprint.refresh_from_db()
 | 
			
		||||
            self.assertEqual(
 | 
			
		||||
                blueprint.last_applied_hash,
 | 
			
		||||
 | 
			
		||||
@ -6,7 +6,6 @@ from copy import copy
 | 
			
		||||
from dataclasses import asdict, dataclass, field, is_dataclass
 | 
			
		||||
from enum import Enum
 | 
			
		||||
from functools import reduce
 | 
			
		||||
from json import JSONDecodeError, loads
 | 
			
		||||
from operator import ixor
 | 
			
		||||
from os import getenv
 | 
			
		||||
from typing import Any, Literal, Union
 | 
			
		||||
@ -192,18 +191,11 @@ class Blueprint:
 | 
			
		||||
    """Dataclass used for a full export"""
 | 
			
		||||
 | 
			
		||||
    version: int = field(default=1)
 | 
			
		||||
    entries: list[BlueprintEntry] | dict[str, list[BlueprintEntry]] = field(default_factory=list)
 | 
			
		||||
    entries: list[BlueprintEntry] = field(default_factory=list)
 | 
			
		||||
    context: dict = field(default_factory=dict)
 | 
			
		||||
 | 
			
		||||
    metadata: BlueprintMetadata | None = field(default=None)
 | 
			
		||||
 | 
			
		||||
    def iter_entries(self) -> Iterable[BlueprintEntry]:
 | 
			
		||||
        if isinstance(self.entries, dict):
 | 
			
		||||
            for _section, entries in self.entries.items():
 | 
			
		||||
                yield from entries
 | 
			
		||||
        else:
 | 
			
		||||
            yield from self.entries
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class YAMLTag:
 | 
			
		||||
    """Base class for all YAML Tags"""
 | 
			
		||||
@ -234,7 +226,7 @@ class KeyOf(YAMLTag):
 | 
			
		||||
        self.id_from = node.value
 | 
			
		||||
 | 
			
		||||
    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
 | 
			
		||||
        for _entry in blueprint.iter_entries():
 | 
			
		||||
        for _entry in blueprint.entries:
 | 
			
		||||
            if _entry.id == self.id_from and _entry._state.instance:
 | 
			
		||||
                # Special handling for PolicyBindingModels, as they'll have a different PK
 | 
			
		||||
                # which is used when creating policy bindings
 | 
			
		||||
@ -292,22 +284,6 @@ class Context(YAMLTag):
 | 
			
		||||
        return value
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ParseJSON(YAMLTag):
 | 
			
		||||
    """Parse JSON from context/env/etc value"""
 | 
			
		||||
 | 
			
		||||
    raw: str
 | 
			
		||||
 | 
			
		||||
    def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
 | 
			
		||||
        super().__init__()
 | 
			
		||||
        self.raw = node.value
 | 
			
		||||
 | 
			
		||||
    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
 | 
			
		||||
        try:
 | 
			
		||||
            return loads(self.raw)
 | 
			
		||||
        except JSONDecodeError as exc:
 | 
			
		||||
            raise EntryInvalidError.from_entry(exc, entry) from exc
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Format(YAMLTag):
 | 
			
		||||
    """Format a string"""
 | 
			
		||||
 | 
			
		||||
@ -683,7 +659,6 @@ class BlueprintLoader(SafeLoader):
 | 
			
		||||
        self.add_constructor("!Value", Value)
 | 
			
		||||
        self.add_constructor("!Index", Index)
 | 
			
		||||
        self.add_constructor("!AtIndex", AtIndex)
 | 
			
		||||
        self.add_constructor("!ParseJSON", ParseJSON)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class EntryInvalidError(SentryIgnoredException):
 | 
			
		||||
 | 
			
		||||
@ -57,6 +57,7 @@ from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
 | 
			
		||||
    EndpointDeviceConnection,
 | 
			
		||||
)
 | 
			
		||||
from authentik.events.logs import LogEvent, capture_logs
 | 
			
		||||
from authentik.events.models import SystemTask
 | 
			
		||||
from authentik.events.utils import cleanse_dict
 | 
			
		||||
from authentik.flows.models import FlowToken, Stage
 | 
			
		||||
from authentik.lib.models import SerializerModel
 | 
			
		||||
@ -76,7 +77,6 @@ from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
 | 
			
		||||
from authentik.rbac.models import Role
 | 
			
		||||
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
 | 
			
		||||
from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
 | 
			
		||||
from authentik.tasks.models import Task
 | 
			
		||||
from authentik.tenants.models import Tenant
 | 
			
		||||
 | 
			
		||||
# Context set when the serializer is created in a blueprint context
 | 
			
		||||
@ -118,7 +118,7 @@ def excluded_models() -> list[type[Model]]:
 | 
			
		||||
        SCIMProviderGroup,
 | 
			
		||||
        SCIMProviderUser,
 | 
			
		||||
        Tenant,
 | 
			
		||||
        Task,
 | 
			
		||||
        SystemTask,
 | 
			
		||||
        ConnectionToken,
 | 
			
		||||
        AuthorizationCode,
 | 
			
		||||
        AccessToken,
 | 
			
		||||
@ -384,7 +384,7 @@ class Importer:
 | 
			
		||||
    def _apply_models(self, raise_errors=False) -> bool:
 | 
			
		||||
        """Apply (create/update) models yaml"""
 | 
			
		||||
        self.__pk_map = {}
 | 
			
		||||
        for entry in self._import.iter_entries():
 | 
			
		||||
        for entry in self._import.entries:
 | 
			
		||||
            model_app_label, model_name = entry.get_model(self._import).split(".")
 | 
			
		||||
            try:
 | 
			
		||||
                model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
 | 
			
		||||
 | 
			
		||||
@ -44,7 +44,7 @@ class ApplyBlueprintMetaSerializer(PassiveSerializer):
 | 
			
		||||
            return MetaResult()
 | 
			
		||||
        LOGGER.debug("Applying blueprint from meta model", blueprint=self.blueprint_instance)
 | 
			
		||||
 | 
			
		||||
        apply_blueprint(self.blueprint_instance.pk)
 | 
			
		||||
        apply_blueprint(str(self.blueprint_instance.pk))
 | 
			
		||||
        return MetaResult()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@ -47,7 +47,7 @@ class MetaModelRegistry:
 | 
			
		||||
        models = apps.get_models()
 | 
			
		||||
        for _, value in self.models.items():
 | 
			
		||||
            models.append(value)
 | 
			
		||||
        return sorted(models, key=str)
 | 
			
		||||
        return models
 | 
			
		||||
 | 
			
		||||
    def get_model(self, app_label: str, model_id: str) -> type[Model]:
 | 
			
		||||
        """Get model checks if any virtual models are registered, and falls back
 | 
			
		||||
 | 
			
		||||
@ -4,17 +4,12 @@ from dataclasses import asdict, dataclass, field
 | 
			
		||||
from hashlib import sha512
 | 
			
		||||
from pathlib import Path
 | 
			
		||||
from sys import platform
 | 
			
		||||
from uuid import UUID
 | 
			
		||||
 | 
			
		||||
from dacite.core import from_dict
 | 
			
		||||
from django.conf import settings
 | 
			
		||||
from django.db import DatabaseError, InternalError, ProgrammingError
 | 
			
		||||
from django.utils.text import slugify
 | 
			
		||||
from django.utils.timezone import now
 | 
			
		||||
from django.utils.translation import gettext_lazy as _
 | 
			
		||||
from django_dramatiq_postgres.middleware import CurrentTask, CurrentTaskNotFound
 | 
			
		||||
from dramatiq.actor import actor
 | 
			
		||||
from dramatiq.middleware import Middleware
 | 
			
		||||
from structlog.stdlib import get_logger
 | 
			
		||||
from watchdog.events import (
 | 
			
		||||
    FileCreatedEvent,
 | 
			
		||||
@ -36,13 +31,15 @@ from authentik.blueprints.v1.importer import Importer
 | 
			
		||||
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
 | 
			
		||||
from authentik.blueprints.v1.oci import OCI_PREFIX
 | 
			
		||||
from authentik.events.logs import capture_logs
 | 
			
		||||
from authentik.events.models import TaskStatus
 | 
			
		||||
from authentik.events.system_tasks import SystemTask, prefill_task
 | 
			
		||||
from authentik.events.utils import sanitize_dict
 | 
			
		||||
from authentik.lib.config import CONFIG
 | 
			
		||||
from authentik.tasks.models import Task
 | 
			
		||||
from authentik.tasks.schedules.models import Schedule
 | 
			
		||||
from authentik.root.celery import CELERY_APP
 | 
			
		||||
from authentik.tenants.models import Tenant
 | 
			
		||||
 | 
			
		||||
LOGGER = get_logger()
 | 
			
		||||
_file_watcher_started = False
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@dataclass
 | 
			
		||||
@ -56,21 +53,22 @@ class BlueprintFile:
 | 
			
		||||
    meta: BlueprintMetadata | None = field(default=None)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class BlueprintWatcherMiddleware(Middleware):
 | 
			
		||||
    def start_blueprint_watcher(self):
 | 
			
		||||
        """Start blueprint watcher"""
 | 
			
		||||
        observer = Observer()
 | 
			
		||||
        kwargs = {}
 | 
			
		||||
        if platform.startswith("linux"):
 | 
			
		||||
            kwargs["event_filter"] = (FileCreatedEvent, FileModifiedEvent)
 | 
			
		||||
        observer.schedule(
 | 
			
		||||
            BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True, **kwargs
 | 
			
		||||
        )
 | 
			
		||||
        observer.start()
 | 
			
		||||
def start_blueprint_watcher():
 | 
			
		||||
    """Start blueprint watcher, if it's not running already."""
 | 
			
		||||
    # This function might be called twice since it's called on celery startup
 | 
			
		||||
 | 
			
		||||
    def after_worker_boot(self, broker, worker):
 | 
			
		||||
        if not settings.TEST:
 | 
			
		||||
            self.start_blueprint_watcher()
 | 
			
		||||
    global _file_watcher_started  # noqa: PLW0603
 | 
			
		||||
    if _file_watcher_started:
 | 
			
		||||
        return
 | 
			
		||||
    observer = Observer()
 | 
			
		||||
    kwargs = {}
 | 
			
		||||
    if platform.startswith("linux"):
 | 
			
		||||
        kwargs["event_filter"] = (FileCreatedEvent, FileModifiedEvent)
 | 
			
		||||
    observer.schedule(
 | 
			
		||||
        BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True, **kwargs
 | 
			
		||||
    )
 | 
			
		||||
    observer.start()
 | 
			
		||||
    _file_watcher_started = True
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class BlueprintEventHandler(FileSystemEventHandler):
 | 
			
		||||
@ -94,7 +92,7 @@ class BlueprintEventHandler(FileSystemEventHandler):
 | 
			
		||||
        LOGGER.debug("new blueprint file created, starting discovery")
 | 
			
		||||
        for tenant in Tenant.objects.filter(ready=True):
 | 
			
		||||
            with tenant:
 | 
			
		||||
                Schedule.dispatch_by_actor(blueprints_discovery)
 | 
			
		||||
                blueprints_discovery.delay()
 | 
			
		||||
 | 
			
		||||
    def on_modified(self, event: FileSystemEvent):
 | 
			
		||||
        """Process file modification"""
 | 
			
		||||
@ -105,14 +103,14 @@ class BlueprintEventHandler(FileSystemEventHandler):
 | 
			
		||||
            with tenant:
 | 
			
		||||
                for instance in BlueprintInstance.objects.filter(path=rel_path, enabled=True):
 | 
			
		||||
                    LOGGER.debug("modified blueprint file, starting apply", instance=instance)
 | 
			
		||||
                    apply_blueprint.send_with_options(args=(instance.pk,), rel_obj=instance)
 | 
			
		||||
                    apply_blueprint.delay(instance.pk.hex)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(
 | 
			
		||||
    description=_("Find blueprints as `blueprints_find` does, but return a safe dict."),
 | 
			
		||||
@CELERY_APP.task(
 | 
			
		||||
    throws=(DatabaseError, ProgrammingError, InternalError),
 | 
			
		||||
)
 | 
			
		||||
def blueprints_find_dict():
 | 
			
		||||
    """Find blueprints as `blueprints_find` does, but return a safe dict"""
 | 
			
		||||
    blueprints = []
 | 
			
		||||
    for blueprint in blueprints_find():
 | 
			
		||||
        blueprints.append(sanitize_dict(asdict(blueprint)))
 | 
			
		||||
@ -148,19 +146,21 @@ def blueprints_find() -> list[BlueprintFile]:
 | 
			
		||||
    return blueprints
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(
 | 
			
		||||
    description=_("Find blueprints and check if they need to be created in the database."),
 | 
			
		||||
    throws=(DatabaseError, ProgrammingError, InternalError),
 | 
			
		||||
@CELERY_APP.task(
 | 
			
		||||
    throws=(DatabaseError, ProgrammingError, InternalError), base=SystemTask, bind=True
 | 
			
		||||
)
 | 
			
		||||
def blueprints_discovery(path: str | None = None):
 | 
			
		||||
    self: Task = CurrentTask.get_task()
 | 
			
		||||
@prefill_task
 | 
			
		||||
def blueprints_discovery(self: SystemTask, path: str | None = None):
 | 
			
		||||
    """Find blueprints and check if they need to be created in the database"""
 | 
			
		||||
    count = 0
 | 
			
		||||
    for blueprint in blueprints_find():
 | 
			
		||||
        if path and blueprint.path != path:
 | 
			
		||||
            continue
 | 
			
		||||
        check_blueprint_v1_file(blueprint)
 | 
			
		||||
        count += 1
 | 
			
		||||
    self.info(f"Successfully imported {count} files.")
 | 
			
		||||
    self.set_status(
 | 
			
		||||
        TaskStatus.SUCCESSFUL, _("Successfully imported {count} files.".format(count=count))
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def check_blueprint_v1_file(blueprint: BlueprintFile):
 | 
			
		||||
@ -187,26 +187,22 @@ def check_blueprint_v1_file(blueprint: BlueprintFile):
 | 
			
		||||
        )
 | 
			
		||||
    if instance.last_applied_hash != blueprint.hash:
 | 
			
		||||
        LOGGER.info("Applying blueprint due to changed file", instance=instance, path=instance.path)
 | 
			
		||||
        apply_blueprint.send_with_options(args=(instance.pk,), rel_obj=instance)
 | 
			
		||||
        apply_blueprint.delay(str(instance.pk))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(description=_("Apply single blueprint."))
 | 
			
		||||
def apply_blueprint(instance_pk: UUID):
 | 
			
		||||
    try:
 | 
			
		||||
        self: Task = CurrentTask.get_task()
 | 
			
		||||
    except CurrentTaskNotFound:
 | 
			
		||||
        self = Task()
 | 
			
		||||
    self.set_uid(str(instance_pk))
 | 
			
		||||
@CELERY_APP.task(
 | 
			
		||||
    bind=True,
 | 
			
		||||
    base=SystemTask,
 | 
			
		||||
)
 | 
			
		||||
def apply_blueprint(self: SystemTask, instance_pk: str):
 | 
			
		||||
    """Apply single blueprint"""
 | 
			
		||||
    self.save_on_success = False
 | 
			
		||||
    instance: BlueprintInstance | None = None
 | 
			
		||||
    try:
 | 
			
		||||
        instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first()
 | 
			
		||||
        if not instance:
 | 
			
		||||
            self.warning(f"Could not find blueprint {instance_pk}, skipping")
 | 
			
		||||
        if not instance or not instance.enabled:
 | 
			
		||||
            return
 | 
			
		||||
        self.set_uid(slugify(instance.name))
 | 
			
		||||
        if not instance.enabled:
 | 
			
		||||
            self.info(f"Blueprint {instance.name} is disabled, skipping")
 | 
			
		||||
            return
 | 
			
		||||
        blueprint_content = instance.retrieve()
 | 
			
		||||
        file_hash = sha512(blueprint_content.encode()).hexdigest()
 | 
			
		||||
        importer = Importer.from_string(blueprint_content, instance.context)
 | 
			
		||||
@ -216,18 +212,19 @@ def apply_blueprint(instance_pk: UUID):
 | 
			
		||||
        if not valid:
 | 
			
		||||
            instance.status = BlueprintInstanceStatus.ERROR
 | 
			
		||||
            instance.save()
 | 
			
		||||
            self.logs(logs)
 | 
			
		||||
            self.set_status(TaskStatus.ERROR, *logs)
 | 
			
		||||
            return
 | 
			
		||||
        with capture_logs() as logs:
 | 
			
		||||
            applied = importer.apply()
 | 
			
		||||
            if not applied:
 | 
			
		||||
                instance.status = BlueprintInstanceStatus.ERROR
 | 
			
		||||
                instance.save()
 | 
			
		||||
                self.logs(logs)
 | 
			
		||||
                self.set_status(TaskStatus.ERROR, *logs)
 | 
			
		||||
                return
 | 
			
		||||
        instance.status = BlueprintInstanceStatus.SUCCESSFUL
 | 
			
		||||
        instance.last_applied_hash = file_hash
 | 
			
		||||
        instance.last_applied = now()
 | 
			
		||||
        self.set_status(TaskStatus.SUCCESSFUL)
 | 
			
		||||
    except (
 | 
			
		||||
        OSError,
 | 
			
		||||
        DatabaseError,
 | 
			
		||||
@ -238,14 +235,15 @@ def apply_blueprint(instance_pk: UUID):
 | 
			
		||||
    ) as exc:
 | 
			
		||||
        if instance:
 | 
			
		||||
            instance.status = BlueprintInstanceStatus.ERROR
 | 
			
		||||
        self.error(exc)
 | 
			
		||||
        self.set_error(exc)
 | 
			
		||||
    finally:
 | 
			
		||||
        if instance:
 | 
			
		||||
            instance.save()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(description=_("Remove blueprints which couldn't be fetched."))
 | 
			
		||||
@CELERY_APP.task()
 | 
			
		||||
def clear_failed_blueprints():
 | 
			
		||||
    """Remove blueprints which couldn't be fetched"""
 | 
			
		||||
    # Exclude OCI blueprints as those might be temporarily unavailable
 | 
			
		||||
    for blueprint in BlueprintInstance.objects.exclude(path__startswith=OCI_PREFIX):
 | 
			
		||||
        try:
 | 
			
		||||
 | 
			
		||||
@ -59,7 +59,6 @@ class BrandSerializer(ModelSerializer):
 | 
			
		||||
            "flow_device_code",
 | 
			
		||||
            "default_application",
 | 
			
		||||
            "web_certificate",
 | 
			
		||||
            "client_certificates",
 | 
			
		||||
            "attributes",
 | 
			
		||||
        ]
 | 
			
		||||
        extra_kwargs = {
 | 
			
		||||
@ -121,7 +120,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
 | 
			
		||||
        "domain",
 | 
			
		||||
        "branding_title",
 | 
			
		||||
        "web_certificate__name",
 | 
			
		||||
        "client_certificates__name",
 | 
			
		||||
    ]
 | 
			
		||||
    filterset_fields = [
 | 
			
		||||
        "brand_uuid",
 | 
			
		||||
@ -138,7 +136,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
 | 
			
		||||
        "flow_user_settings",
 | 
			
		||||
        "flow_device_code",
 | 
			
		||||
        "web_certificate",
 | 
			
		||||
        "client_certificates",
 | 
			
		||||
    ]
 | 
			
		||||
    ordering = ["domain"]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@ -1,16 +1,14 @@
 | 
			
		||||
"""authentik brands app"""
 | 
			
		||||
 | 
			
		||||
from authentik.blueprints.apps import ManagedAppConfig
 | 
			
		||||
from django.apps import AppConfig
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class AuthentikBrandsConfig(ManagedAppConfig):
 | 
			
		||||
class AuthentikBrandsConfig(AppConfig):
 | 
			
		||||
    """authentik Brand app"""
 | 
			
		||||
 | 
			
		||||
    name = "authentik.brands"
 | 
			
		||||
    label = "authentik_brands"
 | 
			
		||||
    verbose_name = "authentik Brands"
 | 
			
		||||
    default = True
 | 
			
		||||
    mountpoints = {
 | 
			
		||||
        "authentik.brands.urls_root": "",
 | 
			
		||||
    }
 | 
			
		||||
    default = True
 | 
			
		||||
 | 
			
		||||
@ -1,37 +0,0 @@
 | 
			
		||||
# Generated by Django 5.1.9 on 2025-05-19 15:09
 | 
			
		||||
 | 
			
		||||
import django.db.models.deletion
 | 
			
		||||
from django.db import migrations, models
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Migration(migrations.Migration):
 | 
			
		||||
 | 
			
		||||
    dependencies = [
 | 
			
		||||
        ("authentik_brands", "0009_brand_branding_default_flow_background"),
 | 
			
		||||
        ("authentik_crypto", "0004_alter_certificatekeypair_name"),
 | 
			
		||||
    ]
 | 
			
		||||
 | 
			
		||||
    operations = [
 | 
			
		||||
        migrations.AddField(
 | 
			
		||||
            model_name="brand",
 | 
			
		||||
            name="client_certificates",
 | 
			
		||||
            field=models.ManyToManyField(
 | 
			
		||||
                blank=True,
 | 
			
		||||
                default=None,
 | 
			
		||||
                help_text="Certificates used for client authentication.",
 | 
			
		||||
                to="authentik_crypto.certificatekeypair",
 | 
			
		||||
            ),
 | 
			
		||||
        ),
 | 
			
		||||
        migrations.AlterField(
 | 
			
		||||
            model_name="brand",
 | 
			
		||||
            name="web_certificate",
 | 
			
		||||
            field=models.ForeignKey(
 | 
			
		||||
                default=None,
 | 
			
		||||
                help_text="Web Certificate used by the authentik Core webserver.",
 | 
			
		||||
                null=True,
 | 
			
		||||
                on_delete=django.db.models.deletion.SET_DEFAULT,
 | 
			
		||||
                related_name="+",
 | 
			
		||||
                to="authentik_crypto.certificatekeypair",
 | 
			
		||||
            ),
 | 
			
		||||
        ),
 | 
			
		||||
    ]
 | 
			
		||||
@ -73,13 +73,6 @@ class Brand(SerializerModel):
 | 
			
		||||
        default=None,
 | 
			
		||||
        on_delete=models.SET_DEFAULT,
 | 
			
		||||
        help_text=_("Web Certificate used by the authentik Core webserver."),
 | 
			
		||||
        related_name="+",
 | 
			
		||||
    )
 | 
			
		||||
    client_certificates = models.ManyToManyField(
 | 
			
		||||
        CertificateKeyPair,
 | 
			
		||||
        default=None,
 | 
			
		||||
        blank=True,
 | 
			
		||||
        help_text=_("Certificates used for client authentication."),
 | 
			
		||||
    )
 | 
			
		||||
    attributes = models.JSONField(default=dict, blank=True)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@ -148,14 +148,3 @@ class TestBrands(APITestCase):
 | 
			
		||||
                "default_locale": "",
 | 
			
		||||
            },
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    def test_custom_css(self):
 | 
			
		||||
        """Test custom_css"""
 | 
			
		||||
        brand = create_test_brand()
 | 
			
		||||
        brand.branding_custom_css = """* {
 | 
			
		||||
            font-family: "Foo bar";
 | 
			
		||||
        }"""
 | 
			
		||||
        brand.save()
 | 
			
		||||
        res = self.client.get(reverse("authentik_core:if-user"))
 | 
			
		||||
        self.assertEqual(res.status_code, 200)
 | 
			
		||||
        self.assertIn(brand.branding_custom_css, res.content.decode())
 | 
			
		||||
 | 
			
		||||
@ -5,8 +5,6 @@ from typing import Any
 | 
			
		||||
from django.db.models import F, Q
 | 
			
		||||
from django.db.models import Value as V
 | 
			
		||||
from django.http.request import HttpRequest
 | 
			
		||||
from django.utils.html import _json_script_escapes
 | 
			
		||||
from django.utils.safestring import mark_safe
 | 
			
		||||
 | 
			
		||||
from authentik import get_full_version
 | 
			
		||||
from authentik.brands.models import Brand
 | 
			
		||||
@ -34,13 +32,8 @@ def context_processor(request: HttpRequest) -> dict[str, Any]:
 | 
			
		||||
    """Context Processor that injects brand object into every template"""
 | 
			
		||||
    brand = getattr(request, "brand", DEFAULT_BRAND)
 | 
			
		||||
    tenant = getattr(request, "tenant", Tenant())
 | 
			
		||||
    # similarly to `json_script` we escape everything HTML-related, however django
 | 
			
		||||
    # only directly exposes this as a function that also wraps it in a <script> tag
 | 
			
		||||
    # which we dont want for CSS
 | 
			
		||||
    brand_css = mark_safe(str(brand.branding_custom_css).translate(_json_script_escapes))  # nosec
 | 
			
		||||
    return {
 | 
			
		||||
        "brand": brand,
 | 
			
		||||
        "brand_css": brand_css,
 | 
			
		||||
        "footer_links": tenant.footer_links,
 | 
			
		||||
        "html_meta": {**get_http_meta()},
 | 
			
		||||
        "version": get_full_version(),
 | 
			
		||||
 | 
			
		||||
@ -2,9 +2,11 @@
 | 
			
		||||
 | 
			
		||||
from collections.abc import Iterator
 | 
			
		||||
from copy import copy
 | 
			
		||||
from datetime import timedelta
 | 
			
		||||
 | 
			
		||||
from django.core.cache import cache
 | 
			
		||||
from django.db.models import QuerySet
 | 
			
		||||
from django.db.models.functions import ExtractHour
 | 
			
		||||
from django.shortcuts import get_object_or_404
 | 
			
		||||
from drf_spectacular.types import OpenApiTypes
 | 
			
		||||
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
 | 
			
		||||
@ -18,6 +20,7 @@ from rest_framework.response import Response
 | 
			
		||||
from rest_framework.viewsets import ModelViewSet
 | 
			
		||||
from structlog.stdlib import get_logger
 | 
			
		||||
 | 
			
		||||
from authentik.admin.api.metrics import CoordinateSerializer
 | 
			
		||||
from authentik.api.pagination import Pagination
 | 
			
		||||
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
 | 
			
		||||
from authentik.core.api.providers import ProviderSerializer
 | 
			
		||||
@ -25,6 +28,7 @@ from authentik.core.api.used_by import UsedByMixin
 | 
			
		||||
from authentik.core.api.utils import ModelSerializer
 | 
			
		||||
from authentik.core.models import Application, User
 | 
			
		||||
from authentik.events.logs import LogEventSerializer, capture_logs
 | 
			
		||||
from authentik.events.models import EventAction
 | 
			
		||||
from authentik.lib.utils.file import (
 | 
			
		||||
    FilePathSerializer,
 | 
			
		||||
    FileUploadSerializer,
 | 
			
		||||
@ -317,3 +321,18 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
 | 
			
		||||
        """Set application icon (as URL)"""
 | 
			
		||||
        app: Application = self.get_object()
 | 
			
		||||
        return set_file_url(request, app, "meta_icon")
 | 
			
		||||
 | 
			
		||||
    @permission_required("authentik_core.view_application", ["authentik_events.view_event"])
 | 
			
		||||
    @extend_schema(responses={200: CoordinateSerializer(many=True)})
 | 
			
		||||
    @action(detail=True, pagination_class=None, filter_backends=[])
 | 
			
		||||
    def metrics(self, request: Request, slug: str):
 | 
			
		||||
        """Metrics for application logins"""
 | 
			
		||||
        app = self.get_object()
 | 
			
		||||
        return Response(
 | 
			
		||||
            get_objects_for_user(request.user, "authentik_events.view_event").filter(
 | 
			
		||||
                action=EventAction.AUTHORIZE_APPLICATION,
 | 
			
		||||
                context__authorized_application__pk=app.pk.hex,
 | 
			
		||||
            )
 | 
			
		||||
            # 3 data points per day, so 8 hour spans
 | 
			
		||||
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
@ -1,6 +1,8 @@
 | 
			
		||||
"""Authenticator Devices API Views"""
 | 
			
		||||
 | 
			
		||||
from drf_spectacular.utils import extend_schema
 | 
			
		||||
from django.utils.translation import gettext_lazy as _
 | 
			
		||||
from drf_spectacular.types import OpenApiTypes
 | 
			
		||||
from drf_spectacular.utils import OpenApiParameter, extend_schema
 | 
			
		||||
from guardian.shortcuts import get_objects_for_user
 | 
			
		||||
from rest_framework.fields import (
 | 
			
		||||
    BooleanField,
 | 
			
		||||
@ -13,7 +15,6 @@ from rest_framework.request import Request
 | 
			
		||||
from rest_framework.response import Response
 | 
			
		||||
from rest_framework.viewsets import ViewSet
 | 
			
		||||
 | 
			
		||||
from authentik.core.api.users import ParamUserSerializer
 | 
			
		||||
from authentik.core.api.utils import MetaNameSerializer
 | 
			
		||||
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
 | 
			
		||||
from authentik.stages.authenticator import device_classes, devices_for_user
 | 
			
		||||
@ -22,7 +23,7 @@ from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DeviceSerializer(MetaNameSerializer):
 | 
			
		||||
    """Serializer for authenticator devices"""
 | 
			
		||||
    """Serializer for Duo authenticator devices"""
 | 
			
		||||
 | 
			
		||||
    pk = CharField()
 | 
			
		||||
    name = CharField()
 | 
			
		||||
@ -32,27 +33,22 @@ class DeviceSerializer(MetaNameSerializer):
 | 
			
		||||
    last_updated = DateTimeField(read_only=True)
 | 
			
		||||
    last_used = DateTimeField(read_only=True, allow_null=True)
 | 
			
		||||
    extra_description = SerializerMethodField()
 | 
			
		||||
    external_id = SerializerMethodField()
 | 
			
		||||
 | 
			
		||||
    def get_type(self, instance: Device) -> str:
 | 
			
		||||
        """Get type of device"""
 | 
			
		||||
        return instance._meta.label
 | 
			
		||||
 | 
			
		||||
    def get_extra_description(self, instance: Device) -> str | None:
 | 
			
		||||
    def get_extra_description(self, instance: Device) -> str:
 | 
			
		||||
        """Get extra description"""
 | 
			
		||||
        if isinstance(instance, WebAuthnDevice):
 | 
			
		||||
            return instance.device_type.description if instance.device_type else None
 | 
			
		||||
            return (
 | 
			
		||||
                instance.device_type.description
 | 
			
		||||
                if instance.device_type
 | 
			
		||||
                else _("Extra description not available")
 | 
			
		||||
            )
 | 
			
		||||
        if isinstance(instance, EndpointDevice):
 | 
			
		||||
            return instance.data.get("deviceSignals", {}).get("deviceModel")
 | 
			
		||||
        return None
 | 
			
		||||
 | 
			
		||||
    def get_external_id(self, instance: Device) -> str | None:
 | 
			
		||||
        """Get external Device ID"""
 | 
			
		||||
        if isinstance(instance, WebAuthnDevice):
 | 
			
		||||
            return instance.device_type.aaguid if instance.device_type else None
 | 
			
		||||
        if isinstance(instance, EndpointDevice):
 | 
			
		||||
            return instance.data.get("deviceSignals", {}).get("deviceModel")
 | 
			
		||||
        return None
 | 
			
		||||
        return ""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class DeviceViewSet(ViewSet):
 | 
			
		||||
@ -61,6 +57,7 @@ class DeviceViewSet(ViewSet):
 | 
			
		||||
    serializer_class = DeviceSerializer
 | 
			
		||||
    permission_classes = [IsAuthenticated]
 | 
			
		||||
 | 
			
		||||
    @extend_schema(responses={200: DeviceSerializer(many=True)})
 | 
			
		||||
    def list(self, request: Request) -> Response:
 | 
			
		||||
        """Get all devices for current user"""
 | 
			
		||||
        devices = devices_for_user(request.user)
 | 
			
		||||
@ -82,11 +79,18 @@ class AdminDeviceViewSet(ViewSet):
 | 
			
		||||
            yield from device_set
 | 
			
		||||
 | 
			
		||||
    @extend_schema(
 | 
			
		||||
        parameters=[ParamUserSerializer],
 | 
			
		||||
        parameters=[
 | 
			
		||||
            OpenApiParameter(
 | 
			
		||||
                name="user",
 | 
			
		||||
                location=OpenApiParameter.QUERY,
 | 
			
		||||
                type=OpenApiTypes.INT,
 | 
			
		||||
            )
 | 
			
		||||
        ],
 | 
			
		||||
        responses={200: DeviceSerializer(many=True)},
 | 
			
		||||
    )
 | 
			
		||||
    def list(self, request: Request) -> Response:
 | 
			
		||||
        """Get all devices for current user"""
 | 
			
		||||
        args = ParamUserSerializer(data=request.query_params)
 | 
			
		||||
        args.is_valid(raise_exception=True)
 | 
			
		||||
        return Response(DeviceSerializer(self.get_devices(**args.validated_data), many=True).data)
 | 
			
		||||
        kwargs = {}
 | 
			
		||||
        if "user" in request.query_params:
 | 
			
		||||
            kwargs = {"user": request.query_params["user"]}
 | 
			
		||||
        return Response(DeviceSerializer(self.get_devices(**kwargs), many=True).data)
 | 
			
		||||
 | 
			
		||||
@ -6,6 +6,7 @@ from typing import Any
 | 
			
		||||
 | 
			
		||||
from django.contrib.auth import update_session_auth_hash
 | 
			
		||||
from django.contrib.auth.models import Permission
 | 
			
		||||
from django.db.models.functions import ExtractHour
 | 
			
		||||
from django.db.transaction import atomic
 | 
			
		||||
from django.db.utils import IntegrityError
 | 
			
		||||
from django.urls import reverse_lazy
 | 
			
		||||
@ -51,6 +52,7 @@ from rest_framework.validators import UniqueValidator
 | 
			
		||||
from rest_framework.viewsets import ModelViewSet
 | 
			
		||||
from structlog.stdlib import get_logger
 | 
			
		||||
 | 
			
		||||
from authentik.admin.api.metrics import CoordinateSerializer
 | 
			
		||||
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
 | 
			
		||||
from authentik.brands.models import Brand
 | 
			
		||||
from authentik.core.api.used_by import UsedByMixin
 | 
			
		||||
@ -82,7 +84,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN
 | 
			
		||||
from authentik.lib.avatars import get_avatar
 | 
			
		||||
from authentik.rbac.decorators import permission_required
 | 
			
		||||
from authentik.rbac.models import get_permission_choices
 | 
			
		||||
from authentik.stages.email.flow import pickle_flow_token_for_email
 | 
			
		||||
from authentik.stages.email.models import EmailStage
 | 
			
		||||
from authentik.stages.email.tasks import send_mails
 | 
			
		||||
from authentik.stages.email.utils import TemplateEmailMessage
 | 
			
		||||
@ -90,12 +91,6 @@ from authentik.stages.email.utils import TemplateEmailMessage
 | 
			
		||||
LOGGER = get_logger()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ParamUserSerializer(PassiveSerializer):
 | 
			
		||||
    """Partial serializer for query parameters to select a user"""
 | 
			
		||||
 | 
			
		||||
    user = PrimaryKeyRelatedField(queryset=User.objects.all().exclude_anonymous(), required=False)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class UserGroupSerializer(ModelSerializer):
 | 
			
		||||
    """Simplified Group Serializer for user's groups"""
 | 
			
		||||
 | 
			
		||||
@ -321,6 +316,53 @@ class SessionUserSerializer(PassiveSerializer):
 | 
			
		||||
    original = UserSelfSerializer(required=False)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class UserMetricsSerializer(PassiveSerializer):
    """Per-user event metrics: logins, failed logins, and application authorizations.

    Expects ``user`` (the subject of the metrics) and ``request`` (used to scope
    visible events to the requesting user's permissions) in the serializer context.
    """

    logins = SerializerMethodField()
    logins_failed = SerializerMethodField()
    authorizations = SerializerMethodField()

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_logins(self, _):
        """Get successful logins per 8 hours for the last 7 days"""
        user = self.context["user"]
        request = self.context["request"]
        return (
            # Only count events the requesting user is allowed to view
            get_objects_for_user(request.user, "authentik_events.view_event").filter(
                action=EventAction.LOGIN, user__pk=user.pk
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_logins_failed(self, _):
        """Get failed logins per 8 hours for the last 7 days"""
        user = self.context["user"]
        request = self.context["request"]
        return (
            get_objects_for_user(request.user, "authentik_events.view_event").filter(
                # Failed logins are matched on the attempted username from the
                # event context (there is no user FK for a failed attempt)
                action=EventAction.LOGIN_FAILED, context__username=user.username
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_authorizations(self, _):
        """Get application authorizations per 8 hours for the last 7 days"""
        user = self.context["user"]
        request = self.context["request"]
        return (
            get_objects_for_user(request.user, "authentik_events.view_event").filter(
                action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )
			
		||||
 | 
			
		||||
 | 
			
		||||
class UsersFilter(FilterSet):
 | 
			
		||||
    """Filter for users"""
 | 
			
		||||
 | 
			
		||||
@ -392,23 +434,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
 | 
			
		||||
    queryset = User.objects.none()
 | 
			
		||||
    ordering = ["username"]
 | 
			
		||||
    serializer_class = UserSerializer
 | 
			
		||||
    filterset_class = UsersFilter
 | 
			
		||||
    search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"]
 | 
			
		||||
 | 
			
		||||
    def get_ql_fields(self):
        """Fields exposed to the djangoql search backend for the User model."""
        from djangoql.schema import BoolField, StrField

        from authentik.enterprise.search.fields import ChoiceSearchField, JSONSearchField

        # Plain string columns
        fields = [StrField(User, name) for name in ("username", "name", "email", "path")]
        # Tri-state boolean (True / False / unset)
        fields.append(BoolField(User, "is_active", nullable=True))
        # Enum-backed user type
        fields.append(ChoiceSearchField(User, "type"))
        # Free-form JSON attributes; nested key suggestions disabled
        fields.append(JSONSearchField(User, "attributes", suggest_nested=False))
        return fields
			
		||||
    filterset_class = UsersFilter
 | 
			
		||||
 | 
			
		||||
    def get_queryset(self):
 | 
			
		||||
        base_qs = User.objects.all().exclude_anonymous()
 | 
			
		||||
@ -424,7 +451,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
 | 
			
		||||
    def list(self, request, *args, **kwargs):
 | 
			
		||||
        return super().list(request, *args, **kwargs)
 | 
			
		||||
 | 
			
		||||
    def _create_recovery_link(self, for_email=False) -> tuple[str, Token]:
 | 
			
		||||
    def _create_recovery_link(self) -> tuple[str, Token]:
 | 
			
		||||
        """Create a recovery link (when the current brand has a recovery flow set),
 | 
			
		||||
        that can either be shown to an admin or sent to the user directly"""
 | 
			
		||||
        brand: Brand = self.request._request.brand
 | 
			
		||||
@ -446,16 +473,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):
 | 
			
		||||
            raise ValidationError(
 | 
			
		||||
                {"non_field_errors": "Recovery flow not applicable to user"}
 | 
			
		||||
            ) from None
 | 
			
		||||
        _plan = FlowToken.pickle(plan)
 | 
			
		||||
        if for_email:
 | 
			
		||||
            _plan = pickle_flow_token_for_email(plan)
 | 
			
		||||
        token, __ = FlowToken.objects.update_or_create(
 | 
			
		||||
            identifier=f"{user.uid}-password-reset",
 | 
			
		||||
            defaults={
 | 
			
		||||
                "user": user,
 | 
			
		||||
                "flow": flow,
 | 
			
		||||
                "_plan": _plan,
 | 
			
		||||
                "revoke_on_execution": not for_email,
 | 
			
		||||
                "_plan": FlowToken.pickle(plan),
 | 
			
		||||
            },
 | 
			
		||||
        )
 | 
			
		||||
        querystring = urlencode({QS_KEY_TOKEN: token.key})
 | 
			
		||||
@ -579,6 +602,17 @@ class UserViewSet(UsedByMixin, ModelViewSet):
 | 
			
		||||
            update_session_auth_hash(self.request, user)
 | 
			
		||||
        return Response(status=204)
 | 
			
		||||
 | 
			
		||||
    @permission_required("authentik_core.view_user", ["authentik_events.view_event"])
    @extend_schema(responses={200: UserMetricsSerializer(many=False)})
    @action(detail=True, pagination_class=None, filter_backends=[])
    def metrics(self, request: Request, pk: int) -> Response:
        """User metrics per 1h"""
        user: User = self.get_object()
        # The serializer reads the subject user and the request from its context
        serializer = UserMetricsSerializer(
            instance={},
            context={"user": user, "request": request},
        )
        return Response(serializer.data)
 | 
			
		||||
    @permission_required("authentik_core.reset_user_password")
 | 
			
		||||
    @extend_schema(
 | 
			
		||||
        responses={
 | 
			
		||||
@ -614,7 +648,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
 | 
			
		||||
        if for_user.email == "":
 | 
			
		||||
            LOGGER.debug("User doesn't have an email address")
 | 
			
		||||
            raise ValidationError({"non_field_errors": "User does not have an email address set."})
 | 
			
		||||
        link, token = self._create_recovery_link(for_email=True)
 | 
			
		||||
        link, token = self._create_recovery_link()
 | 
			
		||||
        # Lookup the email stage to assure the current user can access it
 | 
			
		||||
        stages = get_objects_for_user(
 | 
			
		||||
            request.user, "authentik_stages_email.view_emailstage"
 | 
			
		||||
 | 
			
		||||
@ -2,7 +2,6 @@
 | 
			
		||||
 | 
			
		||||
from typing import Any
 | 
			
		||||
 | 
			
		||||
from django.db import models
 | 
			
		||||
from django.db.models import Model
 | 
			
		||||
from drf_spectacular.extensions import OpenApiSerializerFieldExtension
 | 
			
		||||
from drf_spectacular.plumbing import build_basic_type
 | 
			
		||||
@ -31,27 +30,7 @@ def is_dict(value: Any):
 | 
			
		||||
    raise ValidationError("Value must be a dictionary, and not have any duplicate keys.")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class JSONDictField(JSONField):
 | 
			
		||||
    """JSON Field which only allows dictionaries"""
 | 
			
		||||
 | 
			
		||||
    default_validators = [is_dict]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class JSONExtension(OpenApiSerializerFieldExtension):
 | 
			
		||||
    """Generate API Schema for JSON fields as"""
 | 
			
		||||
 | 
			
		||||
    target_class = "authentik.core.api.utils.JSONDictField"
 | 
			
		||||
 | 
			
		||||
    def map_serializer_field(self, auto_schema, direction):
 | 
			
		||||
        return build_basic_type(OpenApiTypes.OBJECT)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ModelSerializer(BaseModelSerializer):
 | 
			
		||||
 | 
			
		||||
    # By default, JSON fields we have are used to store dictionaries
 | 
			
		||||
    serializer_field_mapping = BaseModelSerializer.serializer_field_mapping.copy()
 | 
			
		||||
    serializer_field_mapping[models.JSONField] = JSONDictField
 | 
			
		||||
 | 
			
		||||
    def create(self, validated_data):
 | 
			
		||||
        instance = super().create(validated_data)
 | 
			
		||||
 | 
			
		||||
@ -92,6 +71,21 @@ class ModelSerializer(BaseModelSerializer):
 | 
			
		||||
        return instance
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class JSONDictField(JSONField):
    """JSON Field which only allows dictionaries"""

    # Rejects non-dict payloads (and duplicate keys) at validation time
    default_validators = [is_dict]
 | 
			
		||||
class JSONExtension(OpenApiSerializerFieldExtension):
    """Generate OpenAPI schema for JSONDictField as a plain object type."""

    target_class = "authentik.core.api.utils.JSONDictField"

    def map_serializer_field(self, auto_schema, direction):
        # Documented as a free-form object regardless of direction
        schema = build_basic_type(OpenApiTypes.OBJECT)
        return schema
			
		||||
 | 
			
		||||
class PassiveSerializer(Serializer):
 | 
			
		||||
    """Base serializer class which doesn't implement create/update methods"""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@ -1,7 +1,8 @@
 | 
			
		||||
"""authentik core app config"""
 | 
			
		||||
 | 
			
		||||
from django.conf import settings
 | 
			
		||||
 | 
			
		||||
from authentik.blueprints.apps import ManagedAppConfig
 | 
			
		||||
from authentik.tasks.schedules.lib import ScheduleSpec
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class AuthentikCoreConfig(ManagedAppConfig):
 | 
			
		||||
@ -13,6 +14,14 @@ class AuthentikCoreConfig(ManagedAppConfig):
 | 
			
		||||
    mountpoint = ""
 | 
			
		||||
    default = True
 | 
			
		||||
 | 
			
		||||
    @ManagedAppConfig.reconcile_global
    def debug_worker_hook(self):
        """Dispatch startup tasks inline when debugging"""
        # Only relevant in DEBUG mode, where no separate worker process runs
        if not settings.DEBUG:
            return
        from authentik.root.celery import worker_ready_hook

        worker_ready_hook()
 | 
			
		||||
    @ManagedAppConfig.reconcile_tenant
 | 
			
		||||
    def source_inbuilt(self):
 | 
			
		||||
        """Reconcile inbuilt source"""
 | 
			
		||||
@ -25,18 +34,3 @@ class AuthentikCoreConfig(ManagedAppConfig):
 | 
			
		||||
            },
 | 
			
		||||
            managed=Source.MANAGED_INBUILT,
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    @property
    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
        """Recurring per-tenant maintenance schedules for the core app."""
        from authentik.core.tasks import clean_expired_models, clean_temporary_users

        # Crontab minute offsets are staggered so the two cleanups
        # don't fire in the same minute
        schedule = [
            (clean_expired_models, "2-59/5 * * * *"),
            (clean_temporary_users, "9-59/5 * * * *"),
        ]
        return [ScheduleSpec(actor=task, crontab=crontab) for task, crontab in schedule]
		||||
 | 
			
		||||
							
								
								
									
										21
									
								
								authentik/core/management/commands/bootstrap_tasks.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										21
									
								
								authentik/core/management/commands/bootstrap_tasks.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,21 @@
 | 
			
		||||
"""Run bootstrap tasks"""
 | 
			
		||||
 | 
			
		||||
from django.core.management.base import BaseCommand
 | 
			
		||||
from django_tenants.utils import get_public_schema_name
 | 
			
		||||
 | 
			
		||||
from authentik.root.celery import _get_startup_tasks_all_tenants, _get_startup_tasks_default_tenant
 | 
			
		||||
from authentik.tenants.models import Tenant
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Command(BaseCommand):
    """Run bootstrap tasks to ensure certain objects are created"""

    def handle(self, **options):
        """Run startup tasks for the public tenant, then for every ready tenant."""
        # The public/default tenant is loop-invariant; fetch it once instead of
        # issuing a database query for every task.
        default_tenant = Tenant.objects.get(schema_name=get_public_schema_name())
        for task in _get_startup_tasks_default_tenant():
            # `with tenant:` activates that tenant's schema for the duration
            with default_tenant:
                task()

        for task in _get_startup_tasks_all_tenants():
            # Only tenants whose schema has been fully created
            for tenant in Tenant.objects.filter(ready=True):
                with tenant:
                    task()
		||||
@ -13,6 +13,7 @@ class Command(TenantCommand):
 | 
			
		||||
        parser.add_argument("usernames", nargs="*", type=str)
 | 
			
		||||
 | 
			
		||||
    def handle_per_tenant(self, **options):
 | 
			
		||||
        print(options)
 | 
			
		||||
        new_type = UserTypes(options["type"])
 | 
			
		||||
        qs = (
 | 
			
		||||
            User.objects.exclude_anonymous()
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										47
									
								
								authentik/core/management/commands/worker.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										47
									
								
								authentik/core/management/commands/worker.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,47 @@
 | 
			
		||||
"""Run worker"""
 | 
			
		||||
 | 
			
		||||
from sys import exit as sysexit
 | 
			
		||||
from tempfile import tempdir
 | 
			
		||||
 | 
			
		||||
from celery.apps.worker import Worker
 | 
			
		||||
from django.core.management.base import BaseCommand
 | 
			
		||||
from django.db import close_old_connections
 | 
			
		||||
from structlog.stdlib import get_logger
 | 
			
		||||
 | 
			
		||||
from authentik.lib.config import CONFIG
 | 
			
		||||
from authentik.lib.debug import start_debug_server
 | 
			
		||||
from authentik.root.celery import CELERY_APP
 | 
			
		||||
 | 
			
		||||
LOGGER = get_logger()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Command(BaseCommand):
    """Run worker"""

    def add_arguments(self, parser):
        """Register CLI flags for the worker command."""
        parser.add_argument(
            "-b",
            "--beat",
            # store_false: passing -b disables the embedded Beat scheduler
            action="store_false",
            help="When set, this worker will _not_ run Beat (scheduled) tasks",
        )

    def handle(self, **options):
        """Start a Celery worker, optionally with an embedded Beat scheduler."""
        # gettempdir() resolves the actual temp directory. The previous code used
        # `tempfile.tempdir`, which is a module-level override slot that is None
        # unless explicitly set, yielding a schedule path of "None/celerybeat-schedule".
        from tempfile import gettempdir

        LOGGER.debug("Celery options", **options)
        # Drop any stale DB connections inherited from the parent process
        close_old_connections()
        start_debug_server()
        worker: Worker = CELERY_APP.Worker(
            no_color=False,
            quiet=True,
            optimization="fair",
            # Autoscale between 1 and the configured max concurrency
            autoscale=(CONFIG.get_int("worker.concurrency"), 1),
            task_events=True,
            beat=options.get("beat", True),
            schedule_filename=f"{gettempdir()}/celerybeat-schedule",
            queues=["authentik", "authentik_scheduled", "authentik_events"],
        )
        for task in CELERY_APP.tasks:
            LOGGER.debug("Registered task", task=task)

        worker.start()
        # Propagate the worker's exit code to the shell
        sysexit(worker.exitcode)
		||||
@ -79,7 +79,6 @@ def _migrate_session(
 | 
			
		||||
        AuthenticatedSession.objects.using(db_alias).create(
 | 
			
		||||
            session=session,
 | 
			
		||||
            user=old_auth_session.user,
 | 
			
		||||
            uuid=old_auth_session.uuid,
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@ -1,81 +1,10 @@
 | 
			
		||||
# Generated by Django 5.1.9 on 2025-05-14 11:15
 | 
			
		||||
 | 
			
		||||
from django.apps.registry import Apps, apps as global_apps
 | 
			
		||||
from django.apps.registry import Apps
 | 
			
		||||
from django.db import migrations
 | 
			
		||||
from django.contrib.contenttypes.management import create_contenttypes
 | 
			
		||||
from django.contrib.auth.management import create_permissions
 | 
			
		||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    """Migrate permissions from OldAuthenticatedSession to AuthenticatedSession.

    Re-points global (user/group) and object-level (guardian) permission
    assignments from the old content type to the new one.
    """
    db_alias = schema_editor.connection.alias

    # `apps` here is just an instance of `django.db.migrations.state.AppConfigStub`, we need the
    # real config for creating permissions and content types
    authentik_core_config = global_apps.get_app_config("authentik_core")
    # These are only ran by django after all migrations, but we need them right now.
    # `global_apps` is needed,
    create_permissions(authentik_core_config, using=db_alias, verbosity=1)
    create_contenttypes(authentik_core_config, using=db_alias, verbosity=1)

    # But from now on, this is just a regular migration, so use `apps`
    Permission = apps.get_model("auth", "Permission")
    ContentType = apps.get_model("contenttypes", "ContentType")

    try:
        old_ct = ContentType.objects.using(db_alias).get(
            app_label="authentik_core", model="oldauthenticatedsession"
        )
        new_ct = ContentType.objects.using(db_alias).get(
            app_label="authentik_core", model="authenticatedsession"
        )
    except ContentType.DoesNotExist:
        # This should exist at this point, but if not, let's cut our losses
        return

    # Get all permissions for the old content type
    old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct)

    # Historical models are loop-invariant; resolve them once instead of on
    # every iteration of the permission loop below
    User = apps.get_model("authentik_core", "User")
    DjangoGroup = apps.get_model("auth", "Group")
    UserObjectPermission = apps.get_model("guardian", "UserObjectPermission")
    GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission")

    # Create equivalent permissions for the new content type
    for old_perm in old_perms:
        new_perm = (
            Permission.objects.using(db_alias)
            .filter(
                content_type=new_ct,
                codename=old_perm.codename,
            )
            .first()
        )
        if not new_perm:
            # This should exist at this point, but if not, let's cut our losses
            continue

        # Global user permissions
        User.user_permissions.through.objects.using(db_alias).filter(
            permission=old_perm
        ).all().update(permission=new_perm)

        # Global role permissions
        DjangoGroup.permissions.through.objects.using(db_alias).filter(
            permission=old_perm
        ).all().update(permission=new_perm)

        # Object user permissions
        UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
            permission=new_perm, content_type=new_ct
        )

        # Object role permissions
        GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
            permission=new_perm, content_type=new_ct
        )
 | 
			
		||||
def remove_old_authenticated_session_content_type(
 | 
			
		||||
    apps: Apps, schema_editor: BaseDatabaseSchemaEditor
 | 
			
		||||
):
 | 
			
		||||
@ -92,12 +21,7 @@ class Migration(migrations.Migration):
 | 
			
		||||
    ]
 | 
			
		||||
 | 
			
		||||
    operations = [
 | 
			
		||||
        migrations.RunPython(
 | 
			
		||||
            code=migrate_authenticated_session_permissions,
 | 
			
		||||
            reverse_code=migrations.RunPython.noop,
 | 
			
		||||
        ),
 | 
			
		||||
        migrations.RunPython(
 | 
			
		||||
            code=remove_old_authenticated_session_content_type,
 | 
			
		||||
            reverse_code=migrations.RunPython.noop,
 | 
			
		||||
        ),
 | 
			
		||||
    ]
 | 
			
		||||
 | 
			
		||||
@ -18,7 +18,7 @@ from django.http import HttpRequest
 | 
			
		||||
from django.utils.functional import SimpleLazyObject, cached_property
 | 
			
		||||
from django.utils.timezone import now
 | 
			
		||||
from django.utils.translation import gettext_lazy as _
 | 
			
		||||
from django_cte import CTE, with_cte
 | 
			
		||||
from django_cte import CTEQuerySet, With
 | 
			
		||||
from guardian.conf import settings
 | 
			
		||||
from guardian.mixins import GuardianUserMixin
 | 
			
		||||
from model_utils.managers import InheritanceManager
 | 
			
		||||
@ -136,7 +136,7 @@ class AttributesMixin(models.Model):
 | 
			
		||||
        return instance, False
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class GroupQuerySet(QuerySet):
 | 
			
		||||
class GroupQuerySet(CTEQuerySet):
 | 
			
		||||
    def with_children_recursive(self):
 | 
			
		||||
        """Recursively get all groups that have the current queryset as parents
 | 
			
		||||
        or are indirectly related."""
 | 
			
		||||
@ -165,9 +165,9 @@ class GroupQuerySet(QuerySet):
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
        # Build the recursive query, see above
 | 
			
		||||
        cte = CTE.recursive(make_cte)
 | 
			
		||||
        cte = With.recursive(make_cte)
 | 
			
		||||
        # Return the result, as a usable queryset for Group.
 | 
			
		||||
        return with_cte(cte, select=cte.join(Group, group_uuid=cte.col.group_uuid))
 | 
			
		||||
        return cte.join(Group, group_uuid=cte.col.group_uuid).with_cte(cte)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Group(SerializerModel, AttributesMixin):
 | 
			
		||||
@ -1082,12 +1082,6 @@ class AuthenticatedSession(SerializerModel):
 | 
			
		||||
 | 
			
		||||
    user = models.ForeignKey(User, on_delete=models.CASCADE)
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def serializer(self) -> type[Serializer]:
 | 
			
		||||
        from authentik.core.api.authenticated_sessions import AuthenticatedSessionSerializer
 | 
			
		||||
 | 
			
		||||
        return AuthenticatedSessionSerializer
 | 
			
		||||
 | 
			
		||||
    class Meta:
 | 
			
		||||
        verbose_name = _("Authenticated Session")
 | 
			
		||||
        verbose_name_plural = _("Authenticated Sessions")
 | 
			
		||||
 | 
			
		||||
@ -3,9 +3,6 @@
 | 
			
		||||
from datetime import datetime, timedelta
 | 
			
		||||
 | 
			
		||||
from django.utils.timezone import now
 | 
			
		||||
from django.utils.translation import gettext_lazy as _
 | 
			
		||||
from django_dramatiq_postgres.middleware import CurrentTask
 | 
			
		||||
from dramatiq.actor import actor
 | 
			
		||||
from structlog.stdlib import get_logger
 | 
			
		||||
 | 
			
		||||
from authentik.core.models import (
 | 
			
		||||
@ -14,14 +11,17 @@ from authentik.core.models import (
 | 
			
		||||
    ExpiringModel,
 | 
			
		||||
    User,
 | 
			
		||||
)
 | 
			
		||||
from authentik.tasks.models import Task
 | 
			
		||||
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
 | 
			
		||||
from authentik.root.celery import CELERY_APP
 | 
			
		||||
 | 
			
		||||
LOGGER = get_logger()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(description=_("Remove expired objects."))
 | 
			
		||||
def clean_expired_models():
 | 
			
		||||
    self: Task = CurrentTask.get_task()
 | 
			
		||||
@CELERY_APP.task(bind=True, base=SystemTask)
 | 
			
		||||
@prefill_task
 | 
			
		||||
def clean_expired_models(self: SystemTask):
 | 
			
		||||
    """Remove expired objects"""
 | 
			
		||||
    messages = []
 | 
			
		||||
    for cls in ExpiringModel.__subclasses__():
 | 
			
		||||
        cls: ExpiringModel
 | 
			
		||||
        objects = (
 | 
			
		||||
@ -31,13 +31,16 @@ def clean_expired_models():
 | 
			
		||||
        for obj in objects:
 | 
			
		||||
            obj.expire_action()
 | 
			
		||||
        LOGGER.debug("Expired models", model=cls, amount=amount)
 | 
			
		||||
        self.info(f"Expired {amount} {cls._meta.verbose_name_plural}")
 | 
			
		||||
        messages.append(f"Expired {amount} {cls._meta.verbose_name_plural}")
 | 
			
		||||
    self.set_status(TaskStatus.SUCCESSFUL, *messages)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(description=_("Remove temporary users created by SAML Sources."))
 | 
			
		||||
def clean_temporary_users():
 | 
			
		||||
    self: Task = CurrentTask.get_task()
 | 
			
		||||
@CELERY_APP.task(bind=True, base=SystemTask)
 | 
			
		||||
@prefill_task
 | 
			
		||||
def clean_temporary_users(self: SystemTask):
 | 
			
		||||
    """Remove temporary users created by SAML Sources"""
 | 
			
		||||
    _now = datetime.now()
 | 
			
		||||
    messages = []
 | 
			
		||||
    deleted_users = 0
 | 
			
		||||
    for user in User.objects.filter(**{f"attributes__{USER_ATTRIBUTE_GENERATED}": True}):
 | 
			
		||||
        if not user.attributes.get(USER_ATTRIBUTE_EXPIRES):
 | 
			
		||||
@ -49,4 +52,5 @@ def clean_temporary_users():
 | 
			
		||||
            LOGGER.debug("User is expired and will be deleted.", user=user, delta=delta)
 | 
			
		||||
            user.delete()
 | 
			
		||||
            deleted_users += 1
 | 
			
		||||
    self.info(f"Successfully deleted {deleted_users} users.")
 | 
			
		||||
    messages.append(f"Successfully deleted {deleted_users} users.")
 | 
			
		||||
    self.set_status(TaskStatus.SUCCESSFUL, *messages)
 | 
			
		||||
 | 
			
		||||
@ -16,7 +16,7 @@
 | 
			
		||||
        {% block head_before %}
 | 
			
		||||
        {% endblock %}
 | 
			
		||||
        <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
 | 
			
		||||
        <style>{{ brand_css }}</style>
 | 
			
		||||
        <style>{{ brand.branding_custom_css }}</style>
 | 
			
		||||
        <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script>
 | 
			
		||||
        <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script>
 | 
			
		||||
        {% block head %}
 | 
			
		||||
 | 
			
		||||
@ -10,7 +10,7 @@
 | 
			
		||||
{% endblock %}
 | 
			
		||||
 | 
			
		||||
{% block body %}
 | 
			
		||||
<ak-message-container alignment="bottom"></ak-message-container>
 | 
			
		||||
<ak-message-container></ak-message-container>
 | 
			
		||||
<ak-interface-admin>
 | 
			
		||||
    <ak-loading></ak-loading>
 | 
			
		||||
</ak-interface-admin>
 | 
			
		||||
 | 
			
		||||
@ -114,7 +114,6 @@ class TestApplicationsAPI(APITestCase):
 | 
			
		||||
        self.assertJSONEqual(
 | 
			
		||||
            response.content.decode(),
 | 
			
		||||
            {
 | 
			
		||||
                "autocomplete": {},
 | 
			
		||||
                "pagination": {
 | 
			
		||||
                    "next": 0,
 | 
			
		||||
                    "previous": 0,
 | 
			
		||||
@ -168,7 +167,6 @@ class TestApplicationsAPI(APITestCase):
 | 
			
		||||
        self.assertJSONEqual(
 | 
			
		||||
            response.content.decode(),
 | 
			
		||||
            {
 | 
			
		||||
                "autocomplete": {},
 | 
			
		||||
                "pagination": {
 | 
			
		||||
                    "next": 0,
 | 
			
		||||
                    "previous": 0,
 | 
			
		||||
 | 
			
		||||
@ -36,7 +36,7 @@ class TestTasks(APITestCase):
 | 
			
		||||
            expires=now(), user=get_anonymous_user(), intent=TokenIntents.INTENT_API
 | 
			
		||||
        )
 | 
			
		||||
        key = token.key
 | 
			
		||||
        clean_expired_models.send()
 | 
			
		||||
        clean_expired_models.delay().get()
 | 
			
		||||
        token.refresh_from_db()
 | 
			
		||||
        self.assertNotEqual(key, token.key)
 | 
			
		||||
 | 
			
		||||
@ -50,5 +50,5 @@ class TestTasks(APITestCase):
 | 
			
		||||
                USER_ATTRIBUTE_EXPIRES: mktime(now().timetuple()),
 | 
			
		||||
            },
 | 
			
		||||
        )
 | 
			
		||||
        clean_temporary_users.send()
 | 
			
		||||
        clean_temporary_users.delay().get()
 | 
			
		||||
        self.assertFalse(User.objects.filter(username=username))
 | 
			
		||||
 | 
			
		||||
@ -81,6 +81,22 @@ class TestUsersAPI(APITestCase):
 | 
			
		||||
        response = self.client.get(reverse("authentik_api:user-list"), {"include_groups": "true"})
 | 
			
		||||
        self.assertEqual(response.status_code, 200)
 | 
			
		||||
 | 
			
		||||
    def test_metrics(self):
        """Test user's metrics"""
        self.client.force_login(self.admin)
        url = reverse("authentik_api:user-metrics", kwargs={"pk": self.user.pk})
        # Admin can read any user's metrics
        self.assertEqual(self.client.get(url).status_code, 200)
			
		||||
    def test_metrics_denied(self):
        """Test user's metrics (non-superuser)"""
        self.client.force_login(self.user)
        url = reverse("authentik_api:user-metrics", kwargs={"pk": self.user.pk})
        # A regular user lacks the required permissions, even for themselves
        self.assertEqual(self.client.get(url).status_code, 403)
 | 
			
		||||
    def test_recovery_no_flow(self):
 | 
			
		||||
        """Test user recovery link (no recovery flow set)"""
 | 
			
		||||
        self.client.force_login(self.admin)
 | 
			
		||||
 | 
			
		||||
@ -30,7 +30,6 @@ from structlog.stdlib import get_logger
 | 
			
		||||
 | 
			
		||||
from authentik.core.api.used_by import UsedByMixin
 | 
			
		||||
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
 | 
			
		||||
from authentik.core.models import UserTypes
 | 
			
		||||
from authentik.crypto.apps import MANAGED_KEY
 | 
			
		||||
from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg
 | 
			
		||||
from authentik.crypto.models import CertificateKeyPair
 | 
			
		||||
@ -273,12 +272,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
 | 
			
		||||
    def view_certificate(self, request: Request, pk: str) -> Response:
 | 
			
		||||
        """Return certificate-key pairs certificate and log access"""
 | 
			
		||||
        certificate: CertificateKeyPair = self.get_object()
 | 
			
		||||
        if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT:
 | 
			
		||||
            Event.new(  # noqa # nosec
 | 
			
		||||
                EventAction.SECRET_VIEW,
 | 
			
		||||
                secret=certificate,
 | 
			
		||||
                type="certificate",
 | 
			
		||||
            ).from_http(request)
 | 
			
		||||
        Event.new(  # noqa # nosec
 | 
			
		||||
            EventAction.SECRET_VIEW,
 | 
			
		||||
            secret=certificate,
 | 
			
		||||
            type="certificate",
 | 
			
		||||
        ).from_http(request)
 | 
			
		||||
        if "download" in request.query_params:
 | 
			
		||||
            # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
 | 
			
		||||
            response = HttpResponse(
 | 
			
		||||
@ -304,12 +302,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
 | 
			
		||||
    def view_private_key(self, request: Request, pk: str) -> Response:
 | 
			
		||||
        """Return certificate-key pairs private key and log access"""
 | 
			
		||||
        certificate: CertificateKeyPair = self.get_object()
 | 
			
		||||
        if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT:
 | 
			
		||||
            Event.new(  # noqa # nosec
 | 
			
		||||
                EventAction.SECRET_VIEW,
 | 
			
		||||
                secret=certificate,
 | 
			
		||||
                type="private_key",
 | 
			
		||||
            ).from_http(request)
 | 
			
		||||
        Event.new(  # noqa # nosec
 | 
			
		||||
            EventAction.SECRET_VIEW,
 | 
			
		||||
            secret=certificate,
 | 
			
		||||
            type="private_key",
 | 
			
		||||
        ).from_http(request)
 | 
			
		||||
        if "download" in request.query_params:
 | 
			
		||||
            # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
 | 
			
		||||
            response = HttpResponse(certificate.key_data, content_type="application/x-pem-file")
 | 
			
		||||
 | 
			
		||||
@ -4,8 +4,6 @@ from datetime import UTC, datetime
 | 
			
		||||
 | 
			
		||||
from authentik.blueprints.apps import ManagedAppConfig
 | 
			
		||||
from authentik.lib.generators import generate_id
 | 
			
		||||
from authentik.lib.utils.time import fqdn_rand
 | 
			
		||||
from authentik.tasks.schedules.lib import ScheduleSpec
 | 
			
		||||
 | 
			
		||||
MANAGED_KEY = "goauthentik.io/crypto/jwt-managed"
 | 
			
		||||
 | 
			
		||||
@ -69,14 +67,3 @@ class AuthentikCryptoConfig(ManagedAppConfig):
 | 
			
		||||
                "key_data": builder.private_key,
 | 
			
		||||
            },
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
 | 
			
		||||
        from authentik.crypto.tasks import certificate_discovery
 | 
			
		||||
 | 
			
		||||
        return [
 | 
			
		||||
            ScheduleSpec(
 | 
			
		||||
                actor=certificate_discovery,
 | 
			
		||||
                crontab=f"{fqdn_rand('crypto_certificate_discovery')} * * * *",
 | 
			
		||||
            ),
 | 
			
		||||
        ]
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										13
									
								
								authentik/crypto/settings.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								authentik/crypto/settings.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,13 @@
 | 
			
		||||
"""Crypto task Settings"""
 | 
			
		||||
 | 
			
		||||
from celery.schedules import crontab
 | 
			
		||||
 | 
			
		||||
from authentik.lib.utils.time import fqdn_rand
 | 
			
		||||
 | 
			
		||||
CELERY_BEAT_SCHEDULE = {
 | 
			
		||||
    "crypto_certificate_discovery": {
 | 
			
		||||
        "task": "authentik.crypto.tasks.certificate_discovery",
 | 
			
		||||
        "schedule": crontab(minute=fqdn_rand("crypto_certificate_discovery"), hour="*"),
 | 
			
		||||
        "options": {"queue": "authentik_scheduled"},
 | 
			
		||||
    },
 | 
			
		||||
}
 | 
			
		||||
@ -7,13 +7,13 @@ from cryptography.hazmat.backends import default_backend
 | 
			
		||||
from cryptography.hazmat.primitives.serialization import load_pem_private_key
 | 
			
		||||
from cryptography.x509.base import load_pem_x509_certificate
 | 
			
		||||
from django.utils.translation import gettext_lazy as _
 | 
			
		||||
from django_dramatiq_postgres.middleware import CurrentTask
 | 
			
		||||
from dramatiq.actor import actor
 | 
			
		||||
from structlog.stdlib import get_logger
 | 
			
		||||
 | 
			
		||||
from authentik.crypto.models import CertificateKeyPair
 | 
			
		||||
from authentik.events.models import TaskStatus
 | 
			
		||||
from authentik.events.system_tasks import SystemTask, prefill_task
 | 
			
		||||
from authentik.lib.config import CONFIG
 | 
			
		||||
from authentik.tasks.models import Task
 | 
			
		||||
from authentik.root.celery import CELERY_APP
 | 
			
		||||
 | 
			
		||||
LOGGER = get_logger()
 | 
			
		||||
 | 
			
		||||
@ -36,9 +36,10 @@ def ensure_certificate_valid(body: str):
 | 
			
		||||
    return body
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(description=_("Discover, import and update certificates from the filesystem."))
 | 
			
		||||
def certificate_discovery():
 | 
			
		||||
    self: Task = CurrentTask.get_task()
 | 
			
		||||
@CELERY_APP.task(bind=True, base=SystemTask)
 | 
			
		||||
@prefill_task
 | 
			
		||||
def certificate_discovery(self: SystemTask):
 | 
			
		||||
    """Discover, import and update certificates from the filesystem"""
 | 
			
		||||
    certs = {}
 | 
			
		||||
    private_keys = {}
 | 
			
		||||
    discovered = 0
 | 
			
		||||
@ -83,4 +84,6 @@ def certificate_discovery():
 | 
			
		||||
                dirty = True
 | 
			
		||||
        if dirty:
 | 
			
		||||
            cert.save()
 | 
			
		||||
    self.info(f"Successfully imported {discovered} files.")
 | 
			
		||||
    self.set_status(
 | 
			
		||||
        TaskStatus.SUCCESSFUL, _("Successfully imported {count} files.".format(count=discovered))
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
@ -338,7 +338,7 @@ class TestCrypto(APITestCase):
 | 
			
		||||
            with open(f"{temp_dir}/foo.bar/privkey.pem", "w+", encoding="utf-8") as _key:
 | 
			
		||||
                _key.write(builder.private_key)
 | 
			
		||||
            with CONFIG.patch("cert_discovery_dir", temp_dir):
 | 
			
		||||
                certificate_discovery.send()
 | 
			
		||||
                certificate_discovery()
 | 
			
		||||
        keypair: CertificateKeyPair = CertificateKeyPair.objects.filter(
 | 
			
		||||
            managed=MANAGED_DISCOVERED % "foo"
 | 
			
		||||
        ).first()
 | 
			
		||||
 | 
			
		||||
@ -3,8 +3,6 @@
 | 
			
		||||
from django.conf import settings
 | 
			
		||||
 | 
			
		||||
from authentik.blueprints.apps import ManagedAppConfig
 | 
			
		||||
from authentik.lib.utils.time import fqdn_rand
 | 
			
		||||
from authentik.tasks.schedules.lib import ScheduleSpec
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class EnterpriseConfig(ManagedAppConfig):
 | 
			
		||||
@ -28,14 +26,3 @@ class AuthentikEnterpriseConfig(EnterpriseConfig):
 | 
			
		||||
        from authentik.enterprise.license import LicenseKey
 | 
			
		||||
 | 
			
		||||
        return LicenseKey.cached_summary().status.is_valid
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
 | 
			
		||||
        from authentik.enterprise.tasks import enterprise_update_usage
 | 
			
		||||
 | 
			
		||||
        return [
 | 
			
		||||
            ScheduleSpec(
 | 
			
		||||
                actor=enterprise_update_usage,
 | 
			
		||||
                crontab=f"{fqdn_rand('enterprise_update_usage')} */2 * * *",
 | 
			
		||||
            ),
 | 
			
		||||
        ]
 | 
			
		||||
 | 
			
		||||
@ -1,8 +1,6 @@
 | 
			
		||||
"""authentik Unique Password policy app config"""
 | 
			
		||||
 | 
			
		||||
from authentik.enterprise.apps import EnterpriseConfig
 | 
			
		||||
from authentik.lib.utils.time import fqdn_rand
 | 
			
		||||
from authentik.tasks.schedules.lib import ScheduleSpec
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class AuthentikEnterprisePoliciesUniquePasswordConfig(EnterpriseConfig):
 | 
			
		||||
@ -10,21 +8,3 @@ class AuthentikEnterprisePoliciesUniquePasswordConfig(EnterpriseConfig):
 | 
			
		||||
    label = "authentik_policies_unique_password"
 | 
			
		||||
    verbose_name = "authentik Enterprise.Policies.Unique Password"
 | 
			
		||||
    default = True
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
 | 
			
		||||
        from authentik.enterprise.policies.unique_password.tasks import (
 | 
			
		||||
            check_and_purge_password_history,
 | 
			
		||||
            trim_password_histories,
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
        return [
 | 
			
		||||
            ScheduleSpec(
 | 
			
		||||
                actor=trim_password_histories,
 | 
			
		||||
                crontab=f"{fqdn_rand('policies_unique_password_trim')} */12 * * *",
 | 
			
		||||
            ),
 | 
			
		||||
            ScheduleSpec(
 | 
			
		||||
                actor=check_and_purge_password_history,
 | 
			
		||||
                crontab=f"{fqdn_rand('policies_unique_password_purge')} */24 * * *",
 | 
			
		||||
            ),
 | 
			
		||||
        ]
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										20
									
								
								authentik/enterprise/policies/unique_password/settings.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										20
									
								
								authentik/enterprise/policies/unique_password/settings.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,20 @@
 | 
			
		||||
"""Unique Password Policy settings"""
 | 
			
		||||
 | 
			
		||||
from celery.schedules import crontab
 | 
			
		||||
 | 
			
		||||
from authentik.lib.utils.time import fqdn_rand
 | 
			
		||||
 | 
			
		||||
CELERY_BEAT_SCHEDULE = {
 | 
			
		||||
    "policies_unique_password_trim_history": {
 | 
			
		||||
        "task": "authentik.enterprise.policies.unique_password.tasks.trim_password_histories",
 | 
			
		||||
        "schedule": crontab(minute=fqdn_rand("policies_unique_password_trim"), hour="*/12"),
 | 
			
		||||
        "options": {"queue": "authentik_scheduled"},
 | 
			
		||||
    },
 | 
			
		||||
    "policies_unique_password_check_purge": {
 | 
			
		||||
        "task": (
 | 
			
		||||
            "authentik.enterprise.policies.unique_password.tasks.check_and_purge_password_history"
 | 
			
		||||
        ),
 | 
			
		||||
        "schedule": crontab(minute=fqdn_rand("policies_unique_password_purge"), hour="*/24"),
 | 
			
		||||
        "options": {"queue": "authentik_scheduled"},
 | 
			
		||||
    },
 | 
			
		||||
}
 | 
			
		||||
@ -1,37 +1,35 @@
 | 
			
		||||
from django.db.models.aggregates import Count
 | 
			
		||||
from django.utils.translation import gettext_lazy as _
 | 
			
		||||
from django_dramatiq_postgres.middleware import CurrentTask
 | 
			
		||||
from dramatiq.actor import actor
 | 
			
		||||
from structlog import get_logger
 | 
			
		||||
 | 
			
		||||
from authentik.enterprise.policies.unique_password.models import (
 | 
			
		||||
    UniquePasswordPolicy,
 | 
			
		||||
    UserPasswordHistory,
 | 
			
		||||
)
 | 
			
		||||
from authentik.tasks.models import Task
 | 
			
		||||
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
 | 
			
		||||
from authentik.root.celery import CELERY_APP
 | 
			
		||||
 | 
			
		||||
LOGGER = get_logger()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(
 | 
			
		||||
    description=_(
 | 
			
		||||
        "Check if any UniquePasswordPolicy exists, and if not, purge the password history table."
 | 
			
		||||
    )
 | 
			
		||||
)
 | 
			
		||||
def check_and_purge_password_history():
 | 
			
		||||
    self: Task = CurrentTask.get_task()
 | 
			
		||||
 | 
			
		||||
@CELERY_APP.task(bind=True, base=SystemTask)
 | 
			
		||||
@prefill_task
 | 
			
		||||
def check_and_purge_password_history(self: SystemTask):
 | 
			
		||||
    """Check if any UniquePasswordPolicy exists, and if not, purge the password history table.
 | 
			
		||||
    This is run on a schedule instead of being triggered by policy binding deletion.
 | 
			
		||||
    """
 | 
			
		||||
    if not UniquePasswordPolicy.objects.exists():
 | 
			
		||||
        UserPasswordHistory.objects.all().delete()
 | 
			
		||||
        LOGGER.debug("Purged UserPasswordHistory table as no policies are in use")
 | 
			
		||||
        self.info("Successfully purged UserPasswordHistory")
 | 
			
		||||
        self.set_status(TaskStatus.SUCCESSFUL, "Successfully purged UserPasswordHistory")
 | 
			
		||||
        return
 | 
			
		||||
 | 
			
		||||
    self.info("Not purging password histories, a unique password policy exists")
 | 
			
		||||
    self.set_status(
 | 
			
		||||
        TaskStatus.SUCCESSFUL, "Not purging password histories, a unique password policy exists"
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(description=_("Remove user password history that are too old."))
 | 
			
		||||
def trim_password_histories():
 | 
			
		||||
@CELERY_APP.task(bind=True, base=SystemTask)
 | 
			
		||||
def trim_password_histories(self: SystemTask):
 | 
			
		||||
    """Removes rows from UserPasswordHistory older than
 | 
			
		||||
    the `n` most recent entries.
 | 
			
		||||
 | 
			
		||||
@ -39,8 +37,6 @@ def trim_password_histories():
 | 
			
		||||
    UniquePasswordPolicy policies.
 | 
			
		||||
    """
 | 
			
		||||
 | 
			
		||||
    self: Task = CurrentTask.get_task()
 | 
			
		||||
 | 
			
		||||
    # No policy, we'll let the cleanup above do its thing
 | 
			
		||||
    if not UniquePasswordPolicy.objects.exists():
 | 
			
		||||
        return
 | 
			
		||||
@ -67,4 +63,4 @@ def trim_password_histories():
 | 
			
		||||
 | 
			
		||||
    num_deleted, _ = UserPasswordHistory.objects.exclude(pk__in=all_pks_to_keep).delete()
 | 
			
		||||
    LOGGER.debug("Deleted stale password history records", count=num_deleted)
 | 
			
		||||
    self.info(f"Delete {num_deleted} stale password history records")
 | 
			
		||||
    self.set_status(TaskStatus.SUCCESSFUL, f"Delete {num_deleted} stale password history records")
 | 
			
		||||
 | 
			
		||||
@ -76,7 +76,7 @@ class TestCheckAndPurgePasswordHistory(TestCase):
 | 
			
		||||
        self.assertTrue(UserPasswordHistory.objects.exists())
 | 
			
		||||
 | 
			
		||||
        # Run the task - should purge since no policy is in use
 | 
			
		||||
        check_and_purge_password_history.send()
 | 
			
		||||
        check_and_purge_password_history()
 | 
			
		||||
 | 
			
		||||
        # Verify the table is empty
 | 
			
		||||
        self.assertFalse(UserPasswordHistory.objects.exists())
 | 
			
		||||
@ -99,7 +99,7 @@ class TestCheckAndPurgePasswordHistory(TestCase):
 | 
			
		||||
        self.assertTrue(UserPasswordHistory.objects.exists())
 | 
			
		||||
 | 
			
		||||
        # Run the task - should NOT purge since a policy is in use
 | 
			
		||||
        check_and_purge_password_history.send()
 | 
			
		||||
        check_and_purge_password_history()
 | 
			
		||||
 | 
			
		||||
        # Verify the entries still exist
 | 
			
		||||
        self.assertTrue(UserPasswordHistory.objects.exists())
 | 
			
		||||
@ -119,17 +119,17 @@ class TestTrimPasswordHistory(TestCase):
 | 
			
		||||
            [
 | 
			
		||||
                UserPasswordHistory(
 | 
			
		||||
                    user=self.user,
 | 
			
		||||
                    old_password="hunter1",  # nosec
 | 
			
		||||
                    old_password="hunter1",  # nosec B106
 | 
			
		||||
                    created_at=_now - timedelta(days=3),
 | 
			
		||||
                ),
 | 
			
		||||
                UserPasswordHistory(
 | 
			
		||||
                    user=self.user,
 | 
			
		||||
                    old_password="hunter2",  # nosec
 | 
			
		||||
                    old_password="hunter2",  # nosec B106
 | 
			
		||||
                    created_at=_now - timedelta(days=2),
 | 
			
		||||
                ),
 | 
			
		||||
                UserPasswordHistory(
 | 
			
		||||
                    user=self.user,
 | 
			
		||||
                    old_password="hunter3",  # nosec
 | 
			
		||||
                    old_password="hunter3",  # nosec B106
 | 
			
		||||
                    created_at=_now,
 | 
			
		||||
                ),
 | 
			
		||||
            ]
 | 
			
		||||
@ -142,7 +142,7 @@ class TestTrimPasswordHistory(TestCase):
 | 
			
		||||
            enabled=True,
 | 
			
		||||
            order=0,
 | 
			
		||||
        )
 | 
			
		||||
        trim_password_histories.send()
 | 
			
		||||
        trim_password_histories.delay()
 | 
			
		||||
        user_pwd_history_qs = UserPasswordHistory.objects.filter(user=self.user)
 | 
			
		||||
        self.assertEqual(len(user_pwd_history_qs), 1)
 | 
			
		||||
 | 
			
		||||
@ -159,7 +159,7 @@ class TestTrimPasswordHistory(TestCase):
 | 
			
		||||
            enabled=False,
 | 
			
		||||
            order=0,
 | 
			
		||||
        )
 | 
			
		||||
        trim_password_histories.send()
 | 
			
		||||
        trim_password_histories.delay()
 | 
			
		||||
        self.assertTrue(UserPasswordHistory.objects.filter(user=self.user).exists())
 | 
			
		||||
 | 
			
		||||
    def test_trim_password_history_fewer_records_than_maximum_is_no_op(self):
 | 
			
		||||
@ -174,5 +174,5 @@ class TestTrimPasswordHistory(TestCase):
 | 
			
		||||
            enabled=True,
 | 
			
		||||
            order=0,
 | 
			
		||||
        )
 | 
			
		||||
        trim_password_histories.send()
 | 
			
		||||
        trim_password_histories.delay()
 | 
			
		||||
        self.assertTrue(UserPasswordHistory.objects.filter(user=self.user).exists())
 | 
			
		||||
 | 
			
		||||
@ -55,5 +55,5 @@ class GoogleWorkspaceProviderViewSet(OutgoingSyncProviderStatusMixin, UsedByMixi
 | 
			
		||||
    ]
 | 
			
		||||
    search_fields = ["name"]
 | 
			
		||||
    ordering = ["name"]
 | 
			
		||||
    sync_task = google_workspace_sync
 | 
			
		||||
    sync_single_task = google_workspace_sync
 | 
			
		||||
    sync_objects_task = google_workspace_sync_objects
 | 
			
		||||
 | 
			
		||||
@ -25,7 +25,7 @@ class GoogleWorkspaceGroupClient(
 | 
			
		||||
    """Google client for groups"""
 | 
			
		||||
 | 
			
		||||
    connection_type = GoogleWorkspaceProviderGroup
 | 
			
		||||
    connection_attr = "googleworkspaceprovidergroup_set"
 | 
			
		||||
    connection_type_query = "group"
 | 
			
		||||
    can_discover = True
 | 
			
		||||
 | 
			
		||||
    def __init__(self, provider: GoogleWorkspaceProvider) -> None:
 | 
			
		||||
 | 
			
		||||
@ -20,7 +20,7 @@ class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceP
 | 
			
		||||
    """Sync authentik users into google workspace"""
 | 
			
		||||
 | 
			
		||||
    connection_type = GoogleWorkspaceProviderUser
 | 
			
		||||
    connection_attr = "googleworkspaceprovideruser_set"
 | 
			
		||||
    connection_type_query = "user"
 | 
			
		||||
    can_discover = True
 | 
			
		||||
 | 
			
		||||
    def __init__(self, provider: GoogleWorkspaceProvider) -> None:
 | 
			
		||||
 | 
			
		||||
@ -7,7 +7,6 @@ from django.db import models
 | 
			
		||||
from django.db.models import QuerySet
 | 
			
		||||
from django.templatetags.static import static
 | 
			
		||||
from django.utils.translation import gettext_lazy as _
 | 
			
		||||
from dramatiq.actor import Actor
 | 
			
		||||
from google.oauth2.service_account import Credentials
 | 
			
		||||
from rest_framework.serializers import Serializer
 | 
			
		||||
 | 
			
		||||
@ -111,12 +110,6 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
 | 
			
		||||
        help_text=_("Property mappings used for group creation/updating."),
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def sync_actor(self) -> Actor:
 | 
			
		||||
        from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync
 | 
			
		||||
 | 
			
		||||
        return google_workspace_sync
 | 
			
		||||
 | 
			
		||||
    def client_for_model(
 | 
			
		||||
        self,
 | 
			
		||||
        model: type[User | Group | GoogleWorkspaceProviderUser | GoogleWorkspaceProviderGroup],
 | 
			
		||||
@ -139,11 +132,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
 | 
			
		||||
        if type == User:
 | 
			
		||||
            # Get queryset of all users with consistent ordering
 | 
			
		||||
            # according to the provider's settings
 | 
			
		||||
            base = (
 | 
			
		||||
                User.objects.prefetch_related("googleworkspaceprovideruser_set")
 | 
			
		||||
                .all()
 | 
			
		||||
                .exclude_anonymous()
 | 
			
		||||
            )
 | 
			
		||||
            base = User.objects.all().exclude_anonymous()
 | 
			
		||||
            if self.exclude_users_service_account:
 | 
			
		||||
                base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
 | 
			
		||||
                    type=UserTypes.INTERNAL_SERVICE_ACCOUNT
 | 
			
		||||
@ -153,11 +142,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
 | 
			
		||||
            return base.order_by("pk")
 | 
			
		||||
        if type == Group:
 | 
			
		||||
            # Get queryset of all groups with consistent ordering
 | 
			
		||||
            return (
 | 
			
		||||
                Group.objects.prefetch_related("googleworkspaceprovidergroup_set")
 | 
			
		||||
                .all()
 | 
			
		||||
                .order_by("pk")
 | 
			
		||||
            )
 | 
			
		||||
            return Group.objects.all().order_by("pk")
 | 
			
		||||
        raise ValueError(f"Invalid type {type}")
 | 
			
		||||
 | 
			
		||||
    def google_credentials(self):
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										13
									
								
								authentik/enterprise/providers/google_workspace/settings.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								authentik/enterprise/providers/google_workspace/settings.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,13 @@
 | 
			
		||||
"""Google workspace provider task Settings"""
 | 
			
		||||
 | 
			
		||||
from celery.schedules import crontab
 | 
			
		||||
 | 
			
		||||
from authentik.lib.utils.time import fqdn_rand
 | 
			
		||||
 | 
			
		||||
CELERY_BEAT_SCHEDULE = {
 | 
			
		||||
    "providers_google_workspace_sync": {
 | 
			
		||||
        "task": "authentik.enterprise.providers.google_workspace.tasks.google_workspace_sync_all",
 | 
			
		||||
        "schedule": crontab(minute=fqdn_rand("google_workspace_sync_all"), hour="*/4"),
 | 
			
		||||
        "options": {"queue": "authentik_scheduled"},
 | 
			
		||||
    },
 | 
			
		||||
}
 | 
			
		||||
@ -2,13 +2,15 @@
 | 
			
		||||
 | 
			
		||||
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
 | 
			
		||||
from authentik.enterprise.providers.google_workspace.tasks import (
 | 
			
		||||
    google_workspace_sync_direct_dispatch,
 | 
			
		||||
    google_workspace_sync_m2m_dispatch,
 | 
			
		||||
    google_workspace_sync,
 | 
			
		||||
    google_workspace_sync_direct,
 | 
			
		||||
    google_workspace_sync_m2m,
 | 
			
		||||
)
 | 
			
		||||
from authentik.lib.sync.outgoing.signals import register_signals
 | 
			
		||||
 | 
			
		||||
register_signals(
 | 
			
		||||
    GoogleWorkspaceProvider,
 | 
			
		||||
    task_sync_direct_dispatch=google_workspace_sync_direct_dispatch,
 | 
			
		||||
    task_sync_m2m_dispatch=google_workspace_sync_m2m_dispatch,
 | 
			
		||||
    task_sync_single=google_workspace_sync,
 | 
			
		||||
    task_sync_direct=google_workspace_sync_direct,
 | 
			
		||||
    task_sync_m2m=google_workspace_sync_m2m,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
@ -1,48 +1,37 @@
 | 
			
		||||
"""Google Provider tasks"""
 | 
			
		||||
 | 
			
		||||
from django.utils.translation import gettext_lazy as _
 | 
			
		||||
from dramatiq.actor import actor
 | 
			
		||||
 | 
			
		||||
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
 | 
			
		||||
from authentik.events.system_tasks import SystemTask
 | 
			
		||||
from authentik.lib.sync.outgoing.exceptions import TransientSyncException
 | 
			
		||||
from authentik.lib.sync.outgoing.tasks import SyncTasks
 | 
			
		||||
from authentik.root.celery import CELERY_APP
 | 
			
		||||
 | 
			
		||||
sync_tasks = SyncTasks(GoogleWorkspaceProvider)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(description=_("Sync Google Workspace provider objects."))
 | 
			
		||||
@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
 | 
			
		||||
def google_workspace_sync_objects(*args, **kwargs):
 | 
			
		||||
    return sync_tasks.sync_objects(*args, **kwargs)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(description=_("Full sync for Google Workspace provider."))
 | 
			
		||||
def google_workspace_sync(provider_pk: int, *args, **kwargs):
 | 
			
		||||
@CELERY_APP.task(
 | 
			
		||||
    base=SystemTask, bind=True, autoretry_for=(TransientSyncException,), retry_backoff=True
 | 
			
		||||
)
 | 
			
		||||
def google_workspace_sync(self, provider_pk: int, *args, **kwargs):
 | 
			
		||||
    """Run full sync for Google Workspace provider"""
 | 
			
		||||
    return sync_tasks.sync(provider_pk, google_workspace_sync_objects)
 | 
			
		||||
    return sync_tasks.sync_single(self, provider_pk, google_workspace_sync_objects)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(description=_("Sync a direct object (user, group) for Google Workspace provider."))
 | 
			
		||||
@CELERY_APP.task()
 | 
			
		||||
def google_workspace_sync_all():
 | 
			
		||||
    return sync_tasks.sync_all(google_workspace_sync)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
 | 
			
		||||
def google_workspace_sync_direct(*args, **kwargs):
 | 
			
		||||
    return sync_tasks.sync_signal_direct(*args, **kwargs)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(
 | 
			
		||||
    description=_(
 | 
			
		||||
        "Dispatch syncs for a direct object (user, group) for Google Workspace providers."
 | 
			
		||||
    )
 | 
			
		||||
)
 | 
			
		||||
def google_workspace_sync_direct_dispatch(*args, **kwargs):
 | 
			
		||||
    return sync_tasks.sync_signal_direct_dispatch(google_workspace_sync_direct, *args, **kwargs)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(description=_("Sync a related object (memberships) for Google Workspace provider."))
 | 
			
		||||
@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
 | 
			
		||||
def google_workspace_sync_m2m(*args, **kwargs):
 | 
			
		||||
    return sync_tasks.sync_signal_m2m(*args, **kwargs)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@actor(
 | 
			
		||||
    description=_(
 | 
			
		||||
        "Dispatch syncs for a related object (memberships) for Google Workspace providers."
 | 
			
		||||
    )
 | 
			
		||||
)
 | 
			
		||||
def google_workspace_sync_m2m_dispatch(*args, **kwargs):
 | 
			
		||||
    return sync_tasks.sync_signal_m2m_dispatch(google_workspace_sync_m2m, *args, **kwargs)
 | 
			
		||||
 | 
			
		||||
@ -324,7 +324,7 @@ class GoogleWorkspaceGroupTests(TestCase):
 | 
			
		||||
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
 | 
			
		||||
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
 | 
			
		||||
        ):
 | 
			
		||||
            google_workspace_sync.send(self.provider.pk).get_result()
 | 
			
		||||
            google_workspace_sync.delay(self.provider.pk).get()
 | 
			
		||||
            self.assertTrue(
 | 
			
		||||
                GoogleWorkspaceProviderGroup.objects.filter(
 | 
			
		||||
                    group=different_group, provider=self.provider
 | 
			
		||||
 | 
			
		||||
@ -302,7 +302,7 @@ class GoogleWorkspaceUserTests(TestCase):
 | 
			
		||||
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
 | 
			
		||||
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
 | 
			
		||||
        ):
 | 
			
		||||
            google_workspace_sync.send(self.provider.pk).get_result()
 | 
			
		||||
            google_workspace_sync.delay(self.provider.pk).get()
 | 
			
		||||
            self.assertTrue(
 | 
			
		||||
                GoogleWorkspaceProviderUser.objects.filter(
 | 
			
		||||
                    user=different_user, provider=self.provider
 | 
			
		||||
 | 
			
		||||
@ -53,5 +53,5 @@ class MicrosoftEntraProviderViewSet(OutgoingSyncProviderStatusMixin, UsedByMixin
 | 
			
		||||
    ]
 | 
			
		||||
    search_fields = ["name"]
 | 
			
		||||
    ordering = ["name"]
 | 
			
		||||
    sync_task = microsoft_entra_sync
 | 
			
		||||
    sync_single_task = microsoft_entra_sync
 | 
			
		||||
    sync_objects_task = microsoft_entra_sync_objects
 | 
			
		||||
 | 
			
		||||
@ -29,7 +29,7 @@ class MicrosoftEntraGroupClient(
 | 
			
		||||
    """Microsoft client for groups"""
 | 
			
		||||
 | 
			
		||||
    connection_type = MicrosoftEntraProviderGroup
 | 
			
		||||
    connection_attr = "microsoftentraprovidergroup_set"
 | 
			
		||||
    connection_type_query = "group"
 | 
			
		||||
    can_discover = True
 | 
			
		||||
 | 
			
		||||
    def __init__(self, provider: MicrosoftEntraProvider) -> None:
 | 
			
		||||
 | 
			
		||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user