Compare commits


1 Commit

Author SHA1 Message Date
a4f92f5e30 Prep for monorepo use.
web: Update config.

Flesh out build.

Fix issue surrounding build.

Fix paths.

Update workspaces.

Fix build steps.

Apply linter. Temporarily remove problem rules.

Add ignorefile. Prep for formatting.

Lint website.

Lint web, repo packages.

Refine Prettier usage. Fix imports.

Tidy build.

Move node ignore files.

Remove unused.

Update job. Fix lint step.

Build before compiling.

Use root for paths.

Fix issues surrounding import references, types, package names.

Fix build paths.

Tidy.

Enforce prefix.

Apply prefixes to imports.

Enable linter, compiler, etc.

Fix references. Update names.

Mark optional.

Revise mounts. Fix build order.

Update package.json.

Ignore all docusaurus.

Fix paths, types.

Clean up build steps, names.

Fix paths.

website: Fix nested paragraphs build warning.

web: Enforce module resolution.

Use consistent LTS version.

Track Node version.

Use default resolution.

Test main entrypoint.

Fix Node v20 compatibility.

Add task names.

WIP: Fix styles.
2025-04-17 02:46:10 +02:00
1213 changed files with 77703 additions and 119795 deletions

View File

@ -1,5 +1,5 @@
[bumpversion] [bumpversion]
current_version = 2025.6.3 current_version = 2025.2.4
tag = True tag = True
commit = True commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))? parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
@ -21,8 +21,6 @@ optional_value = final
[bumpversion:file:package.json] [bumpversion:file:package.json]
[bumpversion:file:package-lock.json]
[bumpversion:file:docker-compose.yml] [bumpversion:file:docker-compose.yml]
[bumpversion:file:schema.yml] [bumpversion:file:schema.yml]
@ -33,4 +31,6 @@ optional_value = final
[bumpversion:file:internal/constants/constants.go] [bumpversion:file:internal/constants/constants.go]
[bumpversion:file:web/src/common/constants.ts]
[bumpversion:file:lifecycle/aws/template.yaml] [bumpversion:file:lifecycle/aws/template.yaml]

View File

@ -5,10 +5,8 @@ dist/**
build/** build/**
build_docs/** build_docs/**
*Dockerfile *Dockerfile
**/*Dockerfile
blueprints/local blueprints/local
.git .git
!gen-ts-api/node_modules !gen-ts-api/node_modules
!gen-ts-api/dist/** !gen-ts-api/dist/**
!gen-go-api/ !gen-go-api/
.venv

View File

@ -7,10 +7,10 @@ charset = utf-8
trim_trailing_whitespace = true trim_trailing_whitespace = true
insert_final_newline = true insert_final_newline = true
[*.toml] [*.html]
indent_size = 2 indent_size = 2
[*.html] [schemas/*.json]
indent_size = 2 indent_size = 2
[*.{yaml,yml}] [*.{yaml,yml}]

View File

@ -28,15 +28,15 @@ runs:
- name: Setup node - name: Setup node
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:
node-version-file: web/package.json node-version-file: package.json
cache: "npm" cache: "npm"
cache-dependency-path: web/package-lock.json cache-dependency-path: package-lock.json
- name: Setup go - name: Setup go
uses: actions/setup-go@v5 uses: actions/setup-go@v5
with: with:
go-version-file: "go.mod" go-version-file: "go.mod"
- name: Setup docker cache - name: Setup docker cache
uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7 uses: ScribeMD/docker-cache@0.5.0
with: with:
key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }} key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
- name: Setup dependencies - name: Setup dependencies
@ -44,7 +44,7 @@ runs:
run: | run: |
export PSQL_TAG=${{ inputs.postgresql_version }} export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/docker-compose.yml up -d docker compose -f .github/actions/setup/docker-compose.yml up -d
cd web && npm ci npm ci
- name: Generate config - name: Generate config
shell: uv run python {0} shell: uv run python {0}
run: | run: |
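
On the monorepo side of this comparison, the composite setup action resolves the Node.js version and the npm cache from the repository root rather than from web/, and a single root-level npm ci replaces the earlier cd web && npm ci. A minimal sketch of those two steps, assuming a root package.json and package-lock.json exist:

    - name: Setup node
      uses: actions/setup-node@v4
      with:
        node-version-file: package.json        # Node version is read from the root manifest
        cache: "npm"
        cache-dependency-path: package-lock.json
    - name: Setup dependencies
      shell: bash
      run: |
        npm ci                                 # one root-level install covers every workspace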

View File

@ -23,13 +23,7 @@ updates:
- package-ecosystem: npm - package-ecosystem: npm
directories: directories:
- "/web" - "/web"
- "/web/packages/sfe" - "/web/sfe"
- "/web/packages/core"
- "/web/packages/esbuild-plugin-live-reload"
- "/packages/prettier-config"
- "/packages/tsconfig"
- "/packages/docusaurus-config"
- "/packages/eslint-config"
schedule: schedule:
interval: daily interval: daily
time: "04:00" time: "04:00"
@ -74,9 +68,6 @@ updates:
wdio: wdio:
patterns: patterns:
- "@wdio/*" - "@wdio/*"
goauthentik:
patterns:
- "@goauthentik/*"
- package-ecosystem: npm - package-ecosystem: npm
directory: "/website" directory: "/website"
schedule: schedule:
@ -97,16 +88,6 @@ updates:
- "swc-*" - "swc-*"
- "lightningcss*" - "lightningcss*"
- "@rspack/binding*" - "@rspack/binding*"
goauthentik:
patterns:
- "@goauthentik/*"
eslint:
patterns:
- "@eslint/*"
- "@typescript-eslint/*"
- "eslint-*"
- "eslint"
- "typescript-eslint"
- package-ecosystem: npm - package-ecosystem: npm
directory: "/lifecycle/aws" directory: "/lifecycle/aws"
schedule: schedule:
@ -137,15 +118,3 @@ updates:
prefix: "core:" prefix: "core:"
labels: labels:
- dependencies - dependencies
- package-ecosystem: docker-compose
directories:
# - /scripts # Maybe
- /tests/e2e
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies
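
The Dependabot configuration differs mainly in which npm directories are scanned and how internal packages are grouped. A minimal sketch of a multi-directory npm entry of the kind shown above (directory and group names taken from the diff; the surrounding file is abridged):

version: 2
updates:
  - package-ecosystem: npm
    directories:
      - "/web"
      - "/web/packages/sfe"
      - "/packages/eslint-config"
    schedule:
      interval: daily
      time: "04:00"
    open-pull-requests-limit: 10
    groups:
      goauthentik:
        patterns:
          - "@goauthentik/*"   # keeps internal workspace packages in a single update PR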

View File

@ -1,5 +1,5 @@
# Re-usable workflow for a single-architecture build # Re-usable workflow for a single-architecture build
name: Single-arch Container build name: "Single-arch Container build"
on: on:
workflow_call: workflow_call:
@ -38,13 +38,11 @@ jobs:
# Needed for attestation # Needed for attestation
id-token: write id-token: write
attestations: write attestations: write
# Needed for checkout
contents: read
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: docker/setup-qemu-action@v3.6.0 - uses: docker/setup-qemu-action@v3.6.0
- uses: docker/setup-buildx-action@v3 - uses: docker/setup-buildx-action@v3
- name: prepare variables - name: Prepare variables
uses: ./.github/actions/docker-push-variables uses: ./.github/actions/docker-push-variables
id: ev id: ev
env: env:
@ -66,12 +64,12 @@ jobs:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }} password: ${{ secrets.GITHUB_TOKEN }}
- name: make empty clients - name: Make empty clients
if: ${{ inputs.release }} if: ${{ inputs.release }}
run: | run: |
mkdir -p ./gen-ts-api mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api mkdir -p ./gen-go-api
- name: generate ts client - name: Generate TypeScript API Client
if: ${{ !inputs.release }} if: ${{ !inputs.release }}
run: make gen-client-ts run: make gen-client-ts
- name: Build Docker Image - name: Build Docker Image

View File

@ -1,5 +1,5 @@
# Re-usable workflow for a multi-architecture build # Re-usable workflow for a multi-architecture build
name: Multi-arch container build name: "Multi-arch container build"
on: on:
workflow_call: workflow_call:
@ -49,7 +49,7 @@ jobs:
shouldPush: ${{ steps.ev.outputs.shouldPush }} shouldPush: ${{ steps.ev.outputs.shouldPush }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: prepare variables - name: Prepare variables
uses: ./.github/actions/docker-push-variables uses: ./.github/actions/docker-push-variables
id: ev id: ev
env: env:
@ -69,7 +69,7 @@ jobs:
tag: ${{ fromJson(needs.get-tags.outputs.tags) }} tag: ${{ fromJson(needs.get-tags.outputs.tags) }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: prepare variables - name: Prepare variables
uses: ./.github/actions/docker-push-variables uses: ./.github/actions/docker-push-variables
id: ev id: ev
env: env:

View File

@ -1,4 +1,5 @@
name: authentik-api-py-publish name: "Python API Publish"
on: on:
push: push:
branches: [main] branches: [main]
@ -7,6 +8,7 @@ on:
workflow_dispatch: workflow_dispatch:
jobs: jobs:
build: build:
name: "Build and Publish"
if: ${{ github.repository != 'goauthentik/authentik-internal' }} if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: permissions:
@ -30,7 +32,7 @@ jobs:
uses: actions/setup-python@v5 uses: actions/setup-python@v5
with: with:
python-version-file: "pyproject.toml" python-version-file: "pyproject.toml"
- name: Generate API Client - name: Generate Python API Client
run: make gen-client-py run: make gen-client-py
- name: Publish package - name: Publish package
working-directory: gen-py-api/ working-directory: gen-py-api/

View File

@ -1,4 +1,4 @@
name: authentik-api-ts-publish name: "TypeScript API Publish"
on: on:
push: push:
branches: [main] branches: [main]
@ -7,6 +7,7 @@ on:
workflow_dispatch: workflow_dispatch:
jobs: jobs:
build: build:
name: "Build and Publish"
if: ${{ github.repository != 'goauthentik/authentik-internal' }} if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
@ -20,9 +21,9 @@ jobs:
token: ${{ steps.generate_token.outputs.token }} token: ${{ steps.generate_token.outputs.token }}
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
with: with:
node-version-file: web/package.json node-version-file: package.json
registry-url: "https://registry.npmjs.org" registry-url: "https://registry.npmjs.org"
- name: Generate API Client - name: Generate TypeScript API Client
run: make gen-client-ts run: make gen-client-ts
- name: Publish package - name: Publish package
working-directory: gen-ts-api/ working-directory: gen-ts-api/
@ -53,7 +54,6 @@ jobs:
signoff: true signoff: true
# ID from https://api.github.com/users/authentik-automation[bot] # ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@v3 - uses: peter-evans/enable-pull-request-automerge@v3
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ steps.generate_token.outputs.token }}

View File

@ -1,4 +1,4 @@
name: authentik-ci-aws-cfn name: "authentik CI AWS CloudFormation"
on: on:
push: push:
@ -18,6 +18,7 @@ env:
jobs: jobs:
check-changes-applied: check-changes-applied:
name: "Check changes applied"
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -36,6 +37,7 @@ jobs:
uv run make aws-cfn uv run make aws-cfn
git diff --exit-code git diff --exit-code
ci-aws-cfn-mark: ci-aws-cfn-mark:
name: "CI AWS CloudFormation Mark"
if: always() if: always()
needs: needs:
- check-changes-applied - check-changes-applied

View File

@ -1,5 +1,5 @@
--- ---
name: authentik-ci-main-daily name: "authentik CI Main Daily"
on: on:
workflow_dispatch: workflow_dispatch:
@ -9,15 +9,15 @@ on:
jobs: jobs:
test-container: test-container:
if: ${{ github.repository != 'goauthentik/authentik-internal' }} name: "Test Container ${{ matrix.version }}"
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
version: version:
- docs - docs
- version-2025-4
- version-2025-2 - version-2025-2
- version-2024-12
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- run: | - run: |

View File

@ -1,5 +1,5 @@
--- ---
name: authentik-ci-main name: "authentik CI Main"
on: on:
push: push:
@ -19,6 +19,7 @@ env:
jobs: jobs:
lint: lint:
name: "Lint"
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@ -33,9 +34,10 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Setup authentik env - name: Setup authentik env
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: run job - name: Run job ${{ matrix.job }}
run: uv run make ci-${{ matrix.job }} run: uv run make ci-${{ matrix.job }}
test-migrations: test-migrations:
name: "Test Migrations"
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -44,6 +46,7 @@ jobs:
- name: run migrations - name: run migrations
run: uv run python -m lifecycle.migrate run: uv run python -m lifecycle.migrate
test-make-seed: test-make-seed:
name: "Test Make Seed"
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- id: seed - id: seed
@ -52,7 +55,7 @@ jobs:
outputs: outputs:
seed: ${{ steps.seed.outputs.seed }} seed: ${{ steps.seed.outputs.seed }}
test-migrations-from-stable: test-migrations-from-stable:
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 name: "Test Migrations From Stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5"
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 20 timeout-minutes: 20
needs: test-make-seed needs: test-make-seed
@ -62,28 +65,31 @@ jobs:
psql: psql:
- 15-alpine - 15-alpine
- 16-alpine - 16-alpine
- 17-alpine
run_id: [1, 2, 3, 4, 5] run_id: [1, 2, 3, 4, 5]
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0
- name: checkout stable - name: Checkout Stable
run: | run: |
# Copy current, latest config to local # Copy current, latest config to local
# Temporarly comment the .github backup while migrating to uv
cp authentik/lib/default.yml local.env.yml cp authentik/lib/default.yml local.env.yml
cp -R .github .. # cp -R .github ..
cp -R scripts .. cp -R scripts ..
git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1) git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1)
rm -rf .github/ scripts/ # rm -rf .github/ scripts/
mv ../.github ../scripts . # mv ../.github ../scripts .
rm -rf scripts/
mv ../scripts .
- name: Setup authentik env (stable) - name: Setup authentik env (stable)
uses: ./.github/actions/setup uses: ./.github/actions/setup
with: with:
postgresql_version: ${{ matrix.psql }} postgresql_version: ${{ matrix.psql }}
- name: run migrations to stable continue-on-error: true
run: uv run python -m lifecycle.migrate - name: Run migrations to stable
- name: checkout current code run: poetry run python -m lifecycle.migrate
- name: Checkout current code
run: | run: |
set -x set -x
git fetch git fetch
@ -94,10 +100,10 @@ jobs:
uses: ./.github/actions/setup uses: ./.github/actions/setup
with: with:
postgresql_version: ${{ matrix.psql }} postgresql_version: ${{ matrix.psql }}
- name: migrate to latest - name: Migrate to latest
run: | run: |
uv run python -m lifecycle.migrate uv run python -m lifecycle.migrate
- name: run tests - name: Run tests
env: env:
# Test in the main database that we just migrated from the previous stable version # Test in the main database that we just migrated from the previous stable version
AUTHENTIK_POSTGRESQL__TEST__NAME: authentik AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
@ -107,7 +113,7 @@ jobs:
run: | run: |
uv run make ci-test uv run make ci-test
test-unittest: test-unittest:
name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5 name: "Unit tests - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5"
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 20 timeout-minutes: 20
needs: test-make-seed needs: test-make-seed
@ -117,7 +123,6 @@ jobs:
psql: psql:
- 15-alpine - 15-alpine
- 16-alpine - 16-alpine
- 17-alpine
run_id: [1, 2, 3, 4, 5] run_id: [1, 2, 3, 4, 5]
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -144,6 +149,7 @@ jobs:
file: unittest.xml file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}
test-integration: test-integration:
name: "Integration tests"
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 30 timeout-minutes: 30
steps: steps:
@ -152,7 +158,7 @@ jobs:
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: Create k8s Kind Cluster - name: Create k8s Kind Cluster
uses: helm/kind-action@v1.12.0 uses: helm/kind-action@v1.12.0
- name: run integration - name: Run integration
run: | run: |
uv run coverage run manage.py test tests/integration uv run coverage run manage.py test tests/integration
uv run coverage xml uv run coverage xml
@ -168,50 +174,50 @@ jobs:
file: unittest.xml file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}
test-e2e: test-e2e:
name: test-e2e (${{ matrix.job.name }}) name: "Test E2E (${{ matrix.job.name }})"
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 30 timeout-minutes: 30
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
job: job:
- name: proxy - name: Proxy Provider
glob: tests/e2e/test_provider_proxy* glob: tests/e2e/test_provider_proxy*
- name: oauth - name: OAuth2 Provider
glob: tests/e2e/test_provider_oauth2* tests/e2e/test_source_oauth* glob: tests/e2e/test_provider_oauth2* tests/e2e/test_source_oauth*
- name: oauth-oidc - name: OIDC Provider
glob: tests/e2e/test_provider_oidc* glob: tests/e2e/test_provider_oidc*
- name: saml - name: SAML Provider
glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml* glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml*
- name: ldap - name: LDAP Provider
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap* glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
- name: radius - name: RADIUS Provider
glob: tests/e2e/test_provider_radius* glob: tests/e2e/test_provider_radius*
- name: scim - name: SCIM Source
glob: tests/e2e/test_source_scim* glob: tests/e2e/test_source_scim*
- name: flows - name: Flows
glob: tests/e2e/test_flows* glob: tests/e2e/test_flows*
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Setup authentik env - name: Setup authentik env
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc) - name: Setup E2E env (chrome, etc)
run: | run: |
docker compose -f tests/e2e/docker-compose.yml up -d --quiet-pull docker compose -f tests/e2e/docker-compose.yml up -d --quiet-pull
- id: cache-web - id: cache-web
uses: actions/cache@v4 uses: actions/cache@v4
with: with:
path: web/dist path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b key: ${{ runner.os }}-web-${{ hashFiles('./package-lock.json', 'web/src/**') }}
- name: prepare web ui - name: Prepare Web UI
if: steps.cache-web.outputs.cache-hit != 'true' if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web
run: | run: |
npm ci npm ci
make -C .. gen-client-ts make gen-client-ts
npm run build npm run build -w @goauthentik/web
npm run build:sfe
- name: run e2e npm run typecheck
- name: Run E2E tests
run: | run: |
uv run coverage run manage.py test ${{ matrix.job.glob }} uv run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage xml uv run coverage xml
@ -227,6 +233,7 @@ jobs:
file: unittest.xml file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}
ci-core-mark: ci-core-mark:
name: "CI Core Mark"
if: always() if: always()
needs: needs:
- lint - lint
@ -241,21 +248,21 @@ jobs:
with: with:
jobs: ${{ toJSON(needs) }} jobs: ${{ toJSON(needs) }}
build: build:
name: "Build"
permissions: permissions:
# Needed to upload container images to ghcr.io # Needed to upload container images to ghcr.io
packages: write packages: write
# Needed for attestation # Needed for attestation
id-token: write id-token: write
attestations: write attestations: write
# Needed for checkout
contents: read
needs: ci-core-mark needs: ci-core-mark
uses: ./.github/workflows/_reusable-docker-build.yaml uses: ./.github/workflows/_reusable-docker-build.yaml
secrets: inherit secrets: inherit
with: with:
image_name: ${{ github.repository == 'goauthentik/authentik-internal' && 'ghcr.io/goauthentik/internal-server' || 'ghcr.io/goauthentik/dev-server' }} image_name: ghcr.io/goauthentik/dev-server
release: false release: false
pr-comment: pr-comment:
name: "PR Comment"
needs: needs:
- build - build
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -268,7 +275,7 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
ref: ${{ github.event.pull_request.head.sha }} ref: ${{ github.event.pull_request.head.sha }}
- name: prepare variables - name: Prepare variables
uses: ./.github/actions/docker-push-variables uses: ./.github/actions/docker-push-variables
id: ev id: ev
env: env:
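
The most intricate change in this workflow is the test-migrations-from-stable job, which first checks out the latest stable release tag while keeping the current CI scripts, runs the old migrations, and then returns to the branch under test. A condensed sketch of the checkout step as it reads on the upstream side of the diff (comments are from the workflow or explanatory):

      - name: checkout stable
        run: |
          # Copy current, latest config to local
          cp authentik/lib/default.yml local.env.yml
          cp -R .github ..
          cp -R scripts ..
          # switch to the newest non-RC version/* tag
          git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1)
          rm -rf .github/ scripts/
          mv ../.github ../scripts .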

View File

@ -1,5 +1,5 @@
--- ---
name: authentik-ci-outpost name: "authentik CI Outpost"
on: on:
push: push:
@ -14,6 +14,7 @@ on:
jobs: jobs:
lint-golint: lint-golint:
name: "Lint Go"
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -26,15 +27,16 @@ jobs:
mkdir -p web/dist mkdir -p web/dist
mkdir -p website/help mkdir -p website/help
touch web/dist/test website/help/test touch web/dist/test website/help/test
- name: Generate API - name: Generate Go API Client
run: make gen-client-go run: make gen-client-go
- name: golangci-lint - name: golangci-lint
uses: golangci/golangci-lint-action@v8 uses: golangci/golangci-lint-action@v7
with: with:
version: latest version: latest
args: --timeout 5000s --verbose args: --timeout 5000s --verbose
skip-cache: true skip-cache: true
test-unittest: test-unittest:
name: "Unit Test Go"
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -43,12 +45,13 @@ jobs:
go-version-file: "go.mod" go-version-file: "go.mod"
- name: Setup authentik env - name: Setup authentik env
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: Generate API - name: Generate Go API Client
run: make gen-client-go run: make gen-client-go
- name: Go unittests - name: Go unittests
run: | run: |
go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./... go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./...
ci-outpost-mark: ci-outpost-mark:
name: "CI Outpost Mark"
if: always() if: always()
needs: needs:
- lint-golint - lint-golint
@ -59,7 +62,7 @@ jobs:
with: with:
jobs: ${{ toJSON(needs) }} jobs: ${{ toJSON(needs) }}
build-container: build-container:
if: ${{ github.repository != 'goauthentik/authentik-internal' }} name: "Build Container"
timeout-minutes: 120 timeout-minutes: 120
needs: needs:
- ci-outpost-mark - ci-outpost-mark
@ -86,7 +89,7 @@ jobs:
uses: docker/setup-qemu-action@v3.6.0 uses: docker/setup-qemu-action@v3.6.0
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v3
- name: prepare variables - name: Prepare variables
uses: ./.github/actions/docker-push-variables uses: ./.github/actions/docker-push-variables
id: ev id: ev
env: env:
@ -100,7 +103,7 @@ jobs:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }} password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate API - name: Generate Go API Client
run: make gen-client-go run: make gen-client-go
- name: Build Docker Image - name: Build Docker Image
id: push id: push
@ -123,6 +126,7 @@ jobs:
subject-digest: ${{ steps.push.outputs.digest }} subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true push-to-registry: true
build-binary: build-binary:
name: "Build Binary"
timeout-minutes: 120 timeout-minutes: 120
needs: needs:
- ci-outpost-mark - ci-outpost-mark
@ -141,21 +145,22 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
ref: ${{ github.event.pull_request.head.sha }} ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v4
with:
node-version-file: package.json
cache: "npm"
cache-dependency-path: package-lock.json
- name: Install Node.js dependencies
run: npm ci
- uses: actions/setup-go@v5 - uses: actions/setup-go@v5
with: with:
go-version-file: "go.mod" go-version-file: "go.mod"
- uses: actions/setup-node@v4 - name: Generate Go API Client
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Generate API
run: make gen-client-go run: make gen-client-go
- name: Build web - name: Build web
working-directory: web/
run: | run: |
npm ci npm ci
npm run build-proxy npm run build-proxy -w @goauthentik/web
- name: Build outpost - name: Build outpost
run: | run: |
set -x set -x

View File

@ -1,4 +1,4 @@
name: authentik-ci-web name: CI Web UI
on: on:
push: push:
@ -13,54 +13,50 @@ on:
jobs: jobs:
lint: lint:
runs-on: ubuntu-latest name: Lint
strategy:
fail-fast: false
matrix:
command:
- lint
- lint:lockfile
- tsc
- prettier-check
project:
- web
include:
- command: tsc
project: web
- command: lit-analyse
project: web
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: ${{ matrix.project }}/package.json
cache: "npm"
cache-dependency-path: ${{ matrix.project }}/package-lock.json
- working-directory: ${{ matrix.project }}/
run: |
npm ci
- name: Generate API
run: make gen-client-ts
- name: Lint
working-directory: ${{ matrix.project }}/
run: npm run ${{ matrix.command }}
build:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
with: with:
node-version-file: web/package.json node-version-file: package.json
cache: "npm" cache: "npm"
cache-dependency-path: web/package-lock.json cache-dependency-path: package-lock.json
- working-directory: web/ - name: Install Node.js dependencies
run: npm ci run: npm ci
- name: Generate API - name: Generate TypeScript API
run: make gen-client-ts
- name: Build
run: |
npm run build -w @goauthentik/web
- name: Type check
run: |
npm run typecheck
- name: Lint
run: |
npm run lint -w @goauthentik/web
npm run lint:lockfile -w @goauthentik/web
npm run lit-analyse -w @goauthentik/web
build:
name: Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: package.json
cache: "npm"
cache-dependency-path: package-lock.json
- name: Install Node.js dependencies
run: npm ci
- name: Generate TypeScript API
run: make gen-client-ts run: make gen-client-ts
- name: build - name: build
working-directory: web/ run: |
run: npm run build npm run build -w @goauthentik/web
npm run typecheck
ci-web-mark: ci-web-mark:
name: CI Web Mark
if: always() if: always()
needs: needs:
- build - build
@ -71,6 +67,7 @@ jobs:
with: with:
jobs: ${{ toJSON(needs) }} jobs: ${{ toJSON(needs) }}
test: test:
name: Test
needs: needs:
- ci-web-mark - ci-web-mark
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -78,13 +75,12 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
with: with:
node-version-file: web/package.json node-version-file: package.json
cache: "npm" cache: "npm"
cache-dependency-path: web/package-lock.json cache-dependency-path: package-lock.json
- working-directory: web/ - name: Install Node.js dependencies
run: npm ci run: npm ci
- name: Generate API - name: Generate TypeScript API
run: make gen-client-ts run: make gen-client-ts
- name: test - name: Test Web UI
working-directory: web/ run: npm run test -w @goauthentik/web || exit 0
run: npm run test || exit 0
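
On the workspace-based side of this diff, every web job installs from the repository root and then targets an individual package with npm's -w flag. A minimal sketch of that step sequence, assuming a root package.json that declares the workspaces (script names as they appear above):

      - name: Install Node.js dependencies
        run: npm ci                             # single install for all workspaces
      - name: Generate TypeScript API
        run: make gen-client-ts
      - name: Build and check
        run: |
          npm run build -w @goauthentik/web     # run the web workspace's build script only
          npm run typecheck                     # root-level script covering the whole tree
          npm run lint -w @goauthentik/web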

View File

@ -1,4 +1,4 @@
name: authentik-ci-website name: CI Docs Website
on: on:
push: push:
@ -13,114 +13,66 @@ on:
jobs: jobs:
lint: lint:
name: "Lint"
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
command:
- lint:lockfile
- prettier-check
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- working-directory: website/ - uses: actions/setup-node@v4
run: npm ci with:
- name: Lint node-version-file: package.json
working-directory: website/ cache: "npm"
run: npm run ${{ matrix.command }} cache-dependency-path: package-lock.json
- name: Install Node.js dependencies
run: |
npm ci
- name: Generate TypeScript API
run: make gen-client-ts
- name: Lint Docs
run: |
npm run lint:prettier:check
npm run lint:lockfile -w @goauthentik/docs
test: test:
name: "Test Docs"
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
with: with:
node-version-file: website/package.json node-version-file: package.json
cache: "npm" cache: "npm"
cache-dependency-path: website/package-lock.json cache-dependency-path: package-lock.json
- working-directory: website/ - name: Install Node.js dependencies
run: npm ci run: |
- name: test npm ci
working-directory: website/ - name: Generate TypeScript API
run: npm test run: make gen-client-ts
- name: Test Docs
run: |
npm run test -w @goauthentik/docs
build: build:
name: "Build Docs"
runs-on: ubuntu-latest runs-on: ubuntu-latest
name: ${{ matrix.job }}
strategy:
fail-fast: false
matrix:
job:
- build
- build:integrations
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
with: with:
node-version-file: website/package.json node-version-file: package.json
cache: "npm" cache: "npm"
cache-dependency-path: website/package-lock.json cache-dependency-path: package-lock.json
- working-directory: website/ - name: Install Node.js dependencies
run: npm ci run: npm ci
- name: build - name: Build
working-directory: website/ run: |
run: npm run ${{ matrix.job }} npm run build -w @goauthentik/docs
build-container:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.6.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/dev-docs
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
id: push
uses: docker/build-push-action@v6
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
platforms: linux/amd64,linux/arm64
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }}
- uses: actions/attest-build-provenance@v2
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
ci-website-mark: ci-website-mark:
name: "CI Website Mark"
if: always() if: always()
needs: needs:
- lint - lint
- test - test
- build - build
- build-container
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: re-actors/alls-green@release/v1 - uses: re-actors/alls-green@release/v1
with: with:
jobs: ${{ toJSON(needs) }} jobs: ${{ toJSON(needs) }}
allowed-skips: ${{ github.repository == 'goauthentik/authentik-internal' && 'build-container' || '[]' }}
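
Both sides of this workflow end in an aggregate "mark" job built on re-actors/alls-green, which fails unless every needed job succeeded; the side that retains the container build also whitelists it on the internal mirror. A minimal sketch of that gate, with values taken from the diff:

  ci-website-mark:
    if: always()
    needs:
      - lint
      - test
      - build
      - build-container
    runs-on: ubuntu-latest
    steps:
      - uses: re-actors/alls-green@release/v1
        with:
          jobs: ${{ toJSON(needs) }}
          # jobs listed here may be skipped without failing the gate
          allowed-skips: ${{ github.repository == 'goauthentik/authentik-internal' && 'build-container' || '[]' }}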

View File

@ -2,7 +2,7 @@ name: "CodeQL"
on: on:
push: push:
branches: [main, next, version*] branches: [main, "*", next, version*]
pull_request: pull_request:
branches: [main] branches: [main]
schedule: schedule:
@ -10,7 +10,7 @@ on:
jobs: jobs:
analyze: analyze:
name: Analyze name: "Analyze"
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: permissions:
actions: read actions: read

View File

@ -1,4 +1,4 @@
name: authentik-gen-update-webauthn-mds name: "authentik CI Update WebAuthn MDS"
on: on:
workflow_dispatch: workflow_dispatch:
schedule: schedule:
@ -11,6 +11,7 @@ env:
jobs: jobs:
build: build:
name: "Update WebAuthn MDS"
if: ${{ github.repository != 'goauthentik/authentik-internal' }} if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
@ -37,7 +38,6 @@ jobs:
signoff: true signoff: true
# ID from https://api.github.com/users/authentik-automation[bot] # ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@v3 - uses: peter-evans/enable-pull-request-automerge@v3
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ steps.generate_token.outputs.token }}

View File

@ -1,6 +1,6 @@
--- ---
# See https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#force-deleting-cache-entries # See https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#force-deleting-cache-entries
name: Cleanup cache after PR is closed name: "Post-PR Closed Cache Cleanup"
on: on:
pull_request: pull_request:
types: types:
@ -12,6 +12,7 @@ permissions:
jobs: jobs:
cleanup: cleanup:
name: "Cleanup Cache"
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Check out code - name: Check out code

View File

@ -1,4 +1,4 @@
name: ghcr-retention name: "authentik GHCR Retention Policy"
on: on:
# schedule: # schedule:
@ -8,7 +8,7 @@ on:
jobs: jobs:
clean-ghcr: clean-ghcr:
if: ${{ github.repository != 'goauthentik/authentik-internal' }} if: ${{ github.repository != 'goauthentik/authentik-internal' }}
name: Delete old unused container images name: "Delete old unused container images"
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- id: generate_token - id: generate_token

View File

@ -1,5 +1,5 @@
--- ---
name: authentik-compress-images name: "authentik CI Image Compression"
on: on:
push: push:
@ -20,7 +20,7 @@ on:
jobs: jobs:
compress: compress:
name: compress name: "Compress Docker images"
runs-on: ubuntu-latest runs-on: ubuntu-latest
# Don't run on forks. Token will not be available. Will run on main and open a PR anyway # Don't run on forks. Token will not be available. Will run on main and open a PR anyway
if: | if: |
@ -53,7 +53,6 @@ jobs:
body: ${{ steps.compress.outputs.markdown }} body: ${{ steps.compress.outputs.markdown }}
delete-branch: true delete-branch: true
signoff: true signoff: true
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@v3 - uses: peter-evans/enable-pull-request-automerge@v3
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}" if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
with: with:

View File

@ -3,11 +3,10 @@ on:
push: push:
branches: [main] branches: [main]
paths: paths:
- packages/docusaurus-config/** - packages/docusaurus-config
- packages/eslint-config/** - packages/eslint-config
- packages/prettier-config/** - packages/prettier-config
- packages/tsconfig/** - packages/tsconfig
- web/packages/esbuild-plugin-live-reload/**
workflow_dispatch: workflow_dispatch:
jobs: jobs:
publish: publish:
@ -17,28 +16,27 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
package: package:
- packages/docusaurus-config - docusaurus-config
- packages/eslint-config - eslint-config
- packages/prettier-config - prettier-config
- packages/tsconfig - tsconfig
- web/packages/esbuild-plugin-live-reload
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
fetch-depth: 2 fetch-depth: 2
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
with: with:
node-version-file: ${{ matrix.package }}/package.json node-version-file: packages/${{ matrix.package }}/package.json
registry-url: "https://registry.npmjs.org" registry-url: "https://registry.npmjs.org"
- name: Get changed files - name: Get changed files
id: changed-files id: changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
with: with:
files: | files: |
${{ matrix.package }}/package.json packages/${{ matrix.package }}/package.json
- name: Publish package - name: Publish package
if: steps.changed-files.outputs.any_changed == 'true' if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ${{ matrix.package }} working-directory: packages/${{ matrix.package}}
run: | run: |
npm ci npm ci
npm run build npm run build
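
Pieced together from the fragments above, one side of this workflow publishes each shared package only when its manifest changed in the push. A minimal sketch of the publish job following the full-path matrix layout (the runner label is assumed; the npm publish command itself falls outside the visible excerpt):

  publish:
    runs-on: ubuntu-latest                    # assumed; not visible in this excerpt
    strategy:
      fail-fast: false
      matrix:
        package:
          - packages/docusaurus-config
          - packages/eslint-config
          - packages/prettier-config
          - packages/tsconfig
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 2                      # lets changed-files diff against the previous commit
      - uses: actions/setup-node@v4
        with:
          node-version-file: ${{ matrix.package }}/package.json
          registry-url: "https://registry.npmjs.org"
      - name: Get changed files
        id: changed-files
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
        with:
          files: |
            ${{ matrix.package }}/package.json
      - name: Publish package
        if: steps.changed-files.outputs.any_changed == 'true'
        working-directory: ${{ matrix.package }}
        run: |
          npm ci
          npm run build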

View File

@ -1,4 +1,4 @@
name: authentik-publish-source-docs name: "authentik Publish Source Docs"
on: on:
push: push:
@ -12,6 +12,7 @@ env:
jobs: jobs:
publish-source-docs: publish-source-docs:
name: "Publish"
if: ${{ github.repository != 'goauthentik/authentik-internal' }} if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 120 timeout-minutes: 120
@ -19,11 +20,11 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Setup authentik env - name: Setup authentik env
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: generate docs - name: Generate docs
run: | run: |
uv run make migrate uv run make migrate
uv run ak build_source_docs uv run ak build_source_docs
- name: Publish - name: Deploy to Netlify
uses: netlify/actions/cli@master uses: netlify/actions/cli@master
with: with:
args: deploy --dir=source_docs --prod args: deploy --dir=source_docs --prod

View File

@ -1,4 +1,4 @@
name: authentik-on-release-next-branch name: "authentik on Release Next Branch"
on: on:
schedule: schedule:
@ -11,6 +11,7 @@ permissions:
jobs: jobs:
update-next: update-next:
name: "Update Next Branch"
if: ${{ github.repository != 'goauthentik/authentik-internal' }} if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
environment: internal-production environment: internal-production

View File

@ -1,5 +1,5 @@
--- ---
name: authentik-on-release name: "Release publish"
on: on:
release: release:
@ -7,6 +7,7 @@ on:
jobs: jobs:
build-server: build-server:
name: "Build server"
uses: ./.github/workflows/_reusable-docker-build.yaml uses: ./.github/workflows/_reusable-docker-build.yaml
secrets: inherit secrets: inherit
permissions: permissions:
@ -20,50 +21,8 @@ jobs:
release: true release: true
registry_dockerhub: true registry_dockerhub: true
registry_ghcr: true registry_ghcr: true
build-docs:
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.6.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/docs
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
id: push
uses: docker/build-push-action@v6
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile
push: true
platforms: linux/amd64,linux/arm64
context: .
- uses: actions/attest-build-provenance@v2
id: attest
if: true
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
build-outpost: build-outpost:
name: "Build outpost"
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: permissions:
# Needed to upload container images to ghcr.io # Needed to upload container images to ghcr.io
@ -88,14 +47,14 @@ jobs:
uses: docker/setup-qemu-action@v3.6.0 uses: docker/setup-qemu-action@v3.6.0
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v3
- name: prepare variables - name: Prepare variables
uses: ./.github/actions/docker-push-variables uses: ./.github/actions/docker-push-variables
id: ev id: ev
env: env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with: with:
image-name: ghcr.io/goauthentik/${{ matrix.type }},beryju/authentik-${{ matrix.type }} image-name: ghcr.io/goauthentik/${{ matrix.type }},beryju/authentik-${{ matrix.type }}
- name: make empty clients - name: Make empty clients
run: | run: |
mkdir -p ./gen-ts-api mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api mkdir -p ./gen-go-api
@ -128,6 +87,7 @@ jobs:
subject-digest: ${{ steps.push.outputs.digest }} subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true push-to-registry: true
build-outpost-binary: build-outpost-binary:
name: "Build outpost binary"
timeout-minutes: 120 timeout-minutes: 120
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: permissions:
@ -149,14 +109,13 @@ jobs:
go-version-file: "go.mod" go-version-file: "go.mod"
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
with: with:
node-version-file: web/package.json node-version-file: package.json
cache: "npm" cache: "npm"
cache-dependency-path: web/package-lock.json cache-dependency-path: package-lock.json
- name: Build web - name: Build web
working-directory: web/
run: | run: |
npm ci npm ci
npm run build-proxy npm run build-proxy -w @goauthentik/web
- name: Build outpost - name: Build outpost
run: | run: |
set -x set -x
@ -172,6 +131,7 @@ jobs:
asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
tag: ${{ github.ref }} tag: ${{ github.ref }}
upload-aws-cfn-template: upload-aws-cfn-template:
name: "Upload AWS CloudFormation template"
permissions: permissions:
# Needed for AWS login # Needed for AWS login
id-token: write id-token: write
@ -193,6 +153,7 @@ jobs:
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml
test-release: test-release:
name: "Test release"
needs: needs:
- build-server - build-server
- build-outpost - build-outpost
@ -209,6 +170,7 @@ jobs:
docker compose start postgresql redis docker compose start postgresql redis
docker compose run -u root server test-all docker compose run -u root server test-all
sentry-release: sentry-release:
name: "Sentry release"
needs: needs:
- build-server - build-server
- build-outpost - build-outpost
@ -216,7 +178,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: prepare variables - name: Prepare variables
uses: ./.github/actions/docker-push-variables uses: ./.github/actions/docker-push-variables
id: ev id: ev
env: env:
@ -236,6 +198,6 @@ jobs:
SENTRY_ORG: authentik-security-inc SENTRY_ORG: authentik-security-inc
SENTRY_PROJECT: authentik SENTRY_PROJECT: authentik
with: with:
release: authentik@${{ steps.ev.outputs.version }} version: authentik@${{ steps.ev.outputs.version }}
sourcemaps: "./web/dist" sourcemaps: "./web/dist"
url_prefix: "~/static/dist" url_prefix: "~/static/dist"

View File

@ -1,5 +1,5 @@
--- ---
name: authentik-on-tag name: "authentik on Tag Release"
on: on:
push: push:
@ -8,7 +8,7 @@ on:
jobs: jobs:
build: build:
name: Create Release from Tag name: "Create Release from Tag"
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -20,7 +20,7 @@ jobs:
with: with:
app_id: ${{ secrets.GH_APP_ID }} app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }} private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: prepare variables - name: Prepare variables
uses: ./.github/actions/docker-push-variables uses: ./.github/actions/docker-push-variables
id: ev id: ev
env: env:

View File

@ -1,21 +0,0 @@
name: "authentik-repo-mirror-cleanup"
on:
workflow_dispatch:
jobs:
to_internal:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- if: ${{ env.MIRROR_KEY != '' }}
uses: BeryJu/repository-mirroring-action@5cf300935bc2e068f73ea69bcc411a8a997208eb
with:
target_repo_url: git@github.com:goauthentik/authentik-internal.git
ssh_private_key: ${{ secrets.GH_MIRROR_KEY }}
args: --tags --force --prune
env:
MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }}

View File

@ -1,20 +1,23 @@
name: "authentik-repo-mirror" name: "authentik Repository Mirror"
on: [push, delete] on: [push, delete]
jobs: jobs:
to_internal: to_internal:
name: "Mirror to internal repository"
if: ${{ github.repository != 'goauthentik/authentik-internal' }} if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
name: "Checkout repository"
with: with:
fetch-depth: 0 fetch-depth: 0
- if: ${{ env.MIRROR_KEY != '' }} - if: ${{ env.MIRROR_KEY != '' }}
uses: BeryJu/repository-mirroring-action@5cf300935bc2e068f73ea69bcc411a8a997208eb uses: pixta-dev/repository-mirroring-action@v1
with: with:
target_repo_url: git@github.com:goauthentik/authentik-internal.git target_repo_url: git@github.com:goauthentik/authentik-internal.git
ssh_private_key: ${{ secrets.GH_MIRROR_KEY }} ssh_private_key: ${{ secrets.GH_MIRROR_KEY }}
args: --tags --force
env: env:
MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }} MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }}

View File

@ -1,4 +1,4 @@
name: "authentik-repo-stale" name: "authentik Repository Stale Issues"
on: on:
schedule: schedule:
@ -11,6 +11,7 @@ permissions:
jobs: jobs:
stale: stale:
name: "Stale Issues"
if: ${{ github.repository != 'goauthentik/authentik-internal' }} if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:

View File

@ -1,4 +1,4 @@
name: authentik-semgrep name: "authentik CI Semgrep"
on: on:
workflow_dispatch: {} workflow_dispatch: {}
pull_request: {} pull_request: {}
@ -13,7 +13,7 @@ on:
- cron: '12 15 * * *' - cron: '12 15 * * *'
jobs: jobs:
semgrep: semgrep:
name: semgrep/ci name: "semgrep/ci"
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: permissions:
contents: read contents: read

View File

@ -1,4 +1,4 @@
name: authentik-translation-advice name: "authentik Translations Advice"
on: on:
pull_request: pull_request:
@ -16,6 +16,7 @@ permissions:
jobs: jobs:
post-comment: post-comment:
name: "Post Comment"
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Find Comment - name: Find Comment

View File

@ -1,5 +1,5 @@
--- ---
name: authentik-translate-extract-compile name: "authentik Extract & Compile Translations"
on: on:
schedule: schedule:
- cron: "0 0 * * *" # every day at midnight - cron: "0 0 * * *" # every day at midnight
@ -16,7 +16,7 @@ env:
jobs: jobs:
compile: compile:
if: ${{ github.repository != 'goauthentik/authentik-internal' }} name: "Compile Translations"
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- id: generate_token - id: generate_token
@ -33,15 +33,20 @@ jobs:
if: ${{ github.event_name == 'pull_request' }} if: ${{ github.event_name == 'pull_request' }}
- name: Setup authentik env - name: Setup authentik env
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: Generate API - name: Generate TypeScript API
run: make gen-client-ts run: make gen-client-ts
- name: run extract - name: Extract Translations
run: | run: |
uv run make i18n-extract uv run make i18n-extract
- name: run compile - name: Build Docs Site
run: npm run build-bundled -w @goauthentik/docs
- name: Build Web UI
run: npm run build -w @goauthentik/web
- name: Type check
run: npm run typecheck
- name: Compile Messages
run: | run: |
uv run ak compilemessages uv run ak compilemessages
make web-check-compile
- name: Create Pull Request - name: Create Pull Request
if: ${{ github.event_name != 'pull_request' }} if: ${{ github.event_name != 'pull_request' }}
uses: peter-evans/create-pull-request@v7 uses: peter-evans/create-pull-request@v7
@ -53,6 +58,3 @@ jobs:
body: "core, web: update translations" body: "core, web: update translations"
delete-branch: true delete-branch: true
signoff: true signoff: true
labels: dependencies
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>

View File

@ -1,6 +1,6 @@
# Rename transifex pull requests to have a correct naming # Rename transifex pull requests to have a correct naming
# Also enables auto squash-merge # Also enables auto squash-merge
name: authentik-translation-transifex-rename name: "authentik Translations Transifex PR Rename"
on: on:
pull_request: pull_request:
@ -12,10 +12,10 @@ permissions:
jobs: jobs:
rename_pr: rename_pr:
name: "Rename PR"
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}} if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
steps: steps:
- uses: actions/checkout@v4
- id: generate_token - id: generate_token
uses: tibdex/github-app-token@v2 uses: tibdex/github-app-token@v2
with: with:
@ -26,13 +26,23 @@ jobs:
env: env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }} GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: | run: |
title=$(gh pr view ${{ github.event.pull_request.number }} --json "title" -q ".title") title=$(curl -q -L \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title)
echo "title=${title}" >> "$GITHUB_OUTPUT" echo "title=${title}" >> "$GITHUB_OUTPUT"
- name: Rename - name: Rename
env: env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }} GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: | run: |
gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies curl -L \
-X PATCH \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \
-d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}"
- uses: peter-evans/enable-pull-request-automerge@v3 - uses: peter-evans/enable-pull-request-automerge@v3
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ steps.generate_token.outputs.token }}
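
The two variants of this workflow do the same thing in different ways: one reads and renames the pull request with the gh CLI, the other issues the equivalent REST calls with curl. A minimal sketch of the CLI variant visible above (the step id "title" is inferred from the later reference to steps.title):

      - id: title
        env:
          GH_TOKEN: ${{ steps.generate_token.outputs.token }}
        run: |
          title=$(gh pr view ${{ github.event.pull_request.number }} --json "title" -q ".title")
          echo "title=${title}" >> "$GITHUB_OUTPUT"
      - name: Rename
        env:
          GH_TOKEN: ${{ steps.generate_token.outputs.token }}
        run: |
          gh pr edit ${{ github.event.pull_request.number }} \
            -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies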

.gitignore (vendored): 23 changed lines

@ -217,3 +217,26 @@ source_docs/
### Docker ### ### Docker ###
docker-compose.override.yml docker-compose.override.yml
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
node_modules/
tsconfig.tsbuildinfo
# Wireit's cache
.wireit
custom-elements.json
### Development ###
.drafts

View File

@ -4,12 +4,16 @@
**/LICENSE **/LICENSE
authentik/stages/**/* authentik/stages/**/*
authentik/sources/**/*
schemas/**/*
blueprints/**/*
## Build asset directories ## Build asset directories
coverage coverage
dist dist
out out
.docusaurus .docusaurus
.wireit
website/docs/developer-docs/api/**/* website/docs/developer-docs/api/**/*
## Environment ## Environment
@ -32,14 +36,15 @@ coverage
# Templates # Templates
# TODO: Rename affected files to *.template.* or similar. # TODO: Rename affected files to *.template.* or similar.
authentik/**/*.html
*.html *.html
*.mdx *.mdx
*.md *.md
## Import order matters ## Import order matters
poly.ts web/src/poly.ts
src/locale-codes.ts web/src/locale-codes.ts
src/locales/ web/src/locales/
# Storybook # Storybook
storybook-static/ storybook-static/

View File

@ -17,6 +17,6 @@
"ms-python.vscode-pylance", "ms-python.vscode-pylance",
"redhat.vscode-yaml", "redhat.vscode-yaml",
"Tobermory.es6-string-html", "Tobermory.es6-string-html",
"unifiedjs.vscode-mdx", "unifiedjs.vscode-mdx"
] ]
} }

.vscode/settings.json (vendored): 72 changed lines

@ -6,15 +6,13 @@
"!Context scalar", "!Context scalar",
"!Enumerate sequence", "!Enumerate sequence",
"!Env scalar", "!Env scalar",
"!Env sequence",
"!Find sequence", "!Find sequence",
"!Format sequence", "!Format sequence",
"!If sequence", "!If sequence",
"!Index scalar", "!Index scalar",
"!KeyOf scalar", "!KeyOf scalar",
"!Value scalar", "!Value scalar",
"!AtIndex scalar", "!AtIndex scalar"
"!ParseJSON scalar"
], ],
"typescript.preferences.importModuleSpecifier": "non-relative", "typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.importModuleSpecifierEnding": "index", "typescript.preferences.importModuleSpecifierEnding": "index",
@ -32,5 +30,71 @@
} }
], ],
"go.testFlags": ["-count=1"], "go.testFlags": ["-count=1"],
"github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"] "github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"],
"eslint.useFlatConfig": true,
"explorer.fileNesting.enabled": true,
"explorer.fileNesting.patterns": {
"*.mjs": "*.d.mts",
"*.cjs": "*.d.cts",
"package.json": "package-lock.json, yarn.lock, .yarnrc, .yarnrc.yml, .yarn, .nvmrc, .node-version",
"tsconfig.json": "tsconfig.*.json, jsconfig.json",
"Dockerfile": "*.Dockerfile"
},
"search.exclude": {
"**/node_modules": true,
"**/*.code-search": true,
"**/dist": true,
"**/out": true,
"**/package-lock.json": true
},
"[css]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[javascriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[markdown]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[shellscript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[typescriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[django-html]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"editor.codeActionsOnSave": {
"source.removeUnusedImports": "explicit"
},
// We use Prettier for formatting, but specifying these settings
// will ensure that VS Code's IntelliSense doesn't autocomplete unformatted code.
"javascript.format.semicolons": "insert",
"typescript.format.semicolons": "insert",
"javascript.preferences.quoteStyle": "double",
"typescript.preferences.quoteStyle": "double",
"github.copilot.enable": {
"*": true,
"plaintext": true,
"markdown": true,
"scminput": false,
"csv": false,
"json": true,
"yaml": true
}
} }

.vscode/tasks.json (vendored): 40 changed lines

@ -4,12 +4,7 @@
{ {
"label": "authentik/core: make", "label": "authentik/core: make",
"command": "uv", "command": "uv",
"args": [ "args": ["run", "make", "lint-fix", "lint"],
"run",
"make",
"lint-fix",
"lint"
],
"presentation": { "presentation": {
"panel": "new" "panel": "new"
}, },
@ -18,11 +13,7 @@
{ {
"label": "authentik/core: run", "label": "authentik/core: run",
"command": "uv", "command": "uv",
"args": [ "args": ["run", "ak", "server"],
"run",
"ak",
"server"
],
"group": "build", "group": "build",
"presentation": { "presentation": {
"panel": "dedicated", "panel": "dedicated",
@ -32,17 +23,13 @@
{ {
"label": "authentik/web: make", "label": "authentik/web: make",
"command": "make", "command": "make",
"args": [ "args": ["web"],
"web"
],
"group": "build" "group": "build"
}, },
{ {
"label": "authentik/web: watch", "label": "authentik/web: watch",
"command": "make", "command": "make",
"args": [ "args": ["web-watch"],
"web-watch"
],
"group": "build", "group": "build",
"presentation": { "presentation": {
"panel": "dedicated", "panel": "dedicated",
@ -52,26 +39,19 @@
{ {
"label": "authentik: install", "label": "authentik: install",
"command": "make", "command": "make",
"args": [ "args": ["install", "-j4"],
"install",
"-j4"
],
"group": "build" "group": "build"
}, },
{ {
"label": "authentik/website: make", "label": "authentik/website: make",
"command": "make", "command": "make",
"args": [ "args": ["website"],
"website"
],
"group": "build" "group": "build"
}, },
{ {
"label": "authentik/website: watch", "label": "authentik/website: watch",
"command": "make", "command": "make",
"args": [ "args": ["website-watch"],
"website-watch"
],
"group": "build", "group": "build",
"presentation": { "presentation": {
"panel": "dedicated", "panel": "dedicated",
@ -81,11 +61,7 @@
{ {
"label": "authentik/api: generate", "label": "authentik/api: generate",
"command": "uv", "command": "uv",
"args": [ "args": ["run", "make", "gen"],
"run",
"make",
"gen"
],
"group": "build" "group": "build"
} }
] ]


@ -1,30 +1,31 @@
# syntax=docker/dockerfile:1 # syntax=docker/dockerfile:1
# Stage 1: Build webui # Stage 1 Web UI and Documentation build
FROM --platform=${BUILDPLATFORM} docker.io/library/node:24-slim AS node-builder
FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder
ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
ENV NODE_ENV=production ENV NODE_ENV=production
WORKDIR /work/web WORKDIR /work
RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \ COPY ./package.json ./package.json
--mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \ COPY ./package-lock.json ./package-lock.json
--mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \ COPY ./packages ./packages
--mount=type=bind,target=/work/web/scripts,src=./web/scripts \ COPY ./web ./web
--mount=type=cache,id=npm-ak,sharing=shared,target=/root/.npm \ COPY ./website ./website
npm ci --include=dev
COPY ./package.json /work COPY ./gen-ts-api ./gen-ts-api
COPY ./web /work/web/ COPY ./blueprints ./blueprints
COPY ./website /work/website/ COPY ./schema.yml ./schema.yml
COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api COPY ./SECURITY.md ./SECURITY.md
RUN npm run build && \ RUN --mount=type=cache,target=/root/.npm npm ci --include=dev
npm run build:sfe
RUN npm run build-bundled -w @goauthentik/docs
RUN npm run build -w @goauthentik/web
# Stage 2: Build go proxy # Stage 2: Build go proxy
FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder
ARG TARGETOS ARG TARGETOS
@ -49,8 +50,8 @@ RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
COPY ./cmd /go/src/goauthentik.io/cmd COPY ./cmd /go/src/goauthentik.io/cmd
COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib
COPY ./web/static.go /go/src/goauthentik.io/web/static.go COPY ./web/static.go /go/src/goauthentik.io/web/static.go
COPY --from=node-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt COPY --from=web-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
COPY --from=node-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt COPY --from=web-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt
COPY ./internal /go/src/goauthentik.io/internal COPY ./internal /go/src/goauthentik.io/internal
COPY ./go.mod /go/src/goauthentik.io/go.mod COPY ./go.mod /go/src/goauthentik.io/go.mod
COPY ./go.sum /go/src/goauthentik.io/go.sum COPY ./go.sum /go/src/goauthentik.io/go.sum
@ -62,22 +63,25 @@ RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
go build -o /go/authentik ./cmd/server go build -o /go/authentik ./cmd/server
# Stage 3: MaxMind GeoIP # Stage 3: MaxMind GeoIP
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN" ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
ENV GEOIPUPDATE_VERBOSE="1" ENV GEOIPUPDATE_VERBOSE="1"
ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID" ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"
USER root USER root
RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
--mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \ --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
mkdir -p /usr/share/GeoIP && \ mkdir -p /usr/share/GeoIP && \
/bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
# Stage 4: Download uv # Stage 4: Download uv
FROM ghcr.io/astral-sh/uv:0.7.17 AS uv FROM ghcr.io/astral-sh/uv:0.6.14 AS uv
# Stage 5: Base python image # Stage 5: Base python image
FROM ghcr.io/goauthentik/fips-python:3.13.5-slim-bookworm-fips AS python-base FROM ghcr.io/goauthentik/fips-python:3.12.10-slim-bookworm-fips AS python-base
ENV VENV_PATH="/ak-root/.venv" \ ENV VENV_PATH="/ak-root/.venv" \
PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \ PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
@ -168,8 +172,9 @@ COPY ./lifecycle/ /lifecycle
COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
COPY --from=go-builder /go/authentik /bin/authentik COPY --from=go-builder /go/authentik /bin/authentik
COPY --from=python-deps /ak-root/.venv /ak-root/.venv COPY --from=python-deps /ak-root/.venv /ak-root/.venv
COPY --from=node-builder /work/web/dist/ /web/dist/ COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=node-builder /work/web/authentik/ /web/authentik/ COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=web-builder /work/website/build/ /website/help/
COPY --from=geoip /usr/share/GeoIP /geoip COPY --from=geoip /usr/share/GeoIP /geoip
USER 1000 USER 1000

Makefile

@ -1,7 +1,6 @@
.PHONY: gen dev-reset all clean test web website .PHONY: gen dev-reset all clean test web website
SHELL := /usr/bin/env bash .SHELLFLAGS += ${SHELLFLAGS} -e
.SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail
PWD = $(shell pwd) PWD = $(shell pwd)
UID = $(shell id -u) UID = $(shell id -u)
GID = $(shell id -g) GID = $(shell id -g)
@ -9,9 +8,9 @@ NPM_VERSION = $(shell python -m scripts.generate_semver)
PY_SOURCES = authentik tests scripts lifecycle .github PY_SOURCES = authentik tests scripts lifecycle .github
DOCKER_IMAGE ?= "authentik:test" DOCKER_IMAGE ?= "authentik:test"
GEN_API_TS = gen-ts-api GEN_API_TS = "gen-ts-api"
GEN_API_PY = gen-py-api GEN_API_PY = "gen-py-api"
GEN_API_GO = gen-go-api GEN_API_GO = "gen-go-api"
pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null) pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null)
pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null) pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null)
@ -37,6 +36,13 @@ test: ## Run the server tests and produce a coverage report (locally)
uv run coverage html uv run coverage html
uv run coverage report uv run coverage report
node-check-compile: ## Check and compile the TypeScript source code
npm run typecheck
node-lint-fix: ## Lint and automatically fix errors in the javascript source code
lint-codespell
npm run lint:fix
lint-fix: lint-codespell ## Lint and automatically fix errors in the python source code. Reports spelling errors. lint-fix: lint-codespell ## Lint and automatically fix errors in the python source code. Reports spelling errors.
uv run black $(PY_SOURCES) uv run black $(PY_SOURCES)
uv run ruff check --fix $(PY_SOURCES) uv run ruff check --fix $(PY_SOURCES)
@ -48,9 +54,6 @@ lint: ## Lint the python and golang sources
uv run bandit -c pyproject.toml -r $(PY_SOURCES) uv run bandit -c pyproject.toml -r $(PY_SOURCES)
golangci-lint run -v golangci-lint run -v
core-install:
uv sync --frozen
migrate: ## Run the Authentik Django server's migrations migrate: ## Run the Authentik Django server's migrations
uv run python -m lifecycle.migrate uv run python -m lifecycle.migrate
@ -73,7 +76,9 @@ core-i18n-extract:
--ignore website \ --ignore website \
-l en -l en
install: web-install website-install core-install ## Install all requires dependencies for `web`, `website` and `core` install: ## Install all requires dependencies for `web`, `website` and `core`
npm ci
uv sync --frozen
dev-drop-db: dev-drop-db:
dropdb -U ${pg_user} -h ${pg_host} ${pg_name} dropdb -U ${pg_user} -h ${pg_host} ${pg_name}
@ -86,10 +91,6 @@ dev-create-db:
dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state. dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.
update-test-mmdb: ## Update test GeoIP and ASN Databases
curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-ASN-Test.mmdb -o ${PWD}/tests/GeoLite2-ASN-Test.mmdb
curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-City-Test.mmdb -o ${PWD}/tests/GeoLite2-City-Test.mmdb
######################### #########################
## API Schema ## API Schema
######################### #########################
@ -98,7 +99,8 @@ gen-build: ## Extract the schema from the database
AUTHENTIK_DEBUG=true \ AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \ AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
uv run ak make_blueprint_schema --file blueprints/schema.json uv run ak make_blueprint_schema > blueprints/schema.json
AUTHENTIK_DEBUG=true \ AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \ AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \ AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
@ -106,35 +108,35 @@ gen-build: ## Extract the schema from the database
gen-changelog: ## (Release) generate the changelog based from the commits since the last tag gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
npx prettier --write changelog.md npx prettier --write changelog.md
gen-diff: ## (Release) generate the changelog diff between the current schema and the last tag gen-diff: ## (Release) generate the changelog diff between the current schema and the last tag
git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > old_schema.yml git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > old_schema.yml
docker run \ docker run \
--rm -v ${PWD}:/local \ --rm -v ${PWD}:/local \
--user ${UID}:${GID} \ --user ${UID}:${GID} \
docker.io/openapitools/openapi-diff:2.1.0-beta.8 \ docker.io/openapitools/openapi-diff:2.1.0-beta.8 \
--markdown /local/diff.md \ --markdown /local/diff.md \
/local/old_schema.yml /local/schema.yml /local/old_schema.yml /local/schema.yml
rm old_schema.yml rm old_schema.yml
sed -i 's/{/&#123;/g' diff.md sed -i 's/{/&#123;/g' diff.md
sed -i 's/}/&#125;/g' diff.md sed -i 's/}/&#125;/g' diff.md
npx prettier --write diff.md npx prettier --write diff.md
gen-clean-ts: ## Remove generated API client for Typescript gen-clean-ts: ## Remove generated API client for Typescript
rm -rf ${PWD}/${GEN_API_TS}/ rm -rf ./${GEN_API_TS}/
rm -rf ${PWD}/web/node_modules/@goauthentik/api/ rm -rf ./web/node_modules/@goauthentik/api/
gen-clean-go: ## Remove generated API client for Go gen-clean-go: ## Remove generated API client for Go
mkdir -p ${PWD}/${GEN_API_GO} rm -rf ./${GEN_API_GO}/
ifneq ($(wildcard ${PWD}/${GEN_API_GO}/.*),)
make -C ${PWD}/${GEN_API_GO} clean
else
rm -rf ${PWD}/${GEN_API_GO}
endif
gen-clean-py: ## Remove generated API client for Python gen-clean-py: ## Remove generated API client for Python
rm -rf ${PWD}/${GEN_API_PY}/ rm -rf ./${GEN_API_PY}/
gen-clean: gen-clean-ts gen-clean-go gen-clean-py ## Remove generated API clients gen-clean: gen-clean-ts gen-clean-go gen-clean-py ## Remove generated API clients
@ -143,41 +145,59 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
--rm -v ${PWD}:/local \ --rm -v ${PWD}:/local \
--user ${UID}:${GID} \ --user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \ docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
-i /local/schema.yml \ --input-spec /local/schema.yml \
-g typescript-fetch \ --generator-name typescript-fetch \
-o /local/${GEN_API_TS} \ --output /local/${GEN_API_TS} \
-c /local/scripts/api-ts-config.yaml \ --config /local/scripts/api-ts-config.yaml \
--additional-properties=npmVersion=${NPM_VERSION} \ --additional-properties=npmVersion=${NPM_VERSION} \
--git-repo-id authentik \ --git-repo-id authentik \
--git-user-id goauthentik --git-user-id goauthentik
cd ${PWD}/${GEN_API_TS} && npm link npm install
cd ${PWD}/web && npm link @goauthentik/api
gen-client-py: gen-clean-py ## Build and install the authentik API for Python gen-client-py: gen-clean-py ## Build and install the authentik API for Python
docker run \ docker run \
--rm -v ${PWD}:/local \ --rm -v ${PWD}:/local \
--user ${UID}:${GID} \ --user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \ docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
-i /local/schema.yml \ --input-spec /local/schema.yml \
-g python \ --generator-name python \
-o /local/${GEN_API_PY} \ --output /local/${GEN_API_PY} \
-c /local/scripts/api-py-config.yaml \ --config /local/scripts/api-py-config.yaml \
--additional-properties=packageVersion=${NPM_VERSION} \ --additional-properties=packageVersion=${NPM_VERSION} \
--git-repo-id authentik \ --git-repo-id authentik \
--git-user-id goauthentik --git-user-id goauthentik
pip install ./${GEN_API_PY}
gen-client-go: gen-clean-go ## Build and install the authentik API for Golang gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
mkdir -p ${PWD}/${GEN_API_GO} mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates
ifeq ($(wildcard ${PWD}/${GEN_API_GO}/.*),)
git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO} wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml \
else -O ./${GEN_API_GO}/config.yaml
cd ${PWD}/${GEN_API_GO} && git pull
endif wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache \
cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO} -O ./${GEN_API_GO}/templates/README.mustache
make -C ${PWD}/${GEN_API_GO} build
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache \
-O ./${GEN_API_GO}/templates/go.mod.mustache
cp schema.yml ./${GEN_API_GO}/
docker run \
--rm -v ${PWD}/${GEN_API_GO}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
--input-spec /local/schema.yml \
--generator-name go \
--output /local/ \
--config /local/config.yaml
go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO} go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO}
rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/
gen-dev-config: ## Generate a local development config file gen-dev-config: ## Generate a local development config file
uv run scripts/generate_config.py uv run scripts/generate_config.py
@ -187,56 +207,38 @@ gen: gen-build gen-client-ts
## Web ## Web
######################### #########################
web-build: web-install ## Build the Authentik UI web: web-lint-fix web-lint node-check-compile ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
cd web && npm run build
web: web-lint-fix web-lint web-check-compile ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
web-install: ## Install the necessary libraries to build the Authentik UI
cd web && npm ci
web-test: ## Run tests for the Authentik UI web-test: ## Run tests for the Authentik UI
cd web && npm run test npm run test -w @goauthentik/web
web-watch: ## Build and watch the Authentik UI for changes, updating automatically web-watch: ## Build and watch the Authentik UI for changes, updating automatically
rm -rf web/dist/ npm run watch -w @goauthentik/web
mkdir web/dist/
touch web/dist/.gitkeep
cd web && npm run watch
web-storybook-watch: ## Build and run the storybook documentation server web-storybook-watch: ## Build and run the storybook documentation server
cd web && npm run storybook npm run storybook -w @goauthentik/web
web-lint-fix: web-lint-fix:
cd web && npm run prettier npm run prettier -w @goauthentik/web
web-lint: web-lint:
cd web && npm run lint npm run lint -w @goauthentik/web
cd web && npm run lit-analyse npm run lit-analyse -w @goauthentik/web
web-check-compile:
cd web && npm run tsc
web-i18n-extract: web-i18n-extract:
cd web && npm run extract-locales npm run extract-locales -w @goauthentik/web
######################### #########################
## Website ## Website
######################### #########################
website: website-lint-fix website-build ## Automatically fix formatting issues in the Authentik website/docs source code, lint the code, and compile it website: node-lint-fix website-build ## Automatically fix formatting issues in the Authentik website/docs source code, lint the code, and compile it
website-install:
cd website && npm ci
website-lint-fix: lint-codespell
cd website && npm run prettier
website-build: website-build:
cd website && npm run build npm run build -w @goauthentik/docs
website-watch: ## Build and watch the documentation website, updating automatically website-watch: ## Build and watch the documentation website, updating automatically
cd website && npm run watch npm run watch -w @goauthentik/docs
######################### #########################
## Docker ## Docker
@ -247,7 +249,7 @@ docker: ## Build a docker image of the current source tree
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE} DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
test-docker: test-docker:
BUILD=true ${PWD}/scripts/test_docker.sh BUILD=true ./scripts/test_docker.sh
######################### #########################
## CI ## CI


@ -42,4 +42,4 @@ See [SECURITY.md](SECURITY.md)
## Adoption and Contributions ## Adoption and Contributions
Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [contribution guide](https://docs.goauthentik.io/docs/developer-docs?utm_source=github). Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).


@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
| Version | Supported | | Version | Supported |
| --------- | --------- | | --------- | --------- |
| 2025.4.x | ✅ | | 2024.12.x | ✅ |
| 2025.6.x | ✅ | | 2025.2.x | ✅ |
## Reporting a Vulnerability ## Reporting a Vulnerability

View File

@ -2,7 +2,7 @@
from os import environ from os import environ
__version__ = "2025.6.3" __version__ = "2025.2.4"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"


@ -0,0 +1,79 @@
"""authentik administration metrics"""
from datetime import timedelta
from django.db.models.functions import ExtractHour
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import IntegerField, SerializerMethodField
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik.core.api.utils import PassiveSerializer
from authentik.events.models import EventAction
class CoordinateSerializer(PassiveSerializer):
"""Coordinates for diagrams"""
x_cord = IntegerField(read_only=True)
y_cord = IntegerField(read_only=True)
class LoginMetricsSerializer(PassiveSerializer):
"""Login Metrics per 1h"""
logins = SerializerMethodField()
logins_failed = SerializerMethodField()
authorizations = SerializerMethodField()
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins(self, _):
"""Get successful logins per 8 hours for the last 7 days"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins_failed(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN_FAILED
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_authorizations(self, _):
"""Get successful authorizations per 8 hours for the last 7 days"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.AUTHORIZE_APPLICATION
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
class AdministrationMetricsViewSet(APIView):
"""Login Metrics per 1h"""
permission_classes = [IsAuthenticated]
@extend_schema(responses={200: LoginMetricsSerializer(many=False)})
def get(self, request: Request) -> Response:
"""Login Metrics per 1h"""
serializer = LoginMetricsSerializer(True)
serializer.context["user"] = request.user
return Response(serializer.data)
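The view above is mounted at admin/metrics/ under the URL name admin_metrics (see the urls.py hunk further down). A minimal sketch of calling it with DRF's test client; admin_user stands in for any existing authenticated user and is an assumption, not part of this change:

    # Sketch only: assumes authentik's Django settings are loaded and that
    # `admin_user` is an existing user object (hypothetical name).
    from django.urls import reverse
    from rest_framework.test import APIClient

    client = APIClient()
    client.force_login(admin_user)
    response = client.get(reverse("authentik_api:admin_metrics"))
    assert response.status_code == 200
    # The payload carries three coordinate series (logins, logins_failed,
    # authorizations), each a list of {"x_cord": <hour>, "y_cord": <count>} points.
    print(response.json()["logins"])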


@ -1,7 +1,6 @@
"""authentik administration overview""" """authentik administration overview"""
from django.core.cache import cache from django.core.cache import cache
from django_tenants.utils import get_public_schema_name
from drf_spectacular.utils import extend_schema from drf_spectacular.utils import extend_schema
from packaging.version import parse from packaging.version import parse
from rest_framework.fields import SerializerMethodField from rest_framework.fields import SerializerMethodField
@ -14,7 +13,6 @@ from authentik import __version__, get_build_hash
from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version
from authentik.core.api.utils import PassiveSerializer from authentik.core.api.utils import PassiveSerializer
from authentik.outposts.models import Outpost from authentik.outposts.models import Outpost
from authentik.tenants.utils import get_current_tenant
class VersionSerializer(PassiveSerializer): class VersionSerializer(PassiveSerializer):
@ -37,8 +35,6 @@ class VersionSerializer(PassiveSerializer):
def get_version_latest(self, _) -> str: def get_version_latest(self, _) -> str:
"""Get latest version from cache""" """Get latest version from cache"""
if get_current_tenant().schema_name == get_public_schema_name():
return __version__
version_in_cache = cache.get(VERSION_CACHE_KEY) version_in_cache = cache.get(VERSION_CACHE_KEY)
if not version_in_cache: # pragma: no cover if not version_in_cache: # pragma: no cover
update_latest_version.delay() update_latest_version.delay()


@ -14,19 +14,3 @@ class AuthentikAdminConfig(ManagedAppConfig):
label = "authentik_admin" label = "authentik_admin"
verbose_name = "authentik Admin" verbose_name = "authentik Admin"
default = True default = True
@ManagedAppConfig.reconcile_global
def clear_update_notifications(self):
"""Clear update notifications on startup if the notification was for the version
we're running now."""
from packaging.version import parse
from authentik.admin.tasks import LOCAL_VERSION
from authentik.events.models import EventAction, Notification
for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE):
if "new_version" not in notification.event.context:
continue
notification_version = notification.event.context["new_version"]
if LOCAL_VERSION >= parse(notification_version):
notification.delete()


@ -1,7 +1,6 @@
"""authentik admin settings""" """authentik admin settings"""
from celery.schedules import crontab from celery.schedules import crontab
from django_tenants.utils import get_public_schema_name
from authentik.lib.utils.time import fqdn_rand from authentik.lib.utils.time import fqdn_rand
@ -9,7 +8,6 @@ CELERY_BEAT_SCHEDULE = {
"admin_latest_version": { "admin_latest_version": {
"task": "authentik.admin.tasks.update_latest_version", "task": "authentik.admin.tasks.update_latest_version",
"schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"), "schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"),
"tenant_schemas": [get_public_schema_name()],
"options": {"queue": "authentik_scheduled"}, "options": {"queue": "authentik_scheduled"},
} }
} }


@ -1,6 +1,7 @@
"""authentik admin tasks""" """authentik admin tasks"""
from django.core.cache import cache from django.core.cache import cache
from django.db import DatabaseError, InternalError, ProgrammingError
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from packaging.version import parse from packaging.version import parse
from requests import RequestException from requests import RequestException
@ -8,7 +9,7 @@ from structlog.stdlib import get_logger
from authentik import __version__, get_build_hash from authentik import __version__, get_build_hash
from authentik.admin.apps import PROM_INFO from authentik.admin.apps import PROM_INFO
from authentik.events.models import Event, EventAction from authentik.events.models import Event, EventAction, Notification
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
from authentik.lib.config import CONFIG from authentik.lib.config import CONFIG
from authentik.lib.utils.http import get_http_session from authentik.lib.utils.http import get_http_session
@ -32,6 +33,20 @@ def _set_prom_info():
) )
@CELERY_APP.task(
throws=(DatabaseError, ProgrammingError, InternalError),
)
def clear_update_notifications():
"""Clear update notifications on startup if the notification was for the version
we're running now."""
for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE):
if "new_version" not in notification.event.context:
continue
notification_version = notification.event.context["new_version"]
if LOCAL_VERSION >= parse(notification_version):
notification.delete()
@CELERY_APP.task(bind=True, base=SystemTask) @CELERY_APP.task(bind=True, base=SystemTask)
@prefill_task @prefill_task
def update_latest_version(self: SystemTask): def update_latest_version(self: SystemTask):
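The clear_update_notifications task above deletes an update notification once the running release has caught up with the version it announced. A minimal illustration of that check, reusing the versions from the test further down; the 2025.2.4 value is taken from this diff, the rest is illustrative:

    # Sketch only: mirrors the `LOCAL_VERSION >= parse(notification_version)` check.
    from packaging.version import parse

    local_version = parse("2025.2.4")
    # A notification for an already-installed (or older) release is stale and removed:
    assert local_version >= parse("1.1.1")
    # A notification for a newer release is kept:
    assert not local_version >= parse("99999999.9999999.9999999")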


@ -36,6 +36,11 @@ class TestAdminAPI(TestCase):
body = loads(response.content) body = loads(response.content)
self.assertEqual(len(body), 0) self.assertEqual(len(body), 0)
def test_metrics(self):
"""Test metrics API"""
response = self.client.get(reverse("authentik_api:admin_metrics"))
self.assertEqual(response.status_code, 200)
def test_apps(self): def test_apps(self):
"""Test apps API""" """Test apps API"""
response = self.client.get(reverse("authentik_api:apps-list")) response = self.client.get(reverse("authentik_api:apps-list"))


@ -1,12 +1,12 @@
"""test admin tasks""" """test admin tasks"""
from django.apps import apps
from django.core.cache import cache from django.core.cache import cache
from django.test import TestCase from django.test import TestCase
from requests_mock import Mocker from requests_mock import Mocker
from authentik.admin.tasks import ( from authentik.admin.tasks import (
VERSION_CACHE_KEY, VERSION_CACHE_KEY,
clear_update_notifications,
update_latest_version, update_latest_version,
) )
from authentik.events.models import Event, EventAction from authentik.events.models import Event, EventAction
@ -72,13 +72,12 @@ class TestAdminTasks(TestCase):
def test_clear_update_notifications(self): def test_clear_update_notifications(self):
"""Test clear of previous notification""" """Test clear of previous notification"""
admin_config = apps.get_app_config("authentik_admin")
Event.objects.create( Event.objects.create(
action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"} action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"}
) )
Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"}) Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"})
Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={}) Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={})
admin_config.clear_update_notifications() clear_update_notifications()
self.assertFalse( self.assertFalse(
Event.objects.filter( Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1" action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1"


@ -3,6 +3,7 @@
from django.urls import path from django.urls import path
from authentik.admin.api.meta import AppsViewSet, ModelViewSet from authentik.admin.api.meta import AppsViewSet, ModelViewSet
from authentik.admin.api.metrics import AdministrationMetricsViewSet
from authentik.admin.api.system import SystemView from authentik.admin.api.system import SystemView
from authentik.admin.api.version import VersionView from authentik.admin.api.version import VersionView
from authentik.admin.api.version_history import VersionHistoryViewSet from authentik.admin.api.version_history import VersionHistoryViewSet
@ -11,6 +12,11 @@ from authentik.admin.api.workers import WorkerView
api_urlpatterns = [ api_urlpatterns = [
("admin/apps", AppsViewSet, "apps"), ("admin/apps", AppsViewSet, "apps"),
("admin/models", ModelViewSet, "models"), ("admin/models", ModelViewSet, "models"),
path(
"admin/metrics/",
AdministrationMetricsViewSet.as_view(),
name="admin_metrics",
),
path("admin/version/", VersionView.as_view(), name="admin_version"), path("admin/version/", VersionView.as_view(), name="admin_version"),
("admin/version/history", VersionHistoryViewSet, "version_history"), ("admin/version/history", VersionHistoryViewSet, "version_history"),
path("admin/workers/", WorkerView.as_view(), name="admin_workers"), path("admin/workers/", WorkerView.as_view(), name="admin_workers"),


@ -1,13 +1,12 @@
"""authentik API AppConfig""" """authentik API AppConfig"""
from authentik.blueprints.apps import ManagedAppConfig from django.apps import AppConfig
class AuthentikAPIConfig(ManagedAppConfig): class AuthentikAPIConfig(AppConfig):
"""authentik API Config""" """authentik API Config"""
name = "authentik.api" name = "authentik.api"
label = "authentik_api" label = "authentik_api"
mountpoint = "api/" mountpoint = "api/"
verbose_name = "authentik API" verbose_name = "authentik API"
default = True


@ -1,12 +1,9 @@
"""API Authentication""" """API Authentication"""
from hmac import compare_digest from hmac import compare_digest
from pathlib import Path
from tempfile import gettempdir
from typing import Any from typing import Any
from django.conf import settings from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from drf_spectacular.extensions import OpenApiAuthenticationExtension from drf_spectacular.extensions import OpenApiAuthenticationExtension
from rest_framework.authentication import BaseAuthentication, get_authorization_header from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.exceptions import AuthenticationFailed from rest_framework.exceptions import AuthenticationFailed
@ -14,17 +11,11 @@ from rest_framework.request import Request
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
from authentik.core.middleware import CTX_AUTH_VIA from authentik.core.middleware import CTX_AUTH_VIA
from authentik.core.models import Token, TokenIntents, User, UserTypes from authentik.core.models import Token, TokenIntents, User
from authentik.outposts.models import Outpost from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
LOGGER = get_logger() LOGGER = get_logger()
_tmp = Path(gettempdir())
try:
with open(_tmp / "authentik-core-ipc.key") as _f:
ipc_key = _f.read()
except OSError:
ipc_key = None
def validate_auth(header: bytes) -> str | None: def validate_auth(header: bytes) -> str | None:
@ -82,11 +73,6 @@ def auth_user_lookup(raw_header: bytes) -> User | None:
if user: if user:
CTX_AUTH_VIA.set("secret_key") CTX_AUTH_VIA.set("secret_key")
return user return user
# then try to auth via secret key (for embedded outpost/etc)
user = token_ipc(auth_credentials)
if user:
CTX_AUTH_VIA.set("ipc")
return user
raise AuthenticationFailed("Token invalid/expired") raise AuthenticationFailed("Token invalid/expired")
@ -104,43 +90,6 @@ def token_secret_key(value: str) -> User | None:
return outpost.user return outpost.user
class IPCUser(AnonymousUser):
"""'Virtual' user for IPC communication between authentik core and the authentik router"""
username = "authentik:system"
is_active = True
is_superuser = True
@property
def type(self):
return UserTypes.INTERNAL_SERVICE_ACCOUNT
def has_perm(self, perm, obj=None):
return True
def has_perms(self, perm_list, obj=None):
return True
def has_module_perms(self, module):
return True
@property
def is_anonymous(self):
return False
@property
def is_authenticated(self):
return True
def token_ipc(value: str) -> User | None:
"""Check if the token is the secret key
and return the service account for the managed outpost"""
if not ipc_key or not compare_digest(value, ipc_key):
return None
return IPCUser()
class TokenAuthentication(BaseAuthentication): class TokenAuthentication(BaseAuthentication):
"""Token-based authentication using HTTP Bearer authentication""" """Token-based authentication using HTTP Bearer authentication"""


@ -54,7 +54,7 @@ def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedCom
return component return component
def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): # noqa: W0613
"""Workaround to set a default response for endpoints. """Workaround to set a default response for endpoints.
Workaround suggested at Workaround suggested at
<https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357> <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357>


@ -72,33 +72,20 @@ class Command(BaseCommand):
"additionalProperties": True, "additionalProperties": True,
}, },
"entries": { "entries": {
"anyOf": [ "type": "array",
{ "items": {
"type": "array", "oneOf": [],
"items": {"$ref": "#/$defs/blueprint_entry"}, },
},
{
"type": "object",
"additionalProperties": {
"type": "array",
"items": {"$ref": "#/$defs/blueprint_entry"},
},
},
],
}, },
}, },
"$defs": {"blueprint_entry": {"oneOf": []}}, "$defs": {},
} }
def add_arguments(self, parser):
parser.add_argument("--file", type=str)
@no_translations @no_translations
def handle(self, *args, file: str, **options): def handle(self, *args, **options):
"""Generate JSON Schema for blueprints""" """Generate JSON Schema for blueprints"""
self.build() self.build()
with open(file, "w") as _schema: self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default))
_schema.write(dumps(self.schema, indent=4, default=Command.json_default))
@staticmethod @staticmethod
def json_default(value: Any) -> Any: def json_default(value: Any) -> Any:
@ -125,7 +112,7 @@ class Command(BaseCommand):
} }
) )
model_path = f"{model._meta.app_label}.{model._meta.model_name}" model_path = f"{model._meta.app_label}.{model._meta.model_name}"
self.schema["$defs"]["blueprint_entry"]["oneOf"].append( self.schema["properties"]["entries"]["items"]["oneOf"].append(
self.template_entry(model_path, model, serializer) self.template_entry(model_path, model, serializer)
) )
@ -147,7 +134,7 @@ class Command(BaseCommand):
"id": {"type": "string"}, "id": {"type": "string"},
"state": { "state": {
"type": "string", "type": "string",
"enum": sorted([s.value for s in BlueprintEntryDesiredState]), "enum": [s.value for s in BlueprintEntryDesiredState],
"default": "present", "default": "present",
}, },
"conditions": {"type": "array", "items": {"type": "boolean"}}, "conditions": {"type": "array", "items": {"type": "boolean"}},
@ -218,7 +205,7 @@ class Command(BaseCommand):
"type": "object", "type": "object",
"required": ["permission"], "required": ["permission"],
"properties": { "properties": {
"permission": {"type": "string", "enum": sorted(perms)}, "permission": {"type": "string", "enum": perms},
"user": {"type": "integer"}, "user": {"type": "integer"},
"role": {"type": "string"}, "role": {"type": "string"},
}, },


@ -1,11 +1,10 @@
version: 1 version: 1
entries: entries:
foo: - identifiers:
- identifiers: name: "%(id)s"
name: "%(id)s" slug: "%(id)s"
slug: "%(id)s" model: authentik_flows.flow
model: authentik_flows.flow state: present
state: present attrs:
attrs: designation: stage_configuration
designation: stage_configuration title: foo
title: foo


@ -37,7 +37,6 @@ entries:
- attrs: - attrs:
attributes: attributes:
env_null: !Env [bar-baz, null] env_null: !Env [bar-baz, null]
json_parse: !ParseJSON '{"foo": "bar"}'
policy_pk1: policy_pk1:
!Format [ !Format [
"%s-%s", "%s-%s",


@ -1,14 +0,0 @@
from django.test import TestCase
from authentik.blueprints.apps import ManagedAppConfig
from authentik.enterprise.apps import EnterpriseConfig
from authentik.lib.utils.reflection import get_apps
class TestManagedAppConfig(TestCase):
def test_apps_use_managed_app_config(self):
for app in get_apps():
if app.name.startswith("authentik.enterprise"):
self.assertIn(EnterpriseConfig, app.__class__.__bases__)
else:
self.assertIn(ManagedAppConfig, app.__class__.__bases__)


@ -35,6 +35,6 @@ def blueprint_tester(file_name: Path) -> Callable:
for blueprint_file in Path("blueprints/").glob("**/*.yaml"): for blueprint_file in Path("blueprints/").glob("**/*.yaml"):
if "local" in str(blueprint_file) or "testing" in str(blueprint_file): if "local" in str(blueprint_file):
continue continue
setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file)) setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file))


@ -5,6 +5,7 @@ from collections.abc import Callable
from django.apps import apps from django.apps import apps
from django.test import TestCase from django.test import TestCase
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.lib.models import SerializerModel from authentik.lib.models import SerializerModel
from authentik.providers.oauth2.models import RefreshToken from authentik.providers.oauth2.models import RefreshToken
@ -21,13 +22,10 @@ def serializer_tester_factory(test_model: type[SerializerModel]) -> Callable:
return return
model_class = test_model() model_class = test_model()
self.assertTrue(isinstance(model_class, SerializerModel)) self.assertTrue(isinstance(model_class, SerializerModel))
# Models that have subclasses don't have to have a serializer
if len(test_model.__subclasses__()) > 0:
return
self.assertIsNotNone(model_class.serializer) self.assertIsNotNone(model_class.serializer)
if model_class.serializer.Meta().model == RefreshToken: if model_class.serializer.Meta().model == RefreshToken:
return return
self.assertTrue(issubclass(test_model, model_class.serializer.Meta().model)) self.assertEqual(model_class.serializer.Meta().model, test_model)
return tester return tester
@ -36,6 +34,6 @@ for app in apps.get_app_configs():
if not app.label.startswith("authentik"): if not app.label.startswith("authentik"):
continue continue
for model in app.get_models(): for model in app.get_models():
if not issubclass(model, SerializerModel): if not is_model_allowed(model):
continue continue
setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model)) setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model))


@ -215,7 +215,6 @@ class TestBlueprintsV1(TransactionTestCase):
}, },
"nested_context": "context-nested-value", "nested_context": "context-nested-value",
"env_null": None, "env_null": None,
"json_parse": {"foo": "bar"},
"at_index_sequence": "foo", "at_index_sequence": "foo",
"at_index_sequence_default": "non existent", "at_index_sequence_default": "non existent",
"at_index_mapping": 2, "at_index_mapping": 2,


@ -6,7 +6,6 @@ from copy import copy
from dataclasses import asdict, dataclass, field, is_dataclass from dataclasses import asdict, dataclass, field, is_dataclass
from enum import Enum from enum import Enum
from functools import reduce from functools import reduce
from json import JSONDecodeError, loads
from operator import ixor from operator import ixor
from os import getenv from os import getenv
from typing import Any, Literal, Union from typing import Any, Literal, Union
@ -165,7 +164,9 @@ class BlueprintEntry:
"""Get the blueprint model, with yaml tags resolved if present""" """Get the blueprint model, with yaml tags resolved if present"""
return str(self.tag_resolver(self.model, blueprint)) return str(self.tag_resolver(self.model, blueprint))
def get_permissions(self, blueprint: "Blueprint") -> Generator[BlueprintEntryPermission]: def get_permissions(
self, blueprint: "Blueprint"
) -> Generator[BlueprintEntryPermission, None, None]:
"""Get permissions of this entry, with all yaml tags resolved""" """Get permissions of this entry, with all yaml tags resolved"""
for perm in self.permissions: for perm in self.permissions:
yield BlueprintEntryPermission( yield BlueprintEntryPermission(
@ -192,18 +193,11 @@ class Blueprint:
"""Dataclass used for a full export""" """Dataclass used for a full export"""
version: int = field(default=1) version: int = field(default=1)
entries: list[BlueprintEntry] | dict[str, list[BlueprintEntry]] = field(default_factory=list) entries: list[BlueprintEntry] = field(default_factory=list)
context: dict = field(default_factory=dict) context: dict = field(default_factory=dict)
metadata: BlueprintMetadata | None = field(default=None) metadata: BlueprintMetadata | None = field(default=None)
def iter_entries(self) -> Iterable[BlueprintEntry]:
if isinstance(self.entries, dict):
for _section, entries in self.entries.items():
yield from entries
else:
yield from self.entries
class YAMLTag: class YAMLTag:
"""Base class for all YAML Tags""" """Base class for all YAML Tags"""
@ -234,7 +228,7 @@ class KeyOf(YAMLTag):
self.id_from = node.value self.id_from = node.value
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any: def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
for _entry in blueprint.iter_entries(): for _entry in blueprint.entries:
if _entry.id == self.id_from and _entry._state.instance: if _entry.id == self.id_from and _entry._state.instance:
# Special handling for PolicyBindingModels, as they'll have a different PK # Special handling for PolicyBindingModels, as they'll have a different PK
# which is used when creating policy bindings # which is used when creating policy bindings
@ -292,22 +286,6 @@ class Context(YAMLTag):
return value return value
class ParseJSON(YAMLTag):
"""Parse JSON from context/env/etc value"""
raw: str
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
super().__init__()
self.raw = node.value
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
try:
return loads(self.raw)
except JSONDecodeError as exc:
raise EntryInvalidError.from_entry(exc, entry) from exc
class Format(YAMLTag): class Format(YAMLTag):
"""Format a string""" """Format a string"""
@ -683,7 +661,6 @@ class BlueprintLoader(SafeLoader):
self.add_constructor("!Value", Value) self.add_constructor("!Value", Value)
self.add_constructor("!Index", Index) self.add_constructor("!Index", Index)
self.add_constructor("!AtIndex", AtIndex) self.add_constructor("!AtIndex", AtIndex)
self.add_constructor("!ParseJSON", ParseJSON)
class EntryInvalidError(SentryIgnoredException): class EntryInvalidError(SentryIgnoredException):


@ -384,7 +384,7 @@ class Importer:
def _apply_models(self, raise_errors=False) -> bool: def _apply_models(self, raise_errors=False) -> bool:
"""Apply (create/update) models yaml""" """Apply (create/update) models yaml"""
self.__pk_map = {} self.__pk_map = {}
for entry in self._import.iter_entries(): for entry in self._import.entries:
model_app_label, model_name = entry.get_model(self._import).split(".") model_app_label, model_name = entry.get_model(self._import).split(".")
try: try:
model: type[SerializerModel] = registry.get_model(model_app_label, model_name) model: type[SerializerModel] = registry.get_model(model_app_label, model_name)


@ -47,7 +47,7 @@ class MetaModelRegistry:
models = apps.get_models() models = apps.get_models()
for _, value in self.models.items(): for _, value in self.models.items():
models.append(value) models.append(value)
return sorted(models, key=str) return models
def get_model(self, app_label: str, model_id: str) -> type[Model]: def get_model(self, app_label: str, model_id: str) -> type[Model]:
"""Get model checks if any virtual models are registered, and falls back """Get model checks if any virtual models are registered, and falls back


@ -59,7 +59,6 @@ class BrandSerializer(ModelSerializer):
"flow_device_code", "flow_device_code",
"default_application", "default_application",
"web_certificate", "web_certificate",
"client_certificates",
"attributes", "attributes",
] ]
extra_kwargs = { extra_kwargs = {
@ -121,7 +120,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
"domain", "domain",
"branding_title", "branding_title",
"web_certificate__name", "web_certificate__name",
"client_certificates__name",
] ]
filterset_fields = [ filterset_fields = [
"brand_uuid", "brand_uuid",
@ -138,7 +136,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
"flow_user_settings", "flow_user_settings",
"flow_device_code", "flow_device_code",
"web_certificate", "web_certificate",
"client_certificates",
] ]
ordering = ["domain"] ordering = ["domain"]


@ -1,9 +1,9 @@
"""authentik brands app""" """authentik brands app"""
from authentik.blueprints.apps import ManagedAppConfig from django.apps import AppConfig
class AuthentikBrandsConfig(ManagedAppConfig): class AuthentikBrandsConfig(AppConfig):
"""authentik Brand app""" """authentik Brand app"""
name = "authentik.brands" name = "authentik.brands"
@ -12,4 +12,3 @@ class AuthentikBrandsConfig(ManagedAppConfig):
mountpoints = { mountpoints = {
"authentik.brands.urls_root": "", "authentik.brands.urls_root": "",
} }
default = True


@ -16,7 +16,7 @@ def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
if not path.exists(): if not path.exists():
return return
css = path.read_text() css = path.read_text()
Brand.objects.using(db_alias).all().update(branding_custom_css=css) Brand.objects.using(db_alias).update(branding_custom_css=css)
class Migration(migrations.Migration): class Migration(migrations.Migration):


@ -1,37 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-19 15:09
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_brands", "0009_brand_branding_default_flow_background"),
("authentik_crypto", "0004_alter_certificatekeypair_name"),
]
operations = [
migrations.AddField(
model_name="brand",
name="client_certificates",
field=models.ManyToManyField(
blank=True,
default=None,
help_text="Certificates used for client authentication.",
to="authentik_crypto.certificatekeypair",
),
),
migrations.AlterField(
model_name="brand",
name="web_certificate",
field=models.ForeignKey(
default=None,
help_text="Web Certificate used by the authentik Core webserver.",
null=True,
on_delete=django.db.models.deletion.SET_DEFAULT,
related_name="+",
to="authentik_crypto.certificatekeypair",
),
),
]


@ -73,13 +73,6 @@ class Brand(SerializerModel):
default=None, default=None,
on_delete=models.SET_DEFAULT, on_delete=models.SET_DEFAULT,
help_text=_("Web Certificate used by the authentik Core webserver."), help_text=_("Web Certificate used by the authentik Core webserver."),
related_name="+",
)
client_certificates = models.ManyToManyField(
CertificateKeyPair,
default=None,
blank=True,
help_text=_("Certificates used for client authentication."),
) )
attributes = models.JSONField(default=dict, blank=True) attributes = models.JSONField(default=dict, blank=True)


@ -148,14 +148,3 @@ class TestBrands(APITestCase):
"default_locale": "", "default_locale": "",
}, },
) )
def test_custom_css(self):
"""Test custom_css"""
brand = create_test_brand()
brand.branding_custom_css = """* {
font-family: "Foo bar";
}"""
brand.save()
res = self.client.get(reverse("authentik_core:if-user"))
self.assertEqual(res.status_code, 200)
self.assertIn(brand.branding_custom_css, res.content.decode())


@ -5,12 +5,10 @@ from typing import Any
from django.db.models import F, Q from django.db.models import F, Q
from django.db.models import Value as V from django.db.models import Value as V
from django.http.request import HttpRequest from django.http.request import HttpRequest
from django.utils.html import _json_script_escapes from sentry_sdk import get_current_span
from django.utils.safestring import mark_safe
from authentik import get_full_version from authentik import get_full_version
from authentik.brands.models import Brand from authentik.brands.models import Brand
from authentik.lib.sentry import get_http_meta
from authentik.tenants.models import Tenant from authentik.tenants.models import Tenant
_q_default = Q(default=True) _q_default = Q(default=True)
@ -34,14 +32,13 @@ def context_processor(request: HttpRequest) -> dict[str, Any]:
"""Context Processor that injects brand object into every template""" """Context Processor that injects brand object into every template"""
brand = getattr(request, "brand", DEFAULT_BRAND) brand = getattr(request, "brand", DEFAULT_BRAND)
tenant = getattr(request, "tenant", Tenant()) tenant = getattr(request, "tenant", Tenant())
# similarly to `json_script` we escape everything HTML-related, however django trace = ""
# only directly exposes this as a function that also wraps it in a <script> tag span = get_current_span()
# which we dont want for CSS if span:
brand_css = mark_safe(str(brand.branding_custom_css).translate(_json_script_escapes)) # nosec trace = span.to_traceparent()
return { return {
"brand": brand, "brand": brand,
"brand_css": brand_css,
"footer_links": tenant.footer_links, "footer_links": tenant.footer_links,
"html_meta": {**get_http_meta()}, "sentry_trace": trace,
"version": get_full_version(), "version": get_full_version(),
} }


@ -2,9 +2,11 @@
from collections.abc import Iterator from collections.abc import Iterator
from copy import copy from copy import copy
from datetime import timedelta
from django.core.cache import cache from django.core.cache import cache
from django.db.models import QuerySet from django.db.models import QuerySet
from django.db.models.functions import ExtractHour
from django.shortcuts import get_object_or_404 from django.shortcuts import get_object_or_404
from drf_spectacular.types import OpenApiTypes from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
@ -18,6 +20,7 @@ from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
from authentik.admin.api.metrics import CoordinateSerializer
from authentik.api.pagination import Pagination from authentik.api.pagination import Pagination
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.providers import ProviderSerializer from authentik.core.api.providers import ProviderSerializer
@ -25,6 +28,7 @@ from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer from authentik.core.api.utils import ModelSerializer
from authentik.core.models import Application, User from authentik.core.models import Application, User
from authentik.events.logs import LogEventSerializer, capture_logs from authentik.events.logs import LogEventSerializer, capture_logs
from authentik.events.models import EventAction
from authentik.lib.utils.file import ( from authentik.lib.utils.file import (
FilePathSerializer, FilePathSerializer,
FileUploadSerializer, FileUploadSerializer,
@ -317,3 +321,18 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
"""Set application icon (as URL)""" """Set application icon (as URL)"""
app: Application = self.get_object() app: Application = self.get_object()
return set_file_url(request, app, "meta_icon") return set_file_url(request, app, "meta_icon")
@permission_required("authentik_core.view_application", ["authentik_events.view_event"])
@extend_schema(responses={200: CoordinateSerializer(many=True)})
@action(detail=True, pagination_class=None, filter_backends=[])
def metrics(self, request: Request, slug: str):
"""Metrics for application logins"""
app = self.get_object()
return Response(
get_objects_for_user(request.user, "authentik_events.view_event").filter(
action=EventAction.AUTHORIZE_APPLICATION,
context__authorized_application__pk=app.pk.hex,
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)


@ -1,6 +1,8 @@
"""Authenticator Devices API Views""" """Authenticator Devices API Views"""
from drf_spectacular.utils import extend_schema from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema
from guardian.shortcuts import get_objects_for_user from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import ( from rest_framework.fields import (
BooleanField, BooleanField,
@ -13,7 +15,6 @@ from rest_framework.request import Request
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.viewsets import ViewSet from rest_framework.viewsets import ViewSet
from authentik.core.api.users import ParamUserSerializer
from authentik.core.api.utils import MetaNameSerializer from authentik.core.api.utils import MetaNameSerializer
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
from authentik.stages.authenticator import device_classes, devices_for_user from authentik.stages.authenticator import device_classes, devices_for_user
@ -22,7 +23,7 @@ from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
class DeviceSerializer(MetaNameSerializer): class DeviceSerializer(MetaNameSerializer):
"""Serializer for authenticator devices""" """Serializer for Duo authenticator devices"""
pk = CharField() pk = CharField()
name = CharField() name = CharField()
@ -32,27 +33,22 @@ class DeviceSerializer(MetaNameSerializer):
last_updated = DateTimeField(read_only=True) last_updated = DateTimeField(read_only=True)
last_used = DateTimeField(read_only=True, allow_null=True) last_used = DateTimeField(read_only=True, allow_null=True)
extra_description = SerializerMethodField() extra_description = SerializerMethodField()
external_id = SerializerMethodField()
def get_type(self, instance: Device) -> str: def get_type(self, instance: Device) -> str:
"""Get type of device""" """Get type of device"""
return instance._meta.label return instance._meta.label
def get_extra_description(self, instance: Device) -> str | None: def get_extra_description(self, instance: Device) -> str:
"""Get extra description""" """Get extra description"""
if isinstance(instance, WebAuthnDevice): if isinstance(instance, WebAuthnDevice):
return instance.device_type.description if instance.device_type else None return (
instance.device_type.description
if instance.device_type
else _("Extra description not available")
)
if isinstance(instance, EndpointDevice): if isinstance(instance, EndpointDevice):
return instance.data.get("deviceSignals", {}).get("deviceModel") return instance.data.get("deviceSignals", {}).get("deviceModel")
return None return ""
def get_external_id(self, instance: Device) -> str | None:
"""Get external Device ID"""
if isinstance(instance, WebAuthnDevice):
return instance.device_type.aaguid if instance.device_type else None
if isinstance(instance, EndpointDevice):
return instance.data.get("deviceSignals", {}).get("deviceModel")
return None
class DeviceViewSet(ViewSet): class DeviceViewSet(ViewSet):
@ -61,6 +57,7 @@ class DeviceViewSet(ViewSet):
serializer_class = DeviceSerializer serializer_class = DeviceSerializer
permission_classes = [IsAuthenticated] permission_classes = [IsAuthenticated]
@extend_schema(responses={200: DeviceSerializer(many=True)})
def list(self, request: Request) -> Response: def list(self, request: Request) -> Response:
"""Get all devices for current user""" """Get all devices for current user"""
devices = devices_for_user(request.user) devices = devices_for_user(request.user)
@ -82,11 +79,18 @@ class AdminDeviceViewSet(ViewSet):
yield from device_set yield from device_set
@extend_schema( @extend_schema(
parameters=[ParamUserSerializer], parameters=[
OpenApiParameter(
name="user",
location=OpenApiParameter.QUERY,
type=OpenApiTypes.INT,
)
],
responses={200: DeviceSerializer(many=True)}, responses={200: DeviceSerializer(many=True)},
) )
def list(self, request: Request) -> Response: def list(self, request: Request) -> Response:
"""Get all devices for current user""" """Get all devices for current user"""
args = ParamUserSerializer(data=request.query_params) kwargs = {}
args.is_valid(raise_exception=True) if "user" in request.query_params:
return Response(DeviceSerializer(self.get_devices(**args.validated_data), many=True).data) kwargs = {"user": request.query_params["user"]}
return Response(DeviceSerializer(self.get_devices(**kwargs), many=True).data)

@ -99,17 +99,18 @@ class GroupSerializer(ModelSerializer):
if superuser if superuser
else "authentik_core.disable_group_superuser" else "authentik_core.disable_group_superuser"
) )
if self.instance or superuser: has_perm = user.has_perm(perm)
has_perm = user.has_perm(perm) or user.has_perm(perm, self.instance) if self.instance and not has_perm:
if not has_perm: has_perm = user.has_perm(perm, self.instance)
raise ValidationError( if not has_perm:
_( raise ValidationError(
( _(
"User does not have permission to set " (
"superuser status to {superuser_status}." "User does not have permission to set "
).format_map({"superuser_status": superuser}) "superuser status to {superuser_status}."
) ).format_map({"superuser_status": superuser})
) )
)
return superuser return superuser
class Meta: class Meta:

@ -6,6 +6,7 @@ from typing import Any
from django.contrib.auth import update_session_auth_hash from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.models import Permission from django.contrib.auth.models import Permission
from django.db.models.functions import ExtractHour
from django.db.transaction import atomic from django.db.transaction import atomic
from django.db.utils import IntegrityError from django.db.utils import IntegrityError
from django.urls import reverse_lazy from django.urls import reverse_lazy
@ -51,6 +52,7 @@ from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
from authentik.admin.api.metrics import CoordinateSerializer
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.brands.models import Brand from authentik.brands.models import Brand
from authentik.core.api.used_by import UsedByMixin from authentik.core.api.used_by import UsedByMixin
@ -82,7 +84,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN
from authentik.lib.avatars import get_avatar from authentik.lib.avatars import get_avatar
from authentik.rbac.decorators import permission_required from authentik.rbac.decorators import permission_required
from authentik.rbac.models import get_permission_choices from authentik.rbac.models import get_permission_choices
from authentik.stages.email.flow import pickle_flow_token_for_email
from authentik.stages.email.models import EmailStage from authentik.stages.email.models import EmailStage
from authentik.stages.email.tasks import send_mails from authentik.stages.email.tasks import send_mails
from authentik.stages.email.utils import TemplateEmailMessage from authentik.stages.email.utils import TemplateEmailMessage
@ -90,12 +91,6 @@ from authentik.stages.email.utils import TemplateEmailMessage
LOGGER = get_logger() LOGGER = get_logger()
class ParamUserSerializer(PassiveSerializer):
"""Partial serializer for query parameters to select a user"""
user = PrimaryKeyRelatedField(queryset=User.objects.all().exclude_anonymous(), required=False)
class UserGroupSerializer(ModelSerializer): class UserGroupSerializer(ModelSerializer):
"""Simplified Group Serializer for user's groups""" """Simplified Group Serializer for user's groups"""
@ -321,6 +316,53 @@ class SessionUserSerializer(PassiveSerializer):
original = UserSelfSerializer(required=False) original = UserSelfSerializer(required=False)
class UserMetricsSerializer(PassiveSerializer):
"""User Metrics"""
logins = SerializerMethodField()
logins_failed = SerializerMethodField()
authorizations = SerializerMethodField()
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins(self, _):
"""Get successful logins per 8 hours for the last 7 days"""
user = self.context["user"]
request = self.context["request"]
return (
get_objects_for_user(request.user, "authentik_events.view_event").filter(
action=EventAction.LOGIN, user__pk=user.pk
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins_failed(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
user = self.context["user"]
request = self.context["request"]
return (
get_objects_for_user(request.user, "authentik_events.view_event").filter(
action=EventAction.LOGIN_FAILED, context__username=user.username
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_authorizations(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
user = self.context["user"]
request = self.context["request"]
return (
get_objects_for_user(request.user, "authentik_events.view_event").filter(
action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
)
class UsersFilter(FilterSet): class UsersFilter(FilterSet):
"""Filter for users""" """Filter for users"""
@ -392,23 +434,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
queryset = User.objects.none() queryset = User.objects.none()
ordering = ["username"] ordering = ["username"]
serializer_class = UserSerializer serializer_class = UserSerializer
filterset_class = UsersFilter
search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"] search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"]
filterset_class = UsersFilter
def get_ql_fields(self):
from djangoql.schema import BoolField, StrField
from authentik.enterprise.search.fields import ChoiceSearchField, JSONSearchField
return [
StrField(User, "username"),
StrField(User, "name"),
StrField(User, "email"),
StrField(User, "path"),
BoolField(User, "is_active", nullable=True),
ChoiceSearchField(User, "type"),
JSONSearchField(User, "attributes", suggest_nested=False),
]
def get_queryset(self): def get_queryset(self):
base_qs = User.objects.all().exclude_anonymous() base_qs = User.objects.all().exclude_anonymous()
@ -424,7 +451,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
def list(self, request, *args, **kwargs): def list(self, request, *args, **kwargs):
return super().list(request, *args, **kwargs) return super().list(request, *args, **kwargs)
def _create_recovery_link(self, for_email=False) -> tuple[str, Token]: def _create_recovery_link(self) -> tuple[str, Token]:
"""Create a recovery link (when the current brand has a recovery flow set), """Create a recovery link (when the current brand has a recovery flow set),
that can either be shown to an admin or sent to the user directly""" that can either be shown to an admin or sent to the user directly"""
brand: Brand = self.request._request.brand brand: Brand = self.request._request.brand
@ -446,16 +473,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):
raise ValidationError( raise ValidationError(
{"non_field_errors": "Recovery flow not applicable to user"} {"non_field_errors": "Recovery flow not applicable to user"}
) from None ) from None
_plan = FlowToken.pickle(plan)
if for_email:
_plan = pickle_flow_token_for_email(plan)
token, __ = FlowToken.objects.update_or_create( token, __ = FlowToken.objects.update_or_create(
identifier=f"{user.uid}-password-reset", identifier=f"{user.uid}-password-reset",
defaults={ defaults={
"user": user, "user": user,
"flow": flow, "flow": flow,
"_plan": _plan, "_plan": FlowToken.pickle(plan),
"revoke_on_execution": not for_email,
}, },
) )
querystring = urlencode({QS_KEY_TOKEN: token.key}) querystring = urlencode({QS_KEY_TOKEN: token.key})
@ -579,6 +602,17 @@ class UserViewSet(UsedByMixin, ModelViewSet):
update_session_auth_hash(self.request, user) update_session_auth_hash(self.request, user)
return Response(status=204) return Response(status=204)
@permission_required("authentik_core.view_user", ["authentik_events.view_event"])
@extend_schema(responses={200: UserMetricsSerializer(many=False)})
@action(detail=True, pagination_class=None, filter_backends=[])
def metrics(self, request: Request, pk: int) -> Response:
"""User metrics per 1h"""
user: User = self.get_object()
serializer = UserMetricsSerializer(instance={})
serializer.context["user"] = user
serializer.context["request"] = request
return Response(serializer.data)
@permission_required("authentik_core.reset_user_password") @permission_required("authentik_core.reset_user_password")
@extend_schema( @extend_schema(
responses={ responses={
@ -614,7 +648,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
if for_user.email == "": if for_user.email == "":
LOGGER.debug("User doesn't have an email address") LOGGER.debug("User doesn't have an email address")
raise ValidationError({"non_field_errors": "User does not have an email address set."}) raise ValidationError({"non_field_errors": "User does not have an email address set."})
link, token = self._create_recovery_link(for_email=True) link, token = self._create_recovery_link()
# Lookup the email stage to assure the current user can access it # Lookup the email stage to assure the current user can access it
stages = get_objects_for_user( stages = get_objects_for_user(
request.user, "authentik_stages_email.view_emailstage" request.user, "authentik_stages_email.view_emailstage"

@ -2,7 +2,6 @@
from typing import Any from typing import Any
from django.db import models
from django.db.models import Model from django.db.models import Model
from drf_spectacular.extensions import OpenApiSerializerFieldExtension from drf_spectacular.extensions import OpenApiSerializerFieldExtension
from drf_spectacular.plumbing import build_basic_type from drf_spectacular.plumbing import build_basic_type
@ -31,27 +30,7 @@ def is_dict(value: Any):
raise ValidationError("Value must be a dictionary, and not have any duplicate keys.") raise ValidationError("Value must be a dictionary, and not have any duplicate keys.")
class JSONDictField(JSONField):
"""JSON Field which only allows dictionaries"""
default_validators = [is_dict]
class JSONExtension(OpenApiSerializerFieldExtension):
"""Generate API Schema for JSON fields as"""
target_class = "authentik.core.api.utils.JSONDictField"
def map_serializer_field(self, auto_schema, direction):
return build_basic_type(OpenApiTypes.OBJECT)
class ModelSerializer(BaseModelSerializer): class ModelSerializer(BaseModelSerializer):
# By default, JSON fields we have are used to store dictionaries
serializer_field_mapping = BaseModelSerializer.serializer_field_mapping.copy()
serializer_field_mapping[models.JSONField] = JSONDictField
def create(self, validated_data): def create(self, validated_data):
instance = super().create(validated_data) instance = super().create(validated_data)
@ -92,6 +71,21 @@ class ModelSerializer(BaseModelSerializer):
return instance return instance
class JSONDictField(JSONField):
"""JSON Field which only allows dictionaries"""
default_validators = [is_dict]
class JSONExtension(OpenApiSerializerFieldExtension):
"""Generate API Schema for JSON fields as"""
target_class = "authentik.core.api.utils.JSONDictField"
def map_serializer_field(self, auto_schema, direction):
return build_basic_type(OpenApiTypes.OBJECT)
class PassiveSerializer(Serializer): class PassiveSerializer(Serializer):
"""Base serializer class which doesn't implement create/update methods""" """Base serializer class which doesn't implement create/update methods"""

@ -13,6 +13,7 @@ class Command(TenantCommand):
parser.add_argument("usernames", nargs="*", type=str) parser.add_argument("usernames", nargs="*", type=str)
def handle_per_tenant(self, **options): def handle_per_tenant(self, **options):
print(options)
new_type = UserTypes(options["type"]) new_type = UserTypes(options["type"])
qs = ( qs = (
User.objects.exclude_anonymous() User.objects.exclude_anonymous()

@ -2,7 +2,6 @@
from django.apps import apps from django.apps import apps
from django.contrib.auth.management import create_permissions from django.contrib.auth.management import create_permissions
from django.core.management import call_command
from django.core.management.base import BaseCommand, no_translations from django.core.management.base import BaseCommand, no_translations
from guardian.management import create_anonymous_user from guardian.management import create_anonymous_user
@ -17,10 +16,6 @@ class Command(BaseCommand):
"""Check permissions for all apps""" """Check permissions for all apps"""
for tenant in Tenant.objects.filter(ready=True): for tenant in Tenant.objects.filter(ready=True):
with tenant: with tenant:
# See https://code.djangoproject.com/ticket/28417
# Remove potential lingering old permissions
call_command("remove_stale_contenttypes", "--no-input")
for app in apps.get_app_configs(): for app in apps.get_app_configs():
self.stdout.write(f"Checking app {app.name} ({app.label})\n") self.stdout.write(f"Checking app {app.name} ({app.label})\n")
create_permissions(app, verbosity=0) create_permissions(app, verbosity=0)

@ -31,10 +31,7 @@ class PickleSerializer:
def loads(self, data): def loads(self, data):
"""Unpickle data to be loaded from redis""" """Unpickle data to be loaded from redis"""
try: return pickle.loads(data) # nosec
return pickle.loads(data) # nosec
except Exception:
return {}
def _migrate_session( def _migrate_session(
@ -79,7 +76,6 @@ def _migrate_session(
AuthenticatedSession.objects.using(db_alias).create( AuthenticatedSession.objects.using(db_alias).create(
session=session, session=session,
user=old_auth_session.user, user=old_auth_session.user,
uuid=old_auth_session.uuid,
) )

@ -1,103 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-14 11:15
from django.apps.registry import Apps, apps as global_apps
from django.db import migrations
from django.contrib.contenttypes.management import create_contenttypes
from django.contrib.auth.management import create_permissions
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
"""Migrate permissions from OldAuthenticatedSession to AuthenticatedSession"""
db_alias = schema_editor.connection.alias
# `apps` here is just an instance of `django.db.migrations.state.AppConfigStub`, we need the
# real config for creating permissions and content types
authentik_core_config = global_apps.get_app_config("authentik_core")
# These are only run by django after all migrations, but we need them right now.
# `global_apps` is needed,
create_permissions(authentik_core_config, using=db_alias, verbosity=1)
create_contenttypes(authentik_core_config, using=db_alias, verbosity=1)
# But from now on, this is just a regular migration, so use `apps`
Permission = apps.get_model("auth", "Permission")
ContentType = apps.get_model("contenttypes", "ContentType")
try:
old_ct = ContentType.objects.using(db_alias).get(
app_label="authentik_core", model="oldauthenticatedsession"
)
new_ct = ContentType.objects.using(db_alias).get(
app_label="authentik_core", model="authenticatedsession"
)
except ContentType.DoesNotExist:
# This should exist at this point, but if not, let's cut our losses
return
# Get all permissions for the old content type
old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct)
# Create equivalent permissions for the new content type
for old_perm in old_perms:
new_perm = (
Permission.objects.using(db_alias)
.filter(
content_type=new_ct,
codename=old_perm.codename,
)
.first()
)
if not new_perm:
# This should exist at this point, but if not, let's cut our losses
continue
# Global user permissions
User = apps.get_model("authentik_core", "User")
User.user_permissions.through.objects.using(db_alias).filter(
permission=old_perm
).all().update(permission=new_perm)
# Global role permissions
DjangoGroup = apps.get_model("auth", "Group")
DjangoGroup.permissions.through.objects.using(db_alias).filter(
permission=old_perm
).all().update(permission=new_perm)
# Object user permissions
UserObjectPermission = apps.get_model("guardian", "UserObjectPermission")
UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
permission=new_perm, content_type=new_ct
)
# Object role permissions
GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission")
GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
permission=new_perm, content_type=new_ct
)
def remove_old_authenticated_session_content_type(
apps: Apps, schema_editor: BaseDatabaseSchemaEditor
):
db_alias = schema_editor.connection.alias
ContentType = apps.get_model("contenttypes", "ContentType")
ContentType.objects.using(db_alias).filter(model="oldauthenticatedsession").delete()
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0047_delete_oldauthenticatedsession"),
]
operations = [
migrations.RunPython(
code=migrate_authenticated_session_permissions,
reverse_code=migrations.RunPython.noop,
),
migrations.RunPython(
code=remove_old_authenticated_session_content_type,
reverse_code=migrations.RunPython.noop,
),
]

@ -18,7 +18,7 @@ from django.http import HttpRequest
from django.utils.functional import SimpleLazyObject, cached_property from django.utils.functional import SimpleLazyObject, cached_property
from django.utils.timezone import now from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from django_cte import CTE, with_cte from django_cte import CTEQuerySet, With
from guardian.conf import settings from guardian.conf import settings
from guardian.mixins import GuardianUserMixin from guardian.mixins import GuardianUserMixin
from model_utils.managers import InheritanceManager from model_utils.managers import InheritanceManager
@ -136,7 +136,7 @@ class AttributesMixin(models.Model):
return instance, False return instance, False
class GroupQuerySet(QuerySet): class GroupQuerySet(CTEQuerySet):
def with_children_recursive(self): def with_children_recursive(self):
"""Recursively get all groups that have the current queryset as parents """Recursively get all groups that have the current queryset as parents
or are indirectly related.""" or are indirectly related."""
@ -165,9 +165,9 @@ class GroupQuerySet(QuerySet):
) )
# Build the recursive query, see above # Build the recursive query, see above
cte = CTE.recursive(make_cte) cte = With.recursive(make_cte)
# Return the result, as a usable queryset for Group. # Return the result, as a usable queryset for Group.
return with_cte(cte, select=cte.join(Group, group_uuid=cte.col.group_uuid)) return cte.join(Group, group_uuid=cte.col.group_uuid).with_cte(cte)
class Group(SerializerModel, AttributesMixin): class Group(SerializerModel, AttributesMixin):
@ -1082,12 +1082,6 @@ class AuthenticatedSession(SerializerModel):
user = models.ForeignKey(User, on_delete=models.CASCADE) user = models.ForeignKey(User, on_delete=models.CASCADE)
@property
def serializer(self) -> type[Serializer]:
from authentik.core.api.authenticated_sessions import AuthenticatedSessionSerializer
return AuthenticatedSessionSerializer
class Meta: class Meta:
verbose_name = _("Authenticated Session") verbose_name = _("Authenticated Session")
verbose_name_plural = _("Authenticated Sessions") verbose_name_plural = _("Authenticated Sessions")

@ -2,20 +2,22 @@
{% get_current_language as LANGUAGE_CODE %} {% get_current_language as LANGUAGE_CODE %}
<script> <script>
window.authentik = { window.authentik = {
locale: "{{ LANGUAGE_CODE }}", locale: "{{ LANGUAGE_CODE }}",
config: JSON.parse('{{ config_json|escapejs }}'), config: JSON.parse("{{ config_json|escapejs }}" || "{}"),
brand: JSON.parse('{{ brand_json|escapejs }}'), brand: JSON.parse("{{ brand_json|escapejs }}" || "{}"),
versionFamily: "{{ version_family }}", versionFamily: "{{ version_family }}",
versionSubdomain: "{{ version_subdomain }}", versionSubdomain: "{{ version_subdomain }}",
build: "{{ build }}", build: "{{ build }}",
api: { api: {
base: "{{ base_url }}", base: "{{ base_url }}",
relBase: "{{ base_url_rel }}", relBase: "{{ base_url_rel }}",
}, },
}; };
{% if messages %}
window.addEventListener("DOMContentLoaded", function () { window.addEventListener("DOMContentLoaded", function () {
{% for message in messages %} {% for message in messages %}
window.dispatchEvent( window.dispatchEvent(
new CustomEvent("ak-message", { new CustomEvent("ak-message", {
bubbles: true, bubbles: true,
@ -26,6 +28,7 @@
}, },
}), }),
); );
{% endfor %} {% endfor %}
}); });
{% endif %}
</script> </script>

@ -2,33 +2,79 @@
{% load i18n %} {% load i18n %}
{% load authentik_core %} {% load authentik_core %}
<!DOCTYPE html> <!doctype html>
<html> <html>
<head> <head>
<meta charset="UTF-8"> <meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1"> <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1" />
{# Darkreader breaks the site regardless of theme as its not compatible with webcomponents, and we default to a dark theme based on preferred colour-scheme #}
<meta name="darkreader-lock"> {% comment %}
<title>{% block title %}{% trans title|default:brand.branding_title %}{% endblock %}</title> Darkreader breaks the site regardless of theme as its not compatible with webcomponents, and we
<link rel="icon" href="{{ brand.branding_favicon_url }}"> default to a dark theme based on preferred colour-scheme
<link rel="shortcut icon" href="{{ brand.branding_favicon_url }}"> {% endcomment %}
{% block head_before %}
{% endblock %} <meta name="darkreader-lock" />
<link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
<style>{{ brand_css }}</style> <title>{% block title %}{% trans title|default:brand.branding_title %}{% endblock %}</title>
<script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script>
<script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script> <link rel="icon" href="{{ brand.branding_favicon_url }}" />
{% block head %} <link rel="shortcut icon" href="{{ brand.branding_favicon_url }}" />
{% endblock %}
{% for key, value in html_meta.items %} {% block head_before %}
<meta name="{{key}}" content="{{ value }}" /> {% endblock %}
{% endfor %}
</head> <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}" />
<body>
{% block body %} <style data-test-id="color-scheme">
{% endblock %} @media (prefers-color-scheme: dark) {
{% block scripts %} :root {
{% endblock %} color-scheme: dark light;
</body> }
}
@media (prefers-color-scheme: light) {
:root {
color-scheme: light dark;
}
}
</style>
<style data-test-id="custom-branding-css">
{{ brand.branding_custom_css }}
</style>
<script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script>
<script
src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}"
type="module"
></script>
{% block head %}
{% endblock %}
<meta name="sentry-trace" content="{{ sentry_trace }}" />
</head>
<body>
{% block body %}{% endblock %}
{% block scripts %}{% endblock %}
<noscript>
<style>
body {
font-family: var(--ak-font-family-base), sans-serif;
}
</style>
<h1>
JavaScript is required to use
{% trans title|default:brand.branding_title %}
</h1>
<p>
Please enable JavaScript in your browser settings and reload the page. If you are using a
browser extension that blocks JavaScript, please disable it for this site.
</p>
</noscript>
</body>
</html> </html>

@ -4,14 +4,16 @@
{% block head %} {% block head %}
<script src="{% versioned_script 'dist/admin/AdminInterface-%v.js' %}" type="module"></script> <script src="{% versioned_script 'dist/admin/AdminInterface-%v.js' %}" type="module"></script>
<meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)">
<meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)"> <meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)" />
<meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)" />
{% include "base/header_js.html" %} {% include "base/header_js.html" %}
{% endblock %} {% endblock %}
{% block body %} {% block body %}
<ak-message-container alignment="bottom"></ak-message-container> <ak-message-container></ak-message-container>
<ak-interface-admin> <ak-interface-admin>
<ak-loading></ak-loading> <ak-loading></ak-loading>
</ak-interface-admin> </ak-interface-admin>
{% endblock %} {% endblock %}

@ -13,9 +13,14 @@
{% block card %} {% block card %}
<form method="POST" class="pf-c-form"> <form method="POST" class="pf-c-form">
<p>{% trans message %}</p> <p>{% trans message %}</p>
<a id="ak-back-home" href="{% url 'authentik_core:root-redirect' %}" class="pf-c-button pf-m-primary">
{% trans 'Go home' %} <a
</a> id="ak-back-home"
href="{% url 'authentik_core:root-redirect' %}"
class="pf-c-button pf-m-primary"
>
{% trans 'Go home' %}
</a>
</form> </form>
{% endblock %} {% endblock %}

@ -4,14 +4,17 @@
{% block head %} {% block head %}
<script src="{% versioned_script 'dist/user/UserInterface-%v.js' %}" type="module"></script> <script src="{% versioned_script 'dist/user/UserInterface-%v.js' %}" type="module"></script>
<meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: light)">
<meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: dark)"> <meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: light)" />
<meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: dark)" />
{% include "base/header_js.html" %} {% include "base/header_js.html" %}
{% endblock %} {% endblock %}
{% block body %} {% block body %}
<ak-message-container></ak-message-container> <ak-message-container></ak-message-container>
<ak-interface-user> <ak-interface-user>
<ak-loading></ak-loading> <ak-loading></ak-loading>
</ak-interface-user> </ak-interface-user>
{% endblock %} {% endblock %}

@ -5,78 +5,82 @@
{% block head_before %} {% block head_before %}
<link rel="prefetch" href="{{ request.brand.branding_default_flow_background_url }}" /> <link rel="prefetch" href="{{ request.brand.branding_default_flow_background_url }}" />
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}"> <link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}" />
<link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)"> <link
rel="stylesheet"
type="text/css"
href="{% static 'dist/theme-dark.css' %}"
media="(prefers-color-scheme: dark)"
/>
{% include "base/header_js.html" %} {% include "base/header_js.html" %}
{% endblock %} {% endblock %}
{% block head %} {% block head %}
<style> <style data-test-id="base-full-root-styles">
:root { :root {
--ak-flow-background: url("{{ request.brand.branding_default_flow_background_url }}"); --ak-flow-background: url("{{ request.brand.branding_default_flow_background_url }}");
--pf-c-background-image--BackgroundImage: var(--ak-flow-background); --pf-c-background-image--BackgroundImage: var(--ak-flow-background);
--pf-c-background-image--BackgroundImage-2x: var(--ak-flow-background); --pf-c-background-image--BackgroundImage-2x: var(--ak-flow-background);
--pf-c-background-image--BackgroundImage--sm: var(--ak-flow-background); --pf-c-background-image--BackgroundImage--sm: var(--ak-flow-background);
--pf-c-background-image--BackgroundImage--sm-2x: var(--ak-flow-background); --pf-c-background-image--BackgroundImage--sm-2x: var(--ak-flow-background);
--pf-c-background-image--BackgroundImage--lg: var(--ak-flow-background); --pf-c-background-image--BackgroundImage--lg: var(--ak-flow-background);
} }
/* Form with user */ /* Form with user */
.form-control-static { .form-control-static {
margin-top: var(--pf-global--spacer--sm); margin-top: var(--pf-global--spacer--sm);
display: flex; display: flex;
align-items: center; align-items: center;
justify-content: space-between; justify-content: space-between;
} }
.form-control-static .avatar { .form-control-static .avatar {
display: flex; display: flex;
align-items: center; align-items: center;
} }
.form-control-static img { .form-control-static img {
margin-right: var(--pf-global--spacer--xs); margin-right: var(--pf-global--spacer--xs);
} }
.form-control-static a { .form-control-static a {
padding-top: var(--pf-global--spacer--xs); padding-top: var(--pf-global--spacer--xs);
padding-bottom: var(--pf-global--spacer--xs); padding-bottom: var(--pf-global--spacer--xs);
line-height: var(--pf-global--spacer--xl); line-height: var(--pf-global--spacer--xl);
} }
</style> </style>
{% endblock %} {% endblock %}
{% block body %} {% block body %}
<div class="pf-c-background-image"> <div class="pf-c-background-image"></div>
</div>
<ak-message-container></ak-message-container> <ak-message-container></ak-message-container>
<div class="pf-c-login stacked"> <div class="pf-c-login stacked">
<div class="ak-login-container"> <div class="ak-login-container">
<main class="pf-c-login__main"> <main class="pf-c-login__main">
<div class="pf-c-login__main-header pf-c-brand ak-brand"> <div class="pf-c-login__main-header pf-c-brand ak-brand">
<img src="{{ brand.branding_logo_url }}" alt="authentik Logo" /> <img src="{{ brand.branding_logo_url }}" alt="authentik Logo" />
</div> </div>
<header class="pf-c-login__main-header"> <header class="pf-c-login__main-header">
<h1 class="pf-c-title pf-m-3xl"> <h1 class="pf-c-title pf-m-3xl">
{% block card_title %} {% block card_title %}
{% endblock %} {% endblock %}
</h1> </h1>
</header> </header>
<div class="pf-c-login__main-body"> <div class="pf-c-login__main-body">
{% block card %} {% block card %}
{% endblock %} {% endblock %}
</div> </div>
</main> </main>
<footer class="pf-c-login__footer"> <footer class="pf-c-login__footer">
<ul class="pf-c-list pf-m-inline"> <ul class="pf-c-list pf-m-inline">
{% for link in footer_links %} {% for link in footer_links %}
<li> <li>
<a href="{{ link.href }}">{{ link.name }}</a> <a href="{{ link.href }}">{{ link.name }}</a>
</li> </li>
{% endfor %} {% endfor %}
<li> <li>
<span> <span>
{% trans 'Powered by authentik' %} {% trans 'Powered by authentik' %}
</span> </span>
</li> </li>
</ul> </ul>
</footer> </footer>
</div> </div>
</div> </div>
{% endblock %} {% endblock %}

@ -114,7 +114,6 @@ class TestApplicationsAPI(APITestCase):
self.assertJSONEqual( self.assertJSONEqual(
response.content.decode(), response.content.decode(),
{ {
"autocomplete": {},
"pagination": { "pagination": {
"next": 0, "next": 0,
"previous": 0, "previous": 0,
@ -168,7 +167,6 @@ class TestApplicationsAPI(APITestCase):
self.assertJSONEqual( self.assertJSONEqual(
response.content.decode(), response.content.decode(),
{ {
"autocomplete": {},
"pagination": { "pagination": {
"next": 0, "next": 0,
"previous": 0, "previous": 0,

@ -124,16 +124,6 @@ class TestGroupsAPI(APITestCase):
{"is_superuser": ["User does not have permission to set superuser status to True."]}, {"is_superuser": ["User does not have permission to set superuser status to True."]},
) )
def test_superuser_no_perm_no_superuser(self):
"""Test creating a group without permission and without superuser flag"""
assign_perm("authentik_core.add_group", self.login_user)
self.client.force_login(self.login_user)
res = self.client.post(
reverse("authentik_api:group-list"),
data={"name": generate_id(), "is_superuser": False},
)
self.assertEqual(res.status_code, 201)
def test_superuser_update_no_perm(self): def test_superuser_update_no_perm(self):
"""Test updating a superuser group without permission""" """Test updating a superuser group without permission"""
group = Group.objects.create(name=generate_id(), is_superuser=True) group = Group.objects.create(name=generate_id(), is_superuser=True)

@ -13,10 +13,7 @@ from authentik.core.models import (
TokenIntents, TokenIntents,
User, User,
) )
from authentik.core.tasks import ( from authentik.core.tasks import clean_expired_models, clean_temporary_users
clean_expired_models,
clean_temporary_users,
)
from authentik.core.tests.utils import create_test_admin_user from authentik.core.tests.utils import create_test_admin_user
from authentik.lib.generators import generate_id from authentik.lib.generators import generate_id

@ -81,6 +81,22 @@ class TestUsersAPI(APITestCase):
response = self.client.get(reverse("authentik_api:user-list"), {"include_groups": "true"}) response = self.client.get(reverse("authentik_api:user-list"), {"include_groups": "true"})
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
def test_metrics(self):
"""Test user's metrics"""
self.client.force_login(self.admin)
response = self.client.get(
reverse("authentik_api:user-metrics", kwargs={"pk": self.user.pk})
)
self.assertEqual(response.status_code, 200)
def test_metrics_denied(self):
"""Test user's metrics (non-superuser)"""
self.client.force_login(self.user)
response = self.client.get(
reverse("authentik_api:user-metrics", kwargs={"pk": self.user.pk})
)
self.assertEqual(response.status_code, 403)
def test_recovery_no_flow(self): def test_recovery_no_flow(self):
"""Test user recovery link (no recovery flow set)""" """Test user recovery link (no recovery flow set)"""
self.client.force_login(self.admin) self.client.force_login(self.admin)

@ -30,7 +30,6 @@ from structlog.stdlib import get_logger
from authentik.core.api.used_by import UsedByMixin from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer, PassiveSerializer from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.core.models import UserTypes
from authentik.crypto.apps import MANAGED_KEY from authentik.crypto.apps import MANAGED_KEY
from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg
from authentik.crypto.models import CertificateKeyPair from authentik.crypto.models import CertificateKeyPair
@ -273,12 +272,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
def view_certificate(self, request: Request, pk: str) -> Response: def view_certificate(self, request: Request, pk: str) -> Response:
"""Return certificate-key pairs certificate and log access""" """Return certificate-key pairs certificate and log access"""
certificate: CertificateKeyPair = self.get_object() certificate: CertificateKeyPair = self.get_object()
if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT: Event.new( # noqa # nosec
Event.new( # noqa # nosec EventAction.SECRET_VIEW,
EventAction.SECRET_VIEW, secret=certificate,
secret=certificate, type="certificate",
type="certificate", ).from_http(request)
).from_http(request)
if "download" in request.query_params: if "download" in request.query_params:
# Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
response = HttpResponse( response = HttpResponse(
@ -304,12 +302,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
def view_private_key(self, request: Request, pk: str) -> Response: def view_private_key(self, request: Request, pk: str) -> Response:
"""Return certificate-key pairs private key and log access""" """Return certificate-key pairs private key and log access"""
certificate: CertificateKeyPair = self.get_object() certificate: CertificateKeyPair = self.get_object()
if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT: Event.new( # noqa # nosec
Event.new( # noqa # nosec EventAction.SECRET_VIEW,
EventAction.SECRET_VIEW, secret=certificate,
secret=certificate, type="private_key",
type="private_key", ).from_http(request)
).from_http(request)
if "download" in request.query_params: if "download" in request.query_params:
# Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
response = HttpResponse(certificate.key_data, content_type="application/x-pem-file") response = HttpResponse(certificate.key_data, content_type="application/x-pem-file")

@ -132,14 +132,13 @@ class LicenseKey:
"""Get a summarized version of all (not expired) licenses""" """Get a summarized version of all (not expired) licenses"""
total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0) total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0)
for lic in License.objects.all(): for lic in License.objects.all():
if lic.is_valid: total.internal_users += lic.internal_users
total.internal_users += lic.internal_users total.external_users += lic.external_users
total.external_users += lic.external_users
total.license_flags.extend(lic.status.license_flags)
exp_ts = int(mktime(lic.expiry.timetuple())) exp_ts = int(mktime(lic.expiry.timetuple()))
if total.exp == 0: if total.exp == 0:
total.exp = exp_ts total.exp = exp_ts
total.exp = max(total.exp, exp_ts) total.exp = max(total.exp, exp_ts)
total.license_flags.extend(lic.status.license_flags)
return total return total
@staticmethod @staticmethod

@ -39,10 +39,6 @@ class License(SerializerModel):
internal_users = models.BigIntegerField() internal_users = models.BigIntegerField()
external_users = models.BigIntegerField() external_users = models.BigIntegerField()
@property
def is_valid(self) -> bool:
return self.expiry >= now()
@property @property
def serializer(self) -> type[BaseSerializer]: def serializer(self) -> type[BaseSerializer]:
from authentik.enterprise.api import LicenseSerializer from authentik.enterprise.api import LicenseSerializer

@ -1,27 +0,0 @@
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.used_by import UsedByMixin
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.policies.unique_password.models import UniquePasswordPolicy
from authentik.policies.api.policies import PolicySerializer
class UniquePasswordPolicySerializer(EnterpriseRequiredMixin, PolicySerializer):
"""Password Uniqueness Policy Serializer"""
class Meta:
model = UniquePasswordPolicy
fields = PolicySerializer.Meta.fields + [
"password_field",
"num_historical_passwords",
]
class UniquePasswordPolicyViewSet(UsedByMixin, ModelViewSet):
"""Password Uniqueness Policy Viewset"""
queryset = UniquePasswordPolicy.objects.all()
serializer_class = UniquePasswordPolicySerializer
filterset_fields = "__all__"
ordering = ["name"]
search_fields = ["name"]

@ -1,10 +0,0 @@
"""authentik Unique Password policy app config"""
from authentik.enterprise.apps import EnterpriseConfig
class AuthentikEnterprisePoliciesUniquePasswordConfig(EnterpriseConfig):
name = "authentik.enterprise.policies.unique_password"
label = "authentik_policies_unique_password"
verbose_name = "authentik Enterprise.Policies.Unique Password"
default = True

@ -1,81 +0,0 @@
# Generated by Django 5.0.13 on 2025-03-26 23:02
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
("authentik_policies", "0011_policybinding_failure_result_and_more"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="UniquePasswordPolicy",
fields=[
(
"policy_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="authentik_policies.policy",
),
),
(
"password_field",
models.TextField(
default="password",
help_text="Field key to check, field keys defined in Prompt stages are available.",
),
),
(
"num_historical_passwords",
models.PositiveIntegerField(
default=1, help_text="Number of passwords to check against."
),
),
],
options={
"verbose_name": "Password Uniqueness Policy",
"verbose_name_plural": "Password Uniqueness Policies",
"indexes": [
models.Index(fields=["policy_ptr_id"], name="authentik_p_policy__f559aa_idx")
],
},
bases=("authentik_policies.policy",),
),
migrations.CreateModel(
name="UserPasswordHistory",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("old_password", models.CharField(max_length=128)),
("created_at", models.DateTimeField(auto_now_add=True)),
("hibp_prefix_sha1", models.CharField(max_length=5)),
("hibp_pw_hash", models.TextField()),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="old_passwords",
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"verbose_name": "User Password History",
},
),
]

@ -1,151 +0,0 @@
from hashlib import sha1
from django.contrib.auth.hashers import identify_hasher, make_password
from django.db import models
from django.utils.translation import gettext as _
from rest_framework.serializers import BaseSerializer
from structlog.stdlib import get_logger
from authentik.core.models import User
from authentik.policies.models import Policy
from authentik.policies.types import PolicyRequest, PolicyResult
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
LOGGER = get_logger()
class UniquePasswordPolicy(Policy):
"""This policy prevents users from reusing old passwords."""
password_field = models.TextField(
default="password",
help_text=_("Field key to check, field keys defined in Prompt stages are available."),
)
# Limit on the number of previous passwords the policy evaluates
# Also controls number of old passwords the system stores.
num_historical_passwords = models.PositiveIntegerField(
default=1,
help_text=_("Number of passwords to check against."),
)
@property
def serializer(self) -> type[BaseSerializer]:
from authentik.enterprise.policies.unique_password.api import UniquePasswordPolicySerializer
return UniquePasswordPolicySerializer
@property
def component(self) -> str:
return "ak-policy-password-uniqueness-form"
def passes(self, request: PolicyRequest) -> PolicyResult:
from authentik.enterprise.policies.unique_password.models import UserPasswordHistory
password = request.context.get(PLAN_CONTEXT_PROMPT, {}).get(
self.password_field, request.context.get(self.password_field)
)
if not password:
LOGGER.warning(
"Password field not found in request when checking UniquePasswordPolicy",
field=self.password_field,
fields=request.context.keys(),
)
return PolicyResult(False, _("Password not set in context"))
password = str(password)
if not self.num_historical_passwords:
# Policy not configured to check against any passwords
return PolicyResult(True)
num_to_check = self.num_historical_passwords
password_history = UserPasswordHistory.objects.filter(user=request.user).order_by(
"-created_at"
)[:num_to_check]
if not password_history:
return PolicyResult(True)
for record in password_history:
if not record.old_password:
continue
if self._passwords_match(new_password=password, old_password=record.old_password):
# Return on first match. Authentik does not consider timing attacks
# on old passwords to be an attack surface.
return PolicyResult(
False,
_("This password has been used previously. Please choose a different one."),
)
return PolicyResult(True)
def _passwords_match(self, *, new_password: str, old_password: str) -> bool:
try:
hasher = identify_hasher(old_password)
except ValueError:
LOGGER.warning(
"Skipping password; could not load hash algorithm",
)
return False
return hasher.verify(new_password, old_password)
@classmethod
def is_in_use(cls):
"""Check if any UniquePasswordPolicy is in use, either through policy bindings
or direct attachment to a PromptStage.
Returns:
bool: True if any policy is in use, False otherwise
"""
from authentik.policies.models import PolicyBinding
# Check if any policy is in use through bindings
if PolicyBinding.in_use.for_policy(cls).exists():
return True
# Check if any policy is attached to a PromptStage
if cls.objects.filter(promptstage__isnull=False).exists():
return True
return False
class Meta(Policy.PolicyMeta):
verbose_name = _("Password Uniqueness Policy")
verbose_name_plural = _("Password Uniqueness Policies")
class UserPasswordHistory(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="old_passwords")
# Mimics the column type of AbstractBaseUser.password
old_password = models.CharField(max_length=128)
created_at = models.DateTimeField(auto_now_add=True)
hibp_prefix_sha1 = models.CharField(max_length=5)
hibp_pw_hash = models.TextField()
class Meta:
verbose_name = _("User Password History")
def __str__(self) -> str:
timestamp = f"{self.created_at:%Y/%m/%d %X}" if self.created_at else "N/A"
return f"Previous Password (user: {self.user_id}, recorded: {timestamp})"
@classmethod
def create_for_user(cls, user: User, password: str):
# To check users' passwords against Have I been Pwned, we need the first 5 chars
# of the password hashed with SHA1 without a salt...
pw_hash_sha1 = sha1(password.encode("utf-8")).hexdigest() # nosec
# ...however that'll give us a list of hashes from HIBP, and to compare that we still
# need a full unsalted SHA1 of the password. We don't want to save that directly in
# the database, so we hash that SHA1 again with a modern hashing alg,
# and then when we check users' passwords against HIBP we can use `check_password`
# which will take care of this.
hibp_hash_hash = make_password(pw_hash_sha1)
return cls.objects.create(
user=user,
old_password=password,
hibp_prefix_sha1=pw_hash_sha1[:5],
hibp_pw_hash=hibp_hash_hash,
)

@ -1,20 +0,0 @@
"""Unique Password Policy settings"""
from celery.schedules import crontab
from authentik.lib.utils.time import fqdn_rand
CELERY_BEAT_SCHEDULE = {
"policies_unique_password_trim_history": {
"task": "authentik.enterprise.policies.unique_password.tasks.trim_password_histories",
"schedule": crontab(minute=fqdn_rand("policies_unique_password_trim"), hour="*/12"),
"options": {"queue": "authentik_scheduled"},
},
"policies_unique_password_check_purge": {
"task": (
"authentik.enterprise.policies.unique_password.tasks.check_and_purge_password_history"
),
"schedule": crontab(minute=fqdn_rand("policies_unique_password_purge"), hour="*/24"),
"options": {"queue": "authentik_scheduled"},
},
}

Some files were not shown because too many files have changed in this diff.