Compare commits
61 Commits
core/refac...version/20

Commits (SHA1):
e8b5e4c127
81ec98b198
c46ab19e79
de9fc5de6b
eab3d9b411
7cb40d786f
b4fce08bbc
8a2ba1c518
25b4306693
1e279950f1
960429355f
b4f3748353
91d2445c61
dd8f809161
57a31b5dd1
09125b6236
832126c6fe
25fe489b34
18078fd68f
4fa71d995d
22cec64234
a87cc27366
ad7ad1fa78
c70e609e50
5f08485fff
3a2ed11821
ee04f39e28
2c6aa72f3c
bd0afef790
fc11cc0a1a
fb78303e8f
2ea04440db
96e1636be3
c546451a73
61778053b4
f5580d311d
99d292bce0
b2801641bc
bfaa1046b2
95c30400cc
e77480ee1d
905800e535
fadeaef4c6
437efda649
dd75d5f54b
392a2e582e
a1da183721
feea2df0b1
b47acd8c76
6fd87d9ced
acbb065808
2fb097061d
8962d17e03
8326e1490c
091e4d3e4c
6ee77edcbb
763e2288bf
9cdb177ca7
6070508058
ec13a5d84d
057de82b01

@@ -1,5 +1,5 @@
[bumpversion]
current_version = 2025.4.0
current_version = 2024.8.4
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
@@ -17,8 +1,6 @@ optional_value = final
[bumpversion:file:pyproject.toml]
[bumpversion:file:uv.lock]
[bumpversion:file:package.json]
[bumpversion:file:docker-compose.yml]
@@ -32,5 +30,3 @@ optional_value = final
[bumpversion:file:internal/constants/constants.go]
[bumpversion:file:web/src/common/constants.ts]
[bumpversion:file:lifecycle/aws/template.yaml]

.github/ISSUE_TEMPLATE/bug_report.md (6 changes)
@@ -28,11 +28,7 @@ Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
<!--
Notice: authentik supports installation via Docker, Kubernetes, and AWS CloudFormation only. Support is not available for other methods. For detailed installation and configuration instructions, please refer to the official documentation at https://docs.goauthentik.io/docs/install-config/.
-->
- authentik version: [e.g. 2025.2.0]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
**Additional context**

.github/ISSUE_TEMPLATE/docs_issue.md (22 changes)
@@ -1,22 +0,0 @@
---
name: Documentation issue
about: Suggest an improvement or report a problem
title: ""
labels: documentation
assignees: ""
---
**Do you see an area that can be clarified or expanded, a technical inaccuracy, or a broken link? Please describe.**
A clear and concise description of what the problem is, or where the document can be improved. Ex. I believe we need more details about [...]
**Provide the URL or link to the exact page in the documentation to which you are referring.**
If there are multiple pages, list them all, and be sure to state the header or section where the content is.
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Additional context**
Add any other context or screenshots about the documentation issue here.
**Consider opening a PR!**
If the issue is one that you can fix, or even make a good pass at, we'd appreciate a PR. For more information about making a contribution to the docs, and using our Style Guide and our templates, refer to ["Writing documentation"](https://docs.goauthentik.io/docs/developer-docs/docs/writing-documentation).

.github/ISSUE_TEMPLATE/question.md (7 changes)
@@ -20,12 +20,7 @@ Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
<!--
Notice: authentik supports installation via Docker, Kubernetes, and AWS CloudFormation only. Support is not available for other methods. For detailed installation and configuration instructions, please refer to the official documentation at https://docs.goauthentik.io/docs/install-config/.
-->
- authentik version: [e.g. 2025.2.0]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
**Additional context**

@@ -35,6 +35,14 @@ runs:
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
For arm64, use these values:
```shell
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
AUTHENTIK_TAG=${{ inputs.tag }}-arm64
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
<details>
@@ -52,6 +60,18 @@ runs:
tag: ${{ inputs.tag }}
```
For arm64, use these values:
```yaml
authentik:
outposts:
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
global:
image:
repository: ghcr.io/goauthentik/dev-server
tag: ${{ inputs.tag }}-arm64
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
edit-mode: replace

.github/actions/docker-push-variables/action.yml (20 changes)
@@ -9,14 +9,11 @@ inputs:
image-arch:
required: false
description: "Docker image arch"
release:
required: true
description: "True if this is a release build, false if this is a dev/PR build"
outputs:
shouldPush:
description: "Whether to push the image or not"
value: ${{ steps.ev.outputs.shouldPush }}
shouldBuild:
description: "Whether to build image or not"
value: ${{ steps.ev.outputs.shouldBuild }}
sha:
description: "sha"
@@ -32,24 +29,15 @@ outputs:
imageTags:
description: "Docker image tags"
value: ${{ steps.ev.outputs.imageTags }}
imageTagsJSON:
description: "Docker image tags, as a JSON array"
value: ${{ steps.ev.outputs.imageTagsJSON }}
attestImageNames:
description: "Docker image names used for attestation"
value: ${{ steps.ev.outputs.attestImageNames }}
cacheTo:
description: "cache-to value for the docker build step"
value: ${{ steps.ev.outputs.cacheTo }}
imageMainTag:
description: "Docker image main tag"
value: ${{ steps.ev.outputs.imageMainTag }}
imageMainName:
description: "Docker image main name"
value: ${{ steps.ev.outputs.imageMainName }}
imageBuildArgs:
description: "Docker image build args"
value: ${{ steps.ev.outputs.imageBuildArgs }}
runs:
using: "composite"
@@ -60,8 +48,6 @@ runs:
env:
IMAGE_NAME: ${{ inputs.image-name }}
IMAGE_ARCH: ${{ inputs.image-arch }}
RELEASE: ${{ inputs.release }}
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
REF: ${{ github.ref }}
run: |
python3 ${{ github.action_path }}/push_vars.py

@@ -2,20 +2,12 @@
import configparser
import os
from json import dumps
from time import time
parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
# Decide if we should push the image or not
should_push = True
if len(os.environ.get("DOCKER_USERNAME", "")) < 1:
# Don't push if we don't have DOCKER_USERNAME, i.e. no secrets are available
should_push = False
if os.environ.get("GITHUB_REPOSITORY").lower() == "goauthentik/authentik-internal":
# Don't push on the internal repo
should_push = False
should_build = str(len(os.environ.get("DOCKER_USERNAME", "")) > 0).lower()
branch_name = os.environ["GITHUB_REF"]
if os.environ.get("GITHUB_HEAD_REF", "") != "":
@@ -44,11 +36,12 @@ if is_release:
]
if not prerelease:
image_tags += [
f"{name}:latest",
f"{name}:{version_family}",
]
else:
suffix = ""
if image_arch:
if image_arch and image_arch != "amd64":
suffix = f"-{image_arch}"
for name in image_names:
image_tags += [
@@ -70,31 +63,12 @@ def get_attest_image_names(image_with_tags: list[str]):
return ",".join(set(image_tags))
# Generate `cache-to` param
cache_to = ""
if should_push:
_cache_tag = "buildcache"
if image_arch:
_cache_tag += f"-{image_arch}"
cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max"
image_build_args = []
if os.getenv("RELEASE", "false").lower() == "true":
image_build_args = [f"VERSION={os.getenv('REF')}"]
else:
image_build_args = [f"GIT_BUILD_HASH={sha}"]
image_build_args = "\n".join(image_build_args)
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print(f"shouldPush={str(should_push).lower()}", file=_output)
print(f"shouldBuild={should_build}", file=_output)
print(f"sha={sha}", file=_output)
print(f"version={version}", file=_output)
print(f"prerelease={prerelease}", file=_output)
print(f"imageTags={','.join(image_tags)}", file=_output)
print(f"imageTagsJSON={dumps(image_tags)}", file=_output)
print(f"attestImageNames={get_attest_image_names(image_tags)}", file=_output)
print(f"imageMainTag={image_main_tag}", file=_output)
print(f"imageMainName={image_tags[0]}", file=_output)
print(f"cacheTo={cache_to}", file=_output)
print(f"imageBuildArgs={image_build_args}", file=_output)

.github/actions/docker-push-variables/test.sh (11 changes)
@@ -1,18 +1,7 @@
#!/bin/bash -x
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
# Non-pushing PR
GITHUB_OUTPUT=/dev/stdout \
GITHUB_REF=ref \
GITHUB_SHA=sha \
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
GITHUB_REPOSITORY=goauthentik/authentik \
python $SCRIPT_DIR/push_vars.py
# Pushing PR/main
GITHUB_OUTPUT=/dev/stdout \
GITHUB_REF=ref \
GITHUB_SHA=sha \
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
GITHUB_REPOSITORY=goauthentik/authentik \
DOCKER_USERNAME=foo \
python $SCRIPT_DIR/push_vars.py

.github/actions/setup/action.yml (24 changes)
@@ -9,22 +9,17 @@ inputs:
runs:
using: "composite"
steps:
- name: Install apt deps
- name: Install poetry & deps
shell: bash
run: |
pipx install poetry || true
sudo apt-get update
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server
- name: Install uv
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
- name: Setup python
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry
uses: actions/setup-python@v5
with:
python-version-file: "pyproject.toml"
- name: Install Python deps
shell: bash
run: uv sync --all-extras --dev --frozen
cache: "poetry"
- name: Setup node
uses: actions/setup-node@v4
with:
@@ -35,20 +30,17 @@ runs:
uses: actions/setup-go@v5
with:
go-version-file: "go.mod"
- name: Setup docker cache
uses: ScribeMD/docker-cache@0.5.0
with:
key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
- name: Setup dependencies
shell: bash
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/docker-compose.yml up -d
poetry install
cd web && npm ci
- name: Generate config
shell: uv run python {0}
shell: poetry run python {0}
run: |
from authentik.crypto.generators import generate_id
from authentik.lib.generators import generate_id
from yaml import safe_dump
with open("local.env.yml", "w") as _config:

.github/actions/setup/docker-compose.yml (2 changes)
@@ -11,7 +11,7 @@ services:
- 5432:5432
restart: always
redis:
image: docker.io/library/redis:7
image: docker.io/library/redis
ports:
- 6379:6379
restart: always

.github/codespell-words.txt (33 changes)
@@ -1,32 +1,7 @@
akadmin
asgi
assertIn
authentik
authn
crate
docstrings
entra
goauthentik
gunicorn
hass
jwe
jwks
keypair
keypairs
kubernetes
oidc
ontext
openid
passwordless
plex
saml
scim
singed
slo
sso
totp
traefik
# https://github.com/codespell-project/codespell/issues/1224
upToDate
hass
warmup
webauthn
ontext
singed
assertIn

.github/dependabot.yml (37 changes)
@@ -23,6 +23,7 @@ updates:
- package-ecosystem: npm
directories:
- "/web"
- "/tests/wdio"
- "/web/sfe"
schedule:
interval: daily
@@ -43,11 +44,9 @@ updates:
- "babel-*"
eslint:
patterns:
- "@eslint/*"
- "@typescript-eslint/*"
- "eslint-*"
- "eslint"
- "typescript-eslint"
- "eslint-*"
storybook:
patterns:
- "@storybook/*"
@@ -55,12 +54,10 @@ updates:
esbuild:
patterns:
- "@esbuild/*"
- "esbuild*"
rollup:
patterns:
- "@rollup/*"
- "rollup-*"
- "rollup*"
swc:
patterns:
- "@swc/*"
@@ -82,23 +79,7 @@ updates:
docusaurus:
patterns:
- "@docusaurus/*"
build:
patterns:
- "@swc/*"
- "swc-*"
- "lightningcss*"
- "@rspack/binding*"
- package-ecosystem: npm
directory: "/lifecycle/aws"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "lifecycle/aws:"
labels:
- dependencies
- package-ecosystem: uv
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
@@ -118,15 +99,3 @@ updates:
prefix: "core:"
labels:
- dependencies
- package-ecosystem: docker-compose
directories:
# - /scripts # Maybe
- /tests/e2e
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies

.github/pull_request_template.md (2 changes)
@@ -1,7 +1,7 @@
<!--
👋 Hi there! Welcome.
Please check the Contributing guidelines: https://docs.goauthentik.io/docs/developer-docs/#how-can-i-contribute
Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute
-->
## Details

@@ -1,96 +0,0 @@
# Re-usable workflow for a single-architecture build
name: Single-arch Container build
on:
workflow_call:
inputs:
image_name:
required: true
type: string
image_arch:
required: true
type: string
runs-on:
required: true
type: string
registry_dockerhub:
default: false
type: boolean
registry_ghcr:
default: false
type: boolean
release:
default: false
type: boolean
outputs:
image-digest:
value: ${{ jobs.build.outputs.image-digest }}
jobs:
build:
name: Build ${{ inputs.image_arch }}
runs-on: ${{ inputs.runs-on }}
outputs:
image-digest: ${{ steps.push.outputs.digest }}
permissions:
# Needed to upload container images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
steps:
- uses: actions/checkout@v4
- uses: docker/setup-qemu-action@v3.6.0
- uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
image-arch: ${{ inputs.image_arch }}
release: ${{ inputs.release }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: make empty clients
if: ${{ inputs.release }}
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: generate ts client
if: ${{ !inputs.release }}
run: make gen-client-ts
- name: Build Docker Image
uses: docker/build-push-action@v6
id: push
with:
context: .
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
build-args: |
${{ steps.ev.outputs.imageBuildArgs }}
tags: ${{ steps.ev.outputs.imageTags }}
platforms: linux/${{ inputs.image_arch }}
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
cache-to: ${{ steps.ev.outputs.cacheTo }}
- uses: actions/attest-build-provenance@v2
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true

.github/workflows/_reusable-docker-build.yaml (104 changes)
@@ -1,104 +0,0 @@
# Re-usable workflow for a multi-architecture build
name: Multi-arch container build
on:
workflow_call:
inputs:
image_name:
required: true
type: string
registry_dockerhub:
default: false
type: boolean
registry_ghcr:
default: true
type: boolean
release:
default: false
type: boolean
outputs: {}
jobs:
build-server-amd64:
uses: ./.github/workflows/_reusable-docker-build-single.yaml
secrets: inherit
with:
image_name: ${{ inputs.image_name }}
image_arch: amd64
runs-on: ubuntu-latest
registry_dockerhub: ${{ inputs.registry_dockerhub }}
registry_ghcr: ${{ inputs.registry_ghcr }}
release: ${{ inputs.release }}
build-server-arm64:
uses: ./.github/workflows/_reusable-docker-build-single.yaml
secrets: inherit
with:
image_name: ${{ inputs.image_name }}
image_arch: arm64
runs-on: ubuntu-22.04-arm
registry_dockerhub: ${{ inputs.registry_dockerhub }}
registry_ghcr: ${{ inputs.registry_ghcr }}
release: ${{ inputs.release }}
get-tags:
runs-on: ubuntu-latest
needs:
- build-server-amd64
- build-server-arm64
outputs:
tags: ${{ steps.ev.outputs.imageTagsJSON }}
shouldPush: ${{ steps.ev.outputs.shouldPush }}
steps:
- uses: actions/checkout@v4
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
merge-server:
runs-on: ubuntu-latest
if: ${{ needs.get-tags.outputs.shouldPush == 'true' }}
needs:
- get-tags
- build-server-amd64
- build-server-arm64
strategy:
fail-fast: false
matrix:
tag: ${{ fromJson(needs.get-tags.outputs.tags) }}
steps:
- uses: actions/checkout@v4
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ${{ inputs.image_name }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: int128/docker-manifest-create-action@v2
id: build
with:
tags: ${{ matrix.tag }}
sources: |
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
- uses: actions/attest-build-provenance@v2
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.build.outputs.digest }}
push-to-registry: true

.github/workflows/api-py-publish.yml (2 changes)
@@ -7,7 +7,6 @@ on:
workflow_dispatch:
jobs:
build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
permissions:
id-token: write
@@ -30,6 +29,7 @@ jobs:
uses: actions/setup-python@v5
with:
python-version-file: "pyproject.toml"
cache: "poetry"
- name: Generate API Client
run: make gen-client-py
- name: Publish package

.github/workflows/api-ts-publish.yml (3 changes)
@@ -7,7 +7,6 @@ on:
workflow_dispatch:
jobs:
build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- id: generate_token
@@ -41,7 +40,7 @@ jobs:
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- uses: peter-evans/create-pull-request@v7
- uses: peter-evans/create-pull-request@v6
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}

.github/workflows/ci-aws-cfn.yml (46 changes)
@@ -1,46 +0,0 @@
name: authentik-ci-aws-cfn
on:
push:
branches:
- main
- next
- version-*
pull_request:
branches:
- main
- version-*
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
check-changes-applied:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: actions/setup-node@v4
with:
node-version-file: lifecycle/aws/package.json
cache: "npm"
cache-dependency-path: lifecycle/aws/package-lock.json
- working-directory: lifecycle/aws/
run: |
npm ci
- name: Check changes have been applied
run: |
uv run make aws-cfn
git diff --exit-code
ci-aws-cfn-mark:
if: always()
needs:
- check-changes-applied
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}

.github/workflows/ci-main-daily.yml (28 changes)
@@ -1,28 +0,0 @@
---
name: authentik-ci-main-daily
on:
workflow_dispatch:
schedule:
# Every night at 3am
- cron: "0 3 * * *"
jobs:
test-container:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
version:
- docs
- version-2025-2
- version-2024-12
steps:
- uses: actions/checkout@v4
- run: |
current="$(pwd)"
dir="/tmp/authentik/${{ matrix.version }}"
mkdir -p $dir
cd $dir
wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml
${current}/scripts/test_docker.sh

.github/workflows/ci-main.yml (152 changes)
@@ -34,7 +34,7 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run job
run: uv run make ci-${{ matrix.job }}
run: poetry run make ci-${{ matrix.job }}
test-migrations:
runs-on: ubuntu-latest
steps:
@@ -42,35 +42,26 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run migrations
run: uv run python -m lifecycle.migrate
test-make-seed:
runs-on: ubuntu-latest
steps:
- id: seed
run: |
echo "seed=$(printf "%d\n" "0x$(openssl rand -hex 4)")" >> "$GITHUB_OUTPUT"
outputs:
seed: ${{ steps.seed.outputs.seed }}
run: poetry run python -m lifecycle.migrate
test-migrations-from-stable:
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
timeout-minutes: 20
needs: test-make-seed
strategy:
fail-fast: false
matrix:
psql:
- 15-alpine
- 16-alpine
run_id: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: checkout stable
run: |
# Delete all poetry envs
rm -rf /home/runner/.cache/pypoetry
# Copy current, latest config to local
cp authentik/common/config/default.yml local.env.yml
cp authentik/lib/default.yml local.env.yml
cp -R .github ..
cp -R scripts ..
git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1)
@@ -81,7 +72,7 @@ jobs:
with:
postgresql_version: ${{ matrix.psql }}
- name: run migrations to stable
run: uv run python -m lifecycle.migrate
run: poetry run python -m lifecycle.migrate
- name: checkout current code
run: |
set -x
@@ -89,34 +80,31 @@ jobs:
git reset --hard HEAD
git clean -d -fx .
git checkout $GITHUB_SHA
# Delete previous poetry env
rm -rf /home/runner/.cache/pypoetry/virtualenvs/*
- name: Setup authentik env (ensure latest deps are installed)
uses: ./.github/actions/setup
with:
postgresql_version: ${{ matrix.psql }}
- name: migrate to latest
run: |
uv run python -m lifecycle.migrate
poetry run python -m lifecycle.migrate
- name: run tests
env:
# Test in the main database that we just migrated from the previous stable version
AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
run: |
uv run make ci-test
poetry run make test
test-unittest:
name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
name: test-unittest - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
timeout-minutes: 20
needs: test-make-seed
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
psql:
- 15-alpine
- 16-alpine
run_id: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
- name: Setup authentik env
@@ -124,23 +112,14 @@ jobs:
with:
postgresql_version: ${{ matrix.psql }}
- name: run unittest
env:
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
run: |
uv run make ci-test
poetry run make test
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: unit
token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: unit
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
test-integration:
runs-on: ubuntu-latest
timeout-minutes: 30
@@ -149,22 +128,16 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Create k8s Kind Cluster
uses: helm/kind-action@v1.12.0
uses: helm/kind-action@v1.10.0
- name: run integration
run: |
uv run coverage run manage.py test tests/integration
uv run coverage xml
poetry run coverage run manage.py test tests/integration
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: integration
token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: integration
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
test-e2e:
name: test-e2e (${{ matrix.job.name }})
runs-on: ubuntu-latest
@@ -195,7 +168,7 @@ jobs:
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
run: |
docker compose -f tests/e2e/docker-compose.yml up -d --quiet-pull
docker compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v4
with:
@@ -210,21 +183,14 @@ jobs:
npm run build
- name: run e2e
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage xml
poetry run coverage run manage.py test ${{ matrix.job.glob }}
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@v4
with:
flags: e2e
token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: e2e
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
ci-core-mark:
if: always()
needs:
- lint
- test-migrations
@@ -234,22 +200,70 @@ jobs:
- test-e2e
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
- run: echo mark
build:
strategy:
fail-fast: false
matrix:
arch:
- amd64
- arm64
needs: ci-core-mark
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
needs: ci-core-mark
uses: ./.github/workflows/_reusable-docker-build.yaml
secrets: inherit
with:
image_name: ghcr.io/goauthentik/dev-server
release: false
timeout-minutes: 120
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/dev-server
image-arch: ${{ matrix.arch }}
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: generate ts client
run: make gen-client-ts
- name: Build Docker Image
uses: docker/build-push-action@v6
id: push
with:
context: .
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
tags: ${{ steps.ev.outputs.imageTags }}
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache
cache-to: ${{ steps.ev.outputs.shouldBuild == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max' || '' }}
platforms: linux/${{ matrix.arch }}
- uses: actions/attest-build-provenance@v1
id: attest
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
pr-comment:
needs:
- build
@@ -271,7 +285,7 @@ jobs:
with:
image-name: ghcr.io/goauthentik/dev-server
- name: Comment on PR
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
uses: ./.github/actions/comment-pr-instructions
with:
tag: ${{ steps.ev.outputs.imageMainTag }}

.github/workflows/ci-outpost.yml (21 changes)
@@ -29,7 +29,7 @@ jobs:
- name: Generate API
run: make gen-client-go
- name: golangci-lint
uses: golangci/golangci-lint-action@v8
uses: golangci/golangci-lint-action@v6
with:
version: latest
args: --timeout 5000s --verbose
@@ -49,15 +49,12 @@ jobs:
run: |
go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./...
ci-outpost-mark:
if: always()
needs:
- lint-golint
- test-unittest
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
- run: echo mark
build-container:
timeout-minutes: 120
needs:
@@ -72,7 +69,7 @@ jobs:
- rac
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
@@ -82,7 +79,7 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.6.0
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
@@ -93,7 +90,7 @@ jobs:
with:
image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
@@ -107,16 +104,16 @@ jobs:
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: ${{ matrix.type }}.Dockerfile
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
platforms: linux/amd64,linux/arm64
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }}
- uses: actions/attest-build-provenance@v2
cache-to: ${{ steps.ev.outputs.shouldBuild == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }}
- uses: actions/attest-build-provenance@v1
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}

.github/workflows/ci-web.yml (25 changes)
@@ -24,11 +24,17 @@ jobs:
- prettier-check
project:
- web
- tests/wdio
include:
- command: tsc
project: web
- command: lit-analyse
project: web
exclude:
- command: lint:lockfile
project: tests/wdio
- command: tsc
project: tests/wdio
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
@@ -39,12 +45,21 @@ jobs:
- working-directory: ${{ matrix.project }}/
run: |
npm ci
${{ matrix.extra_setup }}
- name: Generate API
run: make gen-client-ts
- name: Lint
working-directory: ${{ matrix.project }}/
run: npm run ${{ matrix.command }}
ci-web-mark:
needs:
- lint
runs-on: ubuntu-latest
steps:
- run: echo mark
build:
needs:
- ci-web-mark
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -60,16 +75,6 @@ jobs:
- name: build
working-directory: web/
run: npm run build
ci-web-mark:
if: always()
needs:
- build
- lint
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
test:
needs:
- ci-web-mark

.github/workflows/ci-website.yml (5 changes)
@@ -62,13 +62,10 @@ jobs:
working-directory: website/
run: npm run ${{ matrix.job }}
ci-website-mark:
if: always()
needs:
- lint
- test
- build
runs-on: ubuntu-latest
steps:
- uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
- run: echo mark

@@ -2,7 +2,7 @@ name: authentik-gen-update-webauthn-mds
on:
workflow_dispatch:
schedule:
- cron: "30 1 1,15 * *"
- cron: '30 1 1,15 * *'
env:
POSTGRES_DB: authentik
@@ -11,7 +11,6 @@ env:
jobs:
build:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- id: generate_token
@@ -24,8 +23,8 @@ jobs:
token: ${{ steps.generate_token.outputs.token }}
- name: Setup authentik env
uses: ./.github/actions/setup
- run: uv run ak update_webauthn_mds
- uses: peter-evans/create-pull-request@v7
- run: poetry run ak update_webauthn_mds
- uses: peter-evans/create-pull-request@v6
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}

.github/workflows/ghcr-retention.yml (1 change)
@@ -7,7 +7,6 @@ on:
jobs:
clean-ghcr:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
name: Delete old unused container images
runs-on: ubuntu-latest
steps:

.github/workflows/image-compress.yml (2 changes)
@@ -42,7 +42,7 @@ jobs:
with:
githubToken: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }}
- uses: peter-evans/create-pull-request@v7
- uses: peter-evans/create-pull-request@v6
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
id: cpr
with:

.github/workflows/packages-npm-publish.yml (45 changes)
@@ -1,45 +0,0 @@
name: authentik-packages-npm-publish
on:
push:
branches: [main]
paths:
- packages/docusaurus-config/**
- packages/eslint-config/**
- packages/prettier-config/**
- packages/tsconfig/**
workflow_dispatch:
jobs:
publish:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
package:
- docusaurus-config
- eslint-config
- prettier-config
- tsconfig
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2
- uses: actions/setup-node@v4
with:
node-version-file: packages/${{ matrix.package }}/package.json
registry-url: "https://registry.npmjs.org"
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
with:
files: |
packages/${{ matrix.package }}/package.json
- name: Publish package
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: packages/${{ matrix.package}}
run: |
npm ci
npm run build
npm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}

.github/workflows/publish-source-docs.yml (5 changes)
@@ -12,7 +12,6 @@ env:
jobs:
publish-source-docs:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
@@ -21,8 +20,8 @@ jobs:
uses: ./.github/actions/setup
- name: generate docs
run: |
uv run make migrate
uv run ak build_source_docs
poetry run make migrate
poetry run ak build_source_docs
- name: Publish
uses: netlify/actions/cli@master
with:

.github/workflows/release-next-branch.yml (1 change)
@@ -11,7 +11,6 @@ permissions:
jobs:
update-next:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
environment: internal-production
steps:

.github/workflows/release-publish.yml (86 changes)
@@ -7,23 +7,64 @@ on:
jobs:
build-server:
uses: ./.github/workflows/_reusable-docker-build.yaml
secrets: inherit
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
attestations: write
with:
image_name: ghcr.io/goauthentik/server,beryju/authentik
release: true
registry_dockerhub: true
registry_ghcr: true
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/server,beryju/authentik
- name: Docker Login Registry
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: make empty clients
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: Build Docker Image
uses: docker/build-push-action@v6
id: push
with:
context: .
push: true
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
build-args: |
VERSION=${{ github.ref }}
tags: ${{ steps.ev.outputs.imageTags }}
platforms: linux/amd64,linux/arm64
- uses: actions/attest-build-provenance@v1
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
build-outpost:
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
# Needed to upload contianer images to ghcr.io
packages: write
# Needed for attestation
id-token: write
@@ -42,7 +83,7 @@ jobs:
with:
go-version-file: "go.mod"
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.6.0
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
@@ -78,7 +119,7 @@ jobs:
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
context: .
- uses: actions/attest-build-provenance@v2
- uses: actions/attest-build-provenance@v1
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
@@ -128,27 +169,6 @@ jobs:
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
tag: ${{ github.ref }}
upload-aws-cfn-template:
permissions:
# Needed for AWS login
id-token: write
contents: read
needs:
- build-server
- build-outpost
env:
AWS_REGION: eu-central-1
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: aws-actions/configure-aws-credentials@v4
with:
role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik"
aws-region: ${{ env.AWS_REGION }}
- name: Upload template
run: |
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml
test-release:
needs:
- build-server
@@ -186,7 +206,7 @@ jobs:
container=$(docker container create ${{ steps.ev.outputs.imageMainName }})
docker cp ${container}:web/ .
- name: Create a Sentry.io release
uses: getsentry/action-release@v3
uses: getsentry/action-release@v1
continue-on-error: true
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}

.github/workflows/release-tag.yml (11 changes)
@@ -14,7 +14,16 @@ jobs:
- uses: actions/checkout@v4
- name: Pre-release test
run: |
make test-docker
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
docker buildx install
mkdir -p ./gen-ts-api
docker build -t testing:latest .
echo "AUTHENTIK_IMAGE=testing" >> .env
echo "AUTHENTIK_TAG=latest" >> .env
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
- id: generate_token
uses: tibdex/github-app-token@v2
with:

.github/workflows/repo-mirror.yml (21 changes)
@@ -1,21 +0,0 @@
name: "authentik-repo-mirror"
on: [push, delete]
jobs:
to_internal:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- if: ${{ env.MIRROR_KEY != '' }}
uses: pixta-dev/repository-mirroring-action@v1
with:
target_repo_url:
git@github.com:goauthentik/authentik-internal.git
ssh_private_key:
${{ secrets.GH_MIRROR_KEY }}
env:
MIRROR_KEY: ${{ secrets.GH_MIRROR_KEY }}

.github/workflows/repo-stale.yml (7 changes)
@@ -1,8 +1,8 @@
name: "authentik-repo-stale"
name: 'authentik-repo-stale'
on:
schedule:
- cron: "30 1 * * *"
- cron: '30 1 * * *'
workflow_dispatch:
permissions:
@@ -11,7 +11,6 @@ permissions:
jobs:
stale:
if: ${{ github.repository != 'goauthentik/authentik-internal' }}
runs-on: ubuntu-latest
steps:
- id: generate_token
@@ -25,7 +24,7 @@ jobs:
days-before-stale: 60
days-before-close: 7
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
stale-issue-label: status/stale
stale-issue-label: wontfix
stale-issue-message: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you

.github/workflows/semgrep.yml (27 changes)
@@ -1,27 +0,0 @@
name: authentik-semgrep
on:
workflow_dispatch: {}
pull_request: {}
push:
branches:
- main
- master
paths:
- .github/workflows/semgrep.yml
schedule:
# random HH:MM to avoid a load spike on GitHub Actions at 00:00
- cron: '12 15 * * *'
jobs:
semgrep:
name: semgrep/ci
runs-on: ubuntu-latest
permissions:
contents: read
env:
SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }}
container:
image: semgrep/semgrep
if: (github.actor != 'dependabot[bot]')
steps:
- uses: actions/checkout@v4
- run: semgrep ci

@@ -1,13 +1,9 @@
---
name: authentik-translate-extract-compile
name: authentik-backend-translate-extract-compile
on:
schedule:
- cron: "0 0 * * *" # every day at midnight
workflow_dispatch:
pull_request:
branches:
- main
- version-*
env:
POSTGRES_DB: authentik
@@ -19,31 +15,24 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
if: ${{ github.event_name != 'pull_request' }}
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v4
if: ${{ github.event_name != 'pull_request' }}
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/checkout@v4
if: ${{ github.event_name == 'pull_request' }}
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Generate API
run: make gen-client-ts
- name: run extract
run: |
uv run make i18n-extract
poetry run make i18n-extract
- name: run compile
run: |
uv run ak compilemessages
poetry run ak compilemessages
make web-check-compile
- name: Create Pull Request
if: ${{ github.event_name != 'pull_request' }}
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v6
with:
token: ${{ steps.generate_token.outputs.token }}
branch: extract-compile-backend-translation

.gitignore (8 changes)
@@ -11,10 +11,6 @@ local_settings.py
db.sqlite3
media
# Node
node_modules
# If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/
# in your Git repository. Update and uncomment the following line accordingly.
# <django-project-name>/staticfiles/
@@ -37,7 +33,6 @@ eggs/
lib64/
parts/
dist/
out/
sdist/
var/
wheels/
@@ -214,6 +209,3 @@ source_docs/
### Golang ###
/vendor/
### Docker ###
docker-compose.override.yml

@@ -1,47 +0,0 @@
# Prettier Ignorefile
## Static Files
**/LICENSE
authentik/stages/**/*
## Build asset directories
coverage
dist
out
.docusaurus
website/docs/developer-docs/api/**/*
## Environment
*.env
## Secrets
*.secrets
## Yarn
.yarn/**/*
## Node
node_modules
coverage
## Configs
*.log
*.yaml
*.yml
# Templates
# TODO: Rename affected files to *.template.* or similar.
*.html
*.mdx
*.md
## Import order matters
poly.ts
src/locale-codes.ts
src/locales/
# Storybook
storybook-static/
.storybook/css-import-maps*

.vscode/extensions.json (7 changes)
@@ -2,7 +2,6 @@
"recommendations": [
"bashmish.es6-string-css",
"bpruitt-goddard.mermaid-markdown-syntax-highlighting",
"charliermarsh.ruff",
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"esbenp.prettier-vscode",
@@ -11,12 +10,12 @@
"Gruntfuggly.todo-tree",
"mechatroner.rainbow-csv",
"ms-python.black-formatter",
"ms-python.black-formatter",
"ms-python.debugpy",
"charliermarsh.ruff",
"ms-python.python",
"ms-python.vscode-pylance",
"ms-python.black-formatter",
"redhat.vscode-yaml",
"Tobermory.es6-string-html",
"unifiedjs.vscode-mdx",
"unifiedjs.vscode-mdx"
]
}

.vscode/launch.json (66 changes)
@@ -2,76 +2,26 @@
"version": "0.2.0",
"configurations": [
{
"name": "Debug: Attach Server Core",
"type": "debugpy",
"name": "Python: PDB attach Server",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 9901
"port": 6800
},
"pathMappings": [
{
"localRoot": "${workspaceFolder}",
"remoteRoot": "."
}
],
"justMyCode": true,
"django": true
},
{
"name": "Debug: Attach Worker",
"type": "debugpy",
"name": "Python: PDB attach Worker",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 9901
"port": 6900
},
"pathMappings": [
{
"localRoot": "${workspaceFolder}",
"remoteRoot": "."
}
],
"justMyCode": true,
"django": true
},
{
"name": "Debug: Start Server Router",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/server",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start LDAP Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/ldap",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start Proxy Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/proxy",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start RAC Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/rac",
"cwd": "${workspaceFolder}"
},
{
"name": "Debug: Start Radius Outpost",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/radius",
"cwd": "${workspaceFolder}"
}
]
}
30 .vscode/settings.json vendored
@ -1,4 +1,25 @@
{
"cSpell.words": [
"akadmin",
"asgi",
"authentik",
"authn",
"entra",
"goauthentik",
"jwks",
"kubernetes",
"oidc",
"openid",
"passwordless",
"plex",
"saml",
"scim",
"slo",
"sso",
"totp",
"traefik",
"webauthn"
],
"todo-tree.tree.showCountsInTree": true,
"todo-tree.tree.showBadges": true,
"yaml.customTags": [
@ -11,12 +32,11 @@
"!If sequence",
"!Index scalar",
"!KeyOf scalar",
"!Value scalar",
"!AtIndex scalar"
"!Value scalar"
],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.importModuleSpecifierEnding": "index",
"typescript.tsdk": "./node_modules/typescript/lib",
"typescript.tsdk": "./web/node_modules/typescript/lib",
"typescript.enablePromptUseWorkspaceTsdk": true,
"yaml.schemas": {
"./blueprints/schema.json": "blueprints/**/*.yaml"
@ -30,5 +50,7 @@
}
],
"go.testFlags": ["-count=1"],
"github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"]
"github-actions.workflows.pinned.workflows": [
".github/workflows/ci-main.yml"
]
}
46 .vscode/tasks.json vendored
@ -3,13 +3,8 @@
"tasks": [
{
"label": "authentik/core: make",
"command": "uv",
"args": [
"run",
"make",
"lint-fix",
"lint"
],
"command": "poetry",
"args": ["run", "make", "lint-fix", "lint"],
"presentation": {
"panel": "new"
},
@ -17,12 +12,8 @@
},
{
"label": "authentik/core: run",
"command": "uv",
"args": [
"run",
"ak",
"server"
],
"command": "poetry",
"args": ["run", "ak", "server"],
"group": "build",
"presentation": {
"panel": "dedicated",
@ -32,17 +23,13 @@
{
"label": "authentik/web: make",
"command": "make",
"args": [
"web"
],
"args": ["web"],
"group": "build"
},
{
"label": "authentik/web: watch",
"command": "make",
"args": [
"web-watch"
],
"args": ["web-watch"],
"group": "build",
"presentation": {
"panel": "dedicated",
@ -52,26 +39,19 @@
{
"label": "authentik: install",
"command": "make",
"args": [
"install",
"-j4"
],
"args": ["install", "-j4"],
"group": "build"
},
{
"label": "authentik/website: make",
"command": "make",
"args": [
"website"
],
"args": ["website"],
"group": "build"
},
{
"label": "authentik/website: watch",
"command": "make",
"args": [
"website-watch"
],
"args": ["website-watch"],
"group": "build",
"presentation": {
"panel": "dedicated",
@ -80,12 +60,8 @@
},
{
"label": "authentik/api: generate",
"command": "uv",
"args": [
"run",
"make",
"gen"
],
"command": "poetry",
"args": ["run", "make", "gen"],
"group": "build"
}
]
15 CODEOWNERS
@ -10,30 +10,19 @@ schemas/ @goauthentik/backend
scripts/ @goauthentik/backend
tests/ @goauthentik/backend
pyproject.toml @goauthentik/backend
uv.lock @goauthentik/backend
poetry.lock @goauthentik/backend
go.mod @goauthentik/backend
go.sum @goauthentik/backend
# Infrastructure
.github/ @goauthentik/infrastructure
lifecycle/aws/ @goauthentik/infrastructure
Dockerfile @goauthentik/infrastructure
*Dockerfile @goauthentik/infrastructure
.dockerignore @goauthentik/infrastructure
docker-compose.yml @goauthentik/infrastructure
Makefile @goauthentik/infrastructure
.editorconfig @goauthentik/infrastructure
CODEOWNERS @goauthentik/infrastructure
# Web packages
packages/ @goauthentik/frontend
# Web
web/ @goauthentik/frontend
tests/wdio/ @goauthentik/frontend
# Locale
locale/ @goauthentik/backend @goauthentik/frontend
web/xliff/ @goauthentik/backend @goauthentik/frontend
# Docs & Website
website/ @goauthentik/docs
CODE_OF_CONDUCT.md @goauthentik/docs
# Security
SECURITY.md @goauthentik/security @goauthentik/docs
website/docs/security/ @goauthentik/security @goauthentik/docs
website/docs/security/ @goauthentik/security
@ -5,7 +5,7 @@
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socioeconomic status,
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
@ -1 +1 @@
website/docs/developer-docs/index.md
website/developer-docs/index.md
87 Dockerfile
@ -43,7 +43,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
RUN npm run build

# Stage 3: Build go proxy
FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder
FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.23-fips-bookworm AS go-builder

ARG TARGETOS
ARG TARGETARCH
@ -76,75 +76,55 @@ COPY ./go.sum /go/src/goauthentik.io/go.sum
RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
--mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \
CGO_ENABLED=1 GOFIPS140=latest GOARM="${TARGETVARIANT#v}" \
CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \
go build -o /go/authentik ./cmd/server

# Stage 4: MaxMind GeoIP
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 AS geoip

ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
ENV GEOIPUPDATE_VERBOSE="1"
ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"

USER root
RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
--mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
mkdir -p /usr/share/GeoIP && \
/bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

# Stage 5: Download uv
FROM ghcr.io/astral-sh/uv:0.7.3 AS uv
# Stage 6: Base python image
FROM ghcr.io/goauthentik/fips-python:3.13.3-slim-bookworm-fips AS python-base

ENV VENV_PATH="/ak-root/.venv" \
PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
UV_COMPILE_BYTECODE=1 \
UV_LINK_MODE=copy \
UV_NATIVE_TLS=1 \
UV_PYTHON_DOWNLOADS=0

WORKDIR /ak-root/

COPY --from=uv /uv /uvx /bin/

# Stage 7: Python dependencies
FROM python-base AS python-deps
# Stage 5: Python dependencies
FROM ghcr.io/goauthentik/fips-python:3.12.5-slim-bookworm-fips-full AS python-deps

ARG TARGETARCH
ARG TARGETVARIANT

RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
WORKDIR /ak-root/poetry

ENV PATH="/root/.cargo/bin:$PATH"
ENV VENV_PATH="/ak-root/venv" \
POETRY_VIRTUALENVS_CREATE=false \
PATH="/ak-root/venv/bin:$PATH"

RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache

RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
apt-get update && \
# Required for installing pip packages
apt-get install -y --no-install-recommends \
# Build essentials
build-essential pkg-config libffi-dev git \
# cryptography
curl \
# libxml
libxslt-dev zlib1g-dev \
# postgresql
libpq-dev \
# python-kadmin-rs
clang libkrb5-dev sccache \
# xmlsec
libltdl-dev && \
curl https://sh.rustup.rs -sSf | sh -s -- -y
apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev

ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec"
RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
--mount=type=bind,target=./poetry.lock,src=./poetry.lock \
--mount=type=cache,target=/root/.cache/pip \
--mount=type=cache,target=/root/.cache/pypoetry \
python -m venv /ak-root/venv/ && \
bash -c "source ${VENV_PATH}/bin/activate && \
pip3 install --upgrade pip && \
pip3 install poetry && \
poetry install --only=main --no-ansi --no-interaction --no-root && \
pip install --force-reinstall /wheels/*"

RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \
--mount=type=bind,target=uv.lock,src=uv.lock \
--mount=type=cache,target=/root/.cache/uv \
uv sync --frozen --no-install-project --no-dev

# Stage 8: Run
FROM python-base AS final-image
# Stage 6: Run
FROM ghcr.io/goauthentik/fips-python:3.12.5-slim-bookworm-fips-full AS final-image

ARG VERSION
ARG GIT_BUILD_HASH
@ -160,12 +140,10 @@ WORKDIR /

# We cannot cache this layer otherwise we'll end up with a bigger image
RUN apt-get update && \
apt-get upgrade -y && \
# Required for runtime
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 libltdl7 libxslt1.1 && \
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates && \
# Required for bootstrap & healtcheck
apt-get install -y --no-install-recommends runit && \
pip3 install --no-cache-dir --upgrade pip && \
apt-get clean && \
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
@ -176,16 +154,15 @@ RUN apt-get update && \

COPY ./authentik/ /authentik
COPY ./pyproject.toml /
COPY ./uv.lock /
COPY ./poetry.lock /
COPY ./schemas /schemas
COPY ./locale /locale
COPY ./tests /tests
COPY ./manage.py /
COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle
COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
COPY --from=go-builder /go/authentik /bin/authentik
COPY --from=python-deps /ak-root/.venv /ak-root/.venv
COPY --from=python-deps /ak-root/venv /ak-root/venv
COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=website-builder /work/website/build/ /website/help/
@ -196,7 +173,11 @@ USER 1000
ENV TMPDIR=/dev/shm/ \
PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
GOFIPS=1
PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
VENV_PATH="/ak-root/venv" \
POETRY_VIRTUALENVS_CREATE=false

ENV GOFIPS=1

HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]
86 Makefile
@ -4,7 +4,7 @@
PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.generate_semver)
NPM_VERSION = $(shell python -m scripts.npm_version)
PY_SOURCES = authentik tests scripts lifecycle .github
DOCKER_IMAGE ?= "authentik:test"

@ -12,9 +12,24 @@ GEN_API_TS = "gen-ts-api"
GEN_API_PY = "gen-py-api"
GEN_API_GO = "gen-go-api"

pg_user := $(shell uv run python -m authentik.common.config postgresql.user 2>/dev/null)
pg_host := $(shell uv run python -m authentik.common.config postgresql.host 2>/dev/null)
pg_name := $(shell uv run python -m authentik.common.config postgresql.name 2>/dev/null)
pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)

CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
-I .github/codespell-words.txt \
-S 'web/src/locales/**' \
-S 'website/developer-docs/api/reference/**' \
authentik \
internal \
cmd \
web/src \
website/src \
website/blog \
website/developer-docs \
website/docs \
website/integrations \
website/src

all: lint-fix lint test gen web ## Lint, build, and test everything

@ -31,38 +46,41 @@ help: ## Show this help
go-test:
go test -timeout 0 -v -race -cover ./...

test-docker: ## Run all tests in a docker-compose
echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env
docker compose pull -q
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
rm -f .env

test: ## Run the server tests and produce a coverage report (locally)
uv run coverage run manage.py test --keepdb authentik
uv run coverage html
uv run coverage report
coverage run manage.py test --keepdb authentik
coverage html
coverage report

lint-fix: lint-codespell ## Lint and automatically fix errors in the python source code. Reports spelling errors.
uv run black $(PY_SOURCES)
uv run ruff check --fix $(PY_SOURCES)
black $(PY_SOURCES)
ruff check --fix $(PY_SOURCES)

lint-codespell: ## Reports spelling errors.
uv run codespell -w
codespell -w $(CODESPELL_ARGS)

lint: ## Lint the python and golang sources
uv run bandit -c pyproject.toml -r $(PY_SOURCES)
bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules
golangci-lint run -v

core-install:
uv sync --frozen
poetry install

migrate: ## Run the Authentik Django server's migrations
uv run python -m lifecycle.migrate
python -m lifecycle.migrate

i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service

aws-cfn:
cd lifecycle/aws && npm run aws-cfn

run: ## Run the main authentik server process
uv run ak server

core-i18n-extract:
uv run ak makemessages \
ak makemessages \
--add-location file \
--no-obsolete \
--ignore web \
@ -93,11 +111,11 @@ gen-build: ## Extract the schema from the database
AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
uv run ak make_blueprint_schema > blueprints/schema.json
ak make_blueprint_schema > blueprints/schema.json
AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
uv run ak spectacular --file schema.yml
ak spectacular --file schema.yml

gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
@ -132,7 +150,7 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/${GEN_API_TS} \
@ -148,7 +166,7 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \
-i /local/schema.yml \
-g python \
-o /local/${GEN_API_PY} \
@ -176,7 +194,7 @@ gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/

gen-dev-config: ## Generate a local development config file
uv run scripts/generate_config.py
python -m scripts.generate_config

gen: gen-build gen-client-ts

@ -243,9 +261,6 @@ docker: ## Build a docker image of the current source tree
mkdir -p ${GEN_API_TS}
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}

test-docker:
BUILD=true ./scripts/test_docker.sh

#########################
## CI
#########################
@ -257,21 +272,16 @@ ci--meta-debug:
node --version

ci-black: ci--meta-debug
uv run black --check $(PY_SOURCES)
black --check $(PY_SOURCES)

ci-ruff: ci--meta-debug
uv run ruff check $(PY_SOURCES)
ruff check $(PY_SOURCES)

ci-codespell: ci--meta-debug
uv run codespell -s
codespell $(CODESPELL_ARGS) -s

ci-bandit: ci--meta-debug
uv run bandit -r $(PY_SOURCES)
bandit -r $(PY_SOURCES)

ci-pending-migrations: ci--meta-debug
uv run ak makemigrations --check

ci-test: ci--meta-debug
uv run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
uv run coverage report
uv run coverage xml
ak makemigrations --check
@ -34,7 +34,7 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h

## Development

See [Developer Documentation](https://docs.goauthentik.io/docs/developer-docs/?utm_source=github)
See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github)

## Security

@ -42,4 +42,4 @@ See [SECURITY.md](SECURITY.md)

## Adoption and Contributions

Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [contribution guide](https://docs.goauthentik.io/docs/developer-docs?utm_source=github).
Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).
10 SECURITY.md
@ -2,7 +2,7 @@ authentik takes security very seriously. We follow the rules of [responsible di

## Independent audits and pentests

We are committed to engaging in regular pentesting and security audits of authentik. Defining and adhering to a cadence of external testing ensures a stronger probability that our code base, our features, and our architecture is as secure and non-exploitable as possible. For more details about specific audits and pentests, refer to "Audits and Certificates" in our [Security documentation](https://docs.goauthentik.io/docs/security).
In May/June of 2023 [Cure53](https://cure53.de) conducted an audit and pentest. The [results](https://cure53.de/pentest-report_authentik.pdf) are published on the [Cure53 website](https://cure53.de/#publications-2023). For more details about authentik's response to the findings of the audit refer to [2023-06 Cure53 Code audit](https://goauthentik.io/docs/security/2023-06-cure53).

## What authentik classifies as a CVE

@ -18,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni

(.x being the latest patch release for each version)

| Version | Supported |
| --------- | --------- |
| 2025.2.x | ✅ |
| 2025.4.x | ✅ |
| Version | Supported |
| -------- | --------- |
| 2024.4.x | ✅ |
| 2024.6.x | ✅ |

## Reporting a Vulnerability
@ -2,7 +2,7 @@

from os import environ

__version__ = "2025.4.0"
__version__ = "2024.8.4"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

@ -16,5 +16,5 @@ def get_full_version() -> str:
"""Get full version, with build hash appended"""
version = __version__
if (build_hash := get_build_hash()) != "":
return f"{version}+{build_hash}"
version += "." + build_hash
return version
@ -7,8 +7,8 @@ from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet

from authentik.common.utils.reflection import get_apps
from authentik.core.api.utils import PassiveSerializer
from authentik.lib.utils.reflection import get_apps
from authentik.policies.event_matcher.models import model_choices
@ -7,10 +7,7 @@ from sys import version as python_version
from typing import TypedDict

from cryptography.hazmat.backends.openssl.backend import backend
from django.apps import apps
from django.conf import settings
from django.utils.timezone import now
from django.views.debug import SafeExceptionReporterFilter
from drf_spectacular.utils import extend_schema
from rest_framework.fields import SerializerMethodField
from rest_framework.request import Request
@ -18,10 +15,12 @@ from rest_framework.response import Response
from rest_framework.views import APIView

from authentik import get_full_version
from authentik.common.config import CONFIG
from authentik.common.utils.reflection import get_env
from authentik.core.api.utils import PassiveSerializer
from authentik.enterprise.license import LicenseKey
from authentik.lib.config import CONFIG
from authentik.lib.utils.reflection import get_env
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost
from authentik.rbac.permissions import HasPermission

@ -53,16 +52,10 @@ class SystemInfoSerializer(PassiveSerializer):
def get_http_headers(self, request: Request) -> dict[str, str]:
"""Get HTTP Request headers"""
headers = {}
raw_session = request._request.COOKIES.get(settings.SESSION_COOKIE_NAME)
for key, value in request.META.items():
if not isinstance(value, str):
continue
actual_value = value
if raw_session is not None and raw_session in actual_value:
actual_value = actual_value.replace(
raw_session, SafeExceptionReporterFilter.cleansed_substitute
)
headers[key] = actual_value
headers[key] = value
return headers

def get_http_host(self, request: Request) -> str:
@ -102,12 +95,6 @@ class SystemInfoSerializer(PassiveSerializer):

def get_embedded_outpost_host(self, request: Request) -> str:
"""Get the FQDN configured on the embedded outpost"""
if not apps.is_installed("authentik.outposts"):
return ""

from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost

outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
if not outposts.exists(): # pragma: no cover
return ""
@ -1,33 +0,0 @@
from rest_framework.permissions import IsAdminUser
from rest_framework.viewsets import ReadOnlyModelViewSet

from authentik.admin.models import VersionHistory
from authentik.core.api.utils import ModelSerializer

class VersionHistorySerializer(ModelSerializer):
"""VersionHistory Serializer"""

class Meta:
model = VersionHistory
fields = [
"id",
"timestamp",
"version",
"build",
]

class VersionHistoryViewSet(ReadOnlyModelViewSet):
"""VersionHistory Viewset"""

queryset = VersionHistory.objects.all()
serializer_class = VersionHistorySerializer
permission_classes = [IsAdminUser]
filterset_fields = [
"version",
"build",
]
search_fields = ["version", "build"]
ordering = ["-timestamp"]
pagination_class = None
@ -1,16 +1,12 @@
"""authentik administration overview"""

from socket import gethostname

from django.conf import settings
from drf_spectacular.utils import extend_schema, inline_serializer
from packaging.version import parse
from rest_framework.fields import BooleanField, CharField
from rest_framework.fields import IntegerField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView

from authentik import get_full_version
from authentik.rbac.permissions import HasPermission
from authentik.root.celery import CELERY_APP

@ -20,38 +16,11 @@ class WorkerView(APIView):

permission_classes = [HasPermission("authentik_rbac.view_system_info")]

@extend_schema(
responses=inline_serializer(
"Worker",
fields={
"worker_id": CharField(),
"version": CharField(),
"version_matching": BooleanField(),
},
many=True,
)
)
@extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()}))
def get(self, request: Request) -> Response:
"""Get currently connected worker count."""
raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5)
our_version = parse(get_full_version())
response = []
for worker in raw:
key = list(worker.keys())[0]
version = worker[key].get("version")
version_matching = False
if version:
version_matching = parse(version) == our_version
response.append(
{"worker_id": key, "version": version, "version_matching": version_matching}
)
count = len(CELERY_APP.control.ping(timeout=0.5))
# In debug we run with `task_always_eager`, so tasks are ran on the main process
if settings.DEBUG: # pragma: no cover
response.append(
{
"worker_id": f"authentik-debug@{gethostname()}",
"version": get_full_version(),
"version_matching": True,
}
)
return Response(response)
count += 1
return Response({"count": count})
@ -1,10 +1,11 @@
"""authentik admin app config"""

from prometheus_client import Info
from prometheus_client import Gauge, Info

from authentik.blueprints.apps import ManagedAppConfig

PROM_INFO = Info("authentik_version", "Currently running authentik version")
GAUGE_WORKERS = Gauge("authentik_admin_workers", "Currently connected workers")

class AuthentikAdminConfig(ManagedAppConfig):
@ -1,22 +0,0 @@
"""authentik admin models"""

from django.db import models
from django.utils.translation import gettext_lazy as _

class VersionHistory(models.Model):
id = models.BigAutoField(primary_key=True)
timestamp = models.DateTimeField()
version = models.TextField()
build = models.TextField()

class Meta:
managed = False
db_table = "authentik_version_history"
ordering = ("-timestamp",)
verbose_name = _("Version history")
verbose_name_plural = _("Version history")
default_permissions = []

def __str__(self):
return f"{self.version}.{self.build} ({self.timestamp})"
@ -2,7 +2,7 @@

from celery.schedules import crontab

from authentik.common.utils.time import fqdn_rand
from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
"admin_latest_version": {
@ -1,35 +1,14 @@
"""admin signals"""

from django.dispatch import receiver
from packaging.version import parse
from prometheus_client import Gauge

from authentik import get_full_version
from authentik.admin.apps import GAUGE_WORKERS
from authentik.root.celery import CELERY_APP
from authentik.root.monitoring import monitoring_set

GAUGE_WORKERS = Gauge(
"authentik_admin_workers",
"Currently connected workers, their versions and if they are the same version as authentik",
["version", "version_matched"],
)

_version = parse(get_full_version())

@receiver(monitoring_set)
def monitoring_set_workers(sender, **kwargs):
"""Set worker gauge"""
raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5)
worker_version_count = {}
for worker in raw:
key = list(worker.keys())[0]
version = worker[key].get("version")
version_matching = False
if version:
version_matching = parse(version) == _version
worker_version_count.setdefault(version, {"count": 0, "matching": version_matching})
worker_version_count[version]["count"] += 1
for version, stats in worker_version_count.items():
GAUGE_WORKERS.labels(version, stats["matching"]).set(stats["count"])
count = len(CELERY_APP.control.ping(timeout=0.5))
GAUGE_WORKERS.set(count)
@ -1,24 +1,28 @@
"""authentik admin tasks"""

import re

from django.core.cache import cache
from django.core.validators import URLValidator
from django.db import DatabaseError, InternalError, ProgrammingError
from django.utils.translation import gettext_lazy as _
from packaging.version import parse
from requests import RequestException
from structlog.stdlib import get_logger

from authentik import __version__, get_build_hash
from authentik.admin.apps import PROM_INFO
from authentik.common.config import CONFIG
from authentik.common.utils.http import get_http_session
from authentik.events.models import Event, EventAction, Notification
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
from authentik.lib.config import CONFIG
from authentik.lib.utils.http import get_http_session
from authentik.root.celery import CELERY_APP

LOGGER = get_logger()
VERSION_NULL = "0.0.0"
VERSION_CACHE_KEY = "authentik_latest_version"
VERSION_CACHE_TIMEOUT = 8 * 60 * 60 # 8 hours
# Chop of the first ^ because we want to search the entire string
URL_FINDER = URLValidator.regex.pattern[1:]
LOCAL_VERSION = parse(__version__)

@ -74,16 +78,10 @@ def update_latest_version(self: SystemTask):
context__new_version=upstream_version,
).exists():
return
Event.new(
EventAction.UPDATE_AVAILABLE,
message=_(
"New version {version} available!".format(
version=upstream_version,
)
),
new_version=upstream_version,
changelog=data.get("stable", {}).get("changelog_url"),
).save()
event_dict = {"new_version": upstream_version}
if match := re.search(URL_FINDER, data.get("stable", {}).get("changelog", "")):
event_dict["message"] = f"Changelog: {match.group()}"
Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
except (RequestException, IndexError) as exc:
cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
self.set_error(exc)
@ -8,7 +8,7 @@ from django.urls import reverse
from authentik import __version__
from authentik.blueprints.tests import reconcile_app
from authentik.core.models import Group, User
from authentik.crypto.generators import generate_id
from authentik.lib.generators import generate_id

class TestAdminAPI(TestCase):
@ -34,7 +34,7 @@ class TestAdminAPI(TestCase):
response = self.client.get(reverse("authentik_api:admin_workers"))
self.assertEqual(response.status_code, 200)
body = loads(response.content)
self.assertEqual(len(body), 0)
self.assertEqual(body["count"], 0)

def test_metrics(self):
"""Test metrics API"""
@ -9,15 +9,14 @@ from authentik.admin.tasks import (
clear_update_notifications,
update_latest_version,
)
from authentik.common.config import CONFIG
from authentik.events.models import Event, EventAction
from authentik.lib.config import CONFIG

RESPONSE_VALID = {
"$schema": "https://version.goauthentik.io/schema.json",
"stable": {
"version": "99999999.9999999",
"changelog": "See https://goauthentik.io/test",
"changelog_url": "https://goauthentik.io/test",
"reason": "bugfix",
},
}
@ -36,7 +35,7 @@ class TestAdminTasks(TestCase):
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999",
context__message="New version 99999999.9999999 available!",
context__message="Changelog: https://goauthentik.io/test",
).exists()
)
# test that a consecutive check doesn't create a duplicate event
@ -46,7 +45,7 @@ class TestAdminTasks(TestCase):
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999",
context__message="New version 99999999.9999999 available!",
context__message="Changelog: https://goauthentik.io/test",
)
),
1,
@ -6,7 +6,6 @@ from authentik.admin.api.meta import AppsViewSet, ModelViewSet
from authentik.admin.api.metrics import AdministrationMetricsViewSet
from authentik.admin.api.system import SystemView
from authentik.admin.api.version import VersionView
from authentik.admin.api.version_history import VersionHistoryViewSet
from authentik.admin.api.workers import WorkerView

api_urlpatterns = [
@ -18,7 +17,6 @@ api_urlpatterns = [
name="admin_metrics",
),
path("admin/version/", VersionView.as_view(), name="admin_version"),
("admin/version/history", VersionHistoryViewSet, "version_history"),
path("admin/workers/", WorkerView.as_view(), name="admin_workers"),
path("admin/system/", SystemView.as_view(), name="admin_system"),
]
@ -1,16 +1,19 @@
"""API Authentication"""

from hmac import compare_digest
from typing import Any

from django.conf import settings
from drf_spectacular.extensions import OpenApiAuthenticationExtension
from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.exceptions import AuthenticationFailed
from rest_framework.request import Request
from structlog.stdlib import get_logger

from authentik.common.oauth.constants import SCOPE_AUTHENTIK_API
from authentik.core.middleware import CTX_AUTH_VIA
from authentik.core.models import Token, TokenIntents, User
from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API

LOGGER = get_logger()

@ -65,9 +68,28 @@ def auth_user_lookup(raw_header: bytes) -> User | None:
raise AuthenticationFailed("Token invalid/expired")
CTX_AUTH_VIA.set("jwt")
return jwt_token.user
# then try to auth via secret key (for embedded outpost/etc)
user = token_secret_key(auth_credentials)
if user:
CTX_AUTH_VIA.set("secret_key")
return user
raise AuthenticationFailed("Token invalid/expired")

def token_secret_key(value: str) -> User | None:
"""Check if the token is the secret key
and return the service account for the managed outpost"""
from authentik.outposts.apps import MANAGED_OUTPOST

if not compare_digest(value, settings.SECRET_KEY):
return None
outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
if not outposts:
return None
outpost = outposts.first()
return outpost.user

class TokenAuthentication(BaseAuthentication):
"""Token-based authentication using HTTP Bearer authentication"""
67 authentik/api/authorization.py Normal file
@ -0,0 +1,67 @@
"""API Authorization"""

from django.conf import settings
from django.db.models import Model
from django.db.models.query import QuerySet
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.authentication import get_authorization_header
from rest_framework.filters import BaseFilterBackend
from rest_framework.permissions import BasePermission
from rest_framework.request import Request

from authentik.api.authentication import validate_auth
from authentik.rbac.filters import ObjectFilter

class OwnerFilter(BaseFilterBackend):
"""Filter objects by their owner"""

owner_key = "user"

def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
if request.user.is_superuser:
return queryset
return queryset.filter(**{self.owner_key: request.user})

class SecretKeyFilter(DjangoFilterBackend):
"""Allow access to all objects when authenticated with secret key as token.

Replaces both DjangoFilterBackend and ObjectFilter"""

def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
auth_header = get_authorization_header(request)
token = validate_auth(auth_header)
if token and token == settings.SECRET_KEY:
return queryset
queryset = ObjectFilter().filter_queryset(request, queryset, view)
return super().filter_queryset(request, queryset, view)

class OwnerPermissions(BasePermission):
"""Authorize requests by an object's owner matching the requesting user"""

owner_key = "user"

def has_permission(self, request: Request, view) -> bool:
"""If the user is authenticated, we allow all requests here. For listing, the
object-level permissions are done by the filter backend"""
return request.user.is_authenticated

def has_object_permission(self, request: Request, view, obj: Model) -> bool:
"""Check if the object's owner matches the currently logged in user"""
if not hasattr(obj, self.owner_key):
return False
owner = getattr(obj, self.owner_key)
if owner != request.user:
return False
return True

class OwnerSuperuserPermissions(OwnerPermissions):
"""Similar to OwnerPermissions, except always allow access for superusers"""

def has_object_permission(self, request: Request, view, obj: Model) -> bool:
if request.user.is_superuser:
return True
return super().has_object_permission(request, view, obj)
@ -54,7 +54,7 @@ def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedCom
return component

def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):  # noqa: W0613
"""Workaround to set a default response for endpoints.
Workaround suggested at
<https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357>
@ -7,7 +7,7 @@ API Browser - {{ brand.branding_title }}
{% endblock %}

{% block head %}
<script src="{% versioned_script 'dist/standalone/api-browser/index-%v.js' %}" type="module"></script>
{% versioned_script "dist/standalone/api-browser/index-%v.js" %}
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
{% endblock %}
@ -3,15 +3,19 @@
import json
from base64 import b64encode

from django.conf import settings
from django.test import TestCase
from django.utils import timezone
from rest_framework.exceptions import AuthenticationFailed

from authentik.api.authentication import bearer_auth
from authentik.common.oauth.constants import SCOPE_AUTHENTIK_API
from authentik.core.models import Token, TokenIntents
from authentik.blueprints.tests import reconcile_app
from authentik.core.models import Token, TokenIntents, User, UserTypes
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.crypto.generators import generate_id
from authentik.lib.generators import generate_id
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider

@ -48,6 +52,21 @@ class TestAPIAuth(TestCase):
with self.assertRaises(AuthenticationFailed):
bearer_auth(f"Bearer {token.key}".encode())

@reconcile_app("authentik_outposts")
def test_managed_outpost_fail(self):
"""Test managed outpost"""
outpost = Outpost.objects.filter(managed=MANAGED_OUTPOST).first()
outpost.user.delete()
outpost.delete()
with self.assertRaises(AuthenticationFailed):
bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())

@reconcile_app("authentik_outposts")
def test_managed_outpost_success(self):
"""Test managed outpost"""
user: User = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
self.assertEqual(user.type, UserTypes.INTERNAL_SERVICE_ACCOUNT)

def test_jwt_valid(self):
"""Test valid JWT"""
provider = OAuth2Provider.objects.create(
@ -19,9 +19,9 @@ from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView

from authentik.common.config import CONFIG
from authentik.core.api.utils import PassiveSerializer
from authentik.events.context_processors.base import get_context_processors
from authentik.lib.config import CONFIG

capabilities = Signal()
@ -11,7 +11,7 @@ from structlog.stdlib import get_logger

from authentik.api.v3.config import ConfigView
from authentik.api.views import APIBrowserView
from authentik.common.utils.reflection import get_apps
from authentik.lib.utils.reflection import get_apps

LOGGER = get_logger()
@ -7,7 +7,7 @@ from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, DateTimeField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.serializers import ListSerializer, ModelSerializer
from rest_framework.viewsets import ModelViewSet

from authentik.blueprints.models import BlueprintInstance
@ -15,7 +15,7 @@ from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer
from authentik.core.api.utils import JSONDictField, PassiveSerializer
from authentik.rbac.decorators import permission_required
@ -1,68 +0,0 @@
"""Test and debug Blueprints"""

import atexit
import readline
from pathlib import Path
from pprint import pformat
from sys import exit as sysexit
from textwrap import indent

from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
from yaml import load

from authentik.blueprints.v1.common import BlueprintLoader, EntryInvalidError
from authentik.common.utils.errors import exception_to_string
from authentik.core.management.commands.shell import get_banner_text

LOGGER = get_logger()

class Command(BaseCommand):
"""Test and debug Blueprints"""

lines = []

def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
histfolder = Path("~").expanduser() / Path(".local/share/authentik")
histfolder.mkdir(parents=True, exist_ok=True)
histfile = histfolder / Path("blueprint_shell_history")
readline.parse_and_bind("tab: complete")
readline.parse_and_bind("set editing-mode vi")

try:
readline.read_history_file(str(histfile))
except FileNotFoundError:
pass

atexit.register(readline.write_history_file, str(histfile))

@no_translations
def handle(self, *args, **options):
"""Interactively debug blueprint files"""
self.stdout.write(get_banner_text("Blueprint shell"))
self.stdout.write("Type '.eval' to evaluate previously entered statement(s).")

def do_eval():
yaml_input = "\n".join([line for line in self.lines if line])
data = load(yaml_input, BlueprintLoader)
self.stdout.write(pformat(data))
self.lines = []

while True:
try:
line = input("> ")
if line == ".eval":
do_eval()
else:
self.lines.append(line)
except EntryInvalidError as exc:
self.stdout.write("Failed to evaluate expression:")
self.stdout.write(indent(exception_to_string(exc), prefix=" "))
except EOFError:
break
except KeyboardInterrupt:
self.stdout.write()
sysexit(0)
self.stdout.write()
@ -15,7 +15,7 @@ from authentik import __version__
from authentik.blueprints.v1.common import BlueprintEntryDesiredState
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
from authentik.common.models import SerializerModel
from authentik.lib.models import SerializerModel

LOGGER = get_logger()

@ -126,7 +126,7 @@ class Command(BaseCommand):
def_name_perm = f"model_{model_path}_permissions"
def_path_perm = f"#/$defs/{def_name_perm}"
self.schema["$defs"][def_name_perm] = self.model_permissions(model)
template = {
return {
"type": "object",
"required": ["model", "identifiers"],
"properties": {
@ -143,11 +143,6 @@ class Command(BaseCommand):
"identifiers": {"$ref": def_path},
},
}
# Meta models don't require identifiers, as there's no matching database model to find
if issubclass(model, BaseMetaModel):
del template["properties"]["identifiers"]
template["required"].remove("identifiers")
return template

def field_to_jsonschema(self, field: Field) -> dict:
"""Convert a single field to json schema"""
@ -11,7 +11,7 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from yaml import load

from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM
from authentik.common.config import CONFIG
from authentik.lib.config import CONFIG

def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
@ -29,7 +29,9 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
if version != 1:
return
blueprint_file.seek(0)
instance = BlueprintInstance.objects.using(db_alias).filter(path=path).first()
instance: BlueprintInstance = (
BlueprintInstance.objects.using(db_alias).filter(path=path).first()
)
rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
meta = None
if metadata:
@ -2,7 +2,7 @@

from django.db import migrations, models

from authentik.common.migrations import fallback_names
from authentik.lib.migrations import fallback_names

class Migration(migrations.Migration):
@ -10,14 +10,14 @@ from rest_framework.serializers import Serializer
from structlog import get_logger

from authentik.blueprints.v1.oci import OCI_PREFIX, BlueprintOCIClient, OCIException
from authentik.common.config import CONFIG
from authentik.common.exceptions import NotReportedException
from authentik.common.models import CreatedUpdatedModel, SerializerModel
from authentik.lib.config import CONFIG
from authentik.lib.models import CreatedUpdatedModel, SerializerModel
from authentik.lib.sentry import SentryIgnoredException

LOGGER = get_logger()

class BlueprintRetrievalFailed(NotReportedException):
class BlueprintRetrievalFailed(SentryIgnoredException):
"""Error raised when we are unable to fetch the blueprint contents, whether it be HTTP files
not being accessible or local files not being readable"""
@ -2,7 +2,7 @@

from celery.schedules import crontab

from authentik.common.utils.time import fqdn_rand
from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
"blueprints_v1_discover": {
@ -146,10 +146,6 @@ entries:
]
]
nested_context: !Context context2
at_index_sequence: !AtIndex [!Context sequence, 0]
at_index_sequence_default: !AtIndex [!Context sequence, 100, "non existent"]
at_index_mapping: !AtIndex [!Context mapping, "key2"]
at_index_mapping_default: !AtIndex [!Context mapping, "invalid", "non existent"]
identifiers:
name: test
conditions:
@ -3,7 +3,7 @@
from django.test import TestCase

from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
from authentik.crypto.generators import generate_id
from authentik.lib.generators import generate_id

class TestModels(TestCase):
@ -27,8 +27,7 @@ def blueprint_tester(file_name: Path) -> Callable:
base = Path("blueprints/")
rel_path = Path(file_name).relative_to(base)
importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve())
validation, logs = importer.validate()
self.assertTrue(validation, logs)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())

return tester
@ -6,7 +6,7 @@ from django.apps import apps
from django.test import TestCase

from authentik.blueprints.v1.importer import is_model_allowed
from authentik.common.models import SerializerModel
from authentik.lib.models import SerializerModel
from authentik.providers.oauth2.models import RefreshToken
@ -6,10 +6,10 @@ from django.test import TransactionTestCase

from authentik.blueprints.v1.exporter import FlowExporter
from authentik.blueprints.v1.importer import Importer, transaction_rollback
from authentik.common.tests import load_fixture
from authentik.core.models import Group
from authentik.crypto.generators import generate_id
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
from authentik.policies.expression.models import ExpressionPolicy
from authentik.policies.models import PolicyBinding
from authentik.sources.oauth.models import OAuthSource
@ -215,10 +215,6 @@ class TestBlueprintsV1(TransactionTestCase):
},
"nested_context": "context-nested-value",
"env_null": None,
"at_index_sequence": "foo",
"at_index_sequence_default": "non existent",
"at_index_mapping": 2,
"at_index_mapping_default": "non existent",
}
).exists()
)
@ -7,8 +7,8 @@ from django.urls import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
from yaml import dump
|
||||
|
||||
from authentik.common.config import CONFIG
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
from authentik.lib.config import CONFIG
|
||||
|
||||
TMP = mkdtemp("authentik-blueprints")
|
||||
|
||||
|
@ -3,11 +3,11 @@
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.common.tests import load_fixture
|
||||
from authentik.core.models import Application, Token, User
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
from authentik.crypto.generators import generate_id
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
from authentik.sources.oauth.models import OAuthSource
|
||||
|
||||
|
||||
|
@ -3,9 +3,9 @@
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.common.tests import load_fixture
|
||||
from authentik.crypto.generators import generate_id
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
|
||||
|
||||
class TestBlueprintsV1Conditions(TransactionTestCase):
|
||||
|
@ -4,10 +4,10 @@ from django.test import TransactionTestCase
|
||||
from guardian.shortcuts import get_perms
|
||||
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.common.tests import load_fixture
|
||||
from authentik.core.models import User
|
||||
from authentik.crypto.generators import generate_id
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
from authentik.rbac.models import Role
|
||||
|
||||
|
||||
|
@ -3,9 +3,9 @@
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.common.tests import load_fixture
|
||||
from authentik.crypto.generators import generate_id
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
|
||||
|
||||
class TestBlueprintsV1State(TransactionTestCase):
|
||||
|
@ -8,8 +8,8 @@ from yaml import dump
|
||||
|
||||
from authentik.blueprints.models import BlueprintInstance, BlueprintInstanceStatus
|
||||
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discovery, blueprints_find
|
||||
from authentik.common.config import CONFIG
|
||||
from authentik.crypto.generators import generate_id
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
TMP = mkdtemp("authentik-blueprints")
|
||||
|
||||
|
@ -19,15 +19,11 @@ from rest_framework.fields import Field
from rest_framework.serializers import Serializer
from yaml import SafeDumper, SafeLoader, ScalarNode, SequenceNode

from authentik.common.exceptions import NotReportedException
from authentik.common.models import SerializerModel
from authentik.lib.models import SerializerModel
from authentik.lib.sentry import SentryIgnoredException
from authentik.policies.models import PolicyBindingModel


class UNSET:
"""Used to test whether a key has not been set."""


def get_attrs(obj: SerializerModel) -> dict[str, Any]:
"""Get object's attributes via their serializer, and convert it to a normal dict"""
serializer: Serializer = obj.serializer(obj)

@ -164,7 +160,9 @@ class BlueprintEntry:
"""Get the blueprint model, with yaml tags resolved if present"""
return str(self.tag_resolver(self.model, blueprint))

def get_permissions(self, blueprint: "Blueprint") -> Generator[BlueprintEntryPermission]:
def get_permissions(
self, blueprint: "Blueprint"
) -> Generator[BlueprintEntryPermission, None, None]:
"""Get permissions of this entry, with all yaml tags resolved"""
for perm in self.permissions:
yield BlueprintEntryPermission(

@ -200,9 +198,6 @@ class Blueprint:
class YAMLTag:
"""Base class for all YAML Tags"""

def __repr__(self) -> str:
return str(self.resolve(BlueprintEntry(""), Blueprint()))

def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
"""Implement yaml tag logic"""
raise NotImplementedError

@ -561,53 +556,6 @@ class Value(EnumeratedItem):
raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry) from exc


class AtIndex(YAMLTag):
"""Get value at index of a sequence or mapping"""

obj: YAMLTag | dict | list | tuple
attribute: int | str | YAMLTag
default: Any | UNSET

def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.obj = loader.construct_object(node.value[0])
self.attribute = loader.construct_object(node.value[1])
if len(node.value) == 2: # noqa: PLR2004
self.default = UNSET
else:
self.default = loader.construct_object(node.value[2])

def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
if isinstance(self.obj, YAMLTag):
obj = self.obj.resolve(entry, blueprint)
else:
obj = self.obj
if isinstance(self.attribute, YAMLTag):
attribute = self.attribute.resolve(entry, blueprint)
else:
attribute = self.attribute

if isinstance(obj, list | tuple):
try:
return obj[attribute]
except TypeError as exc:
raise EntryInvalidError.from_entry(
f"Invalid index for list: {attribute}", entry
) from exc
except IndexError as exc:
if self.default is UNSET:
raise EntryInvalidError.from_entry(
f"Index out of range: {attribute}", entry
) from exc
return self.default
if attribute in obj:
return obj[attribute]
else:
if self.default is UNSET:
raise EntryInvalidError.from_entry(f"Key does not exist: {attribute}", entry)
return self.default


class BlueprintDumper(SafeDumper):
"""Dump dataclasses to yaml"""

@ -658,10 +606,9 @@ class BlueprintLoader(SafeLoader):
self.add_constructor("!Enumerate", Enumerate)
self.add_constructor("!Value", Value)
self.add_constructor("!Index", Index)
self.add_constructor("!AtIndex", AtIndex)


class EntryInvalidError(NotReportedException):
class EntryInvalidError(SentryIgnoredException):
"""Error raised when an entry is invalid"""

entry_model: str | None
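For reference, the `!AtIndex` tag shown in this hunk resolves one element out of a sequence or mapping taken from the blueprint context; an optional third argument is used as a default when the index or key does not exist, and without it resolution fails with `EntryInvalidError`. A minimal usage sketch, modeled on the test fixture earlier in this diff (the attribute names and context keys here are illustrative, not part of the change):

```yaml
# Hypothetical blueprint entry attrs using !AtIndex; `sequence` and `mapping`
# are assumed to be keys in the blueprint context, as in the fixture above.
attrs:
    first_item: !AtIndex [!Context sequence, 0]
    first_item_or_default: !AtIndex [!Context sequence, 100, "non existent"]
    mapped_value: !AtIndex [!Context mapping, "key2"]
    mapped_value_or_default: !AtIndex [!Context mapping, "invalid", "non existent"]
```

Without a default, an out-of-range index raises "Index out of range" and a missing key raises "Key does not exist", per the resolver above.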
@ -8,11 +8,14 @@ from dacite.config import Config
|
||||
from dacite.core import from_dict
|
||||
from dacite.exceptions import DaciteError
|
||||
from deepmerge import always_merger
|
||||
from django.contrib.auth.models import Permission
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.exceptions import FieldError
|
||||
from django.db.models import Model
|
||||
from django.db.models.query_utils import Q
|
||||
from django.db.transaction import atomic
|
||||
from django.db.utils import IntegrityError
|
||||
from guardian.models import UserObjectPermission
|
||||
from guardian.shortcuts import assign_perm
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.serializers import BaseSerializer, Serializer
|
||||
@ -28,26 +31,103 @@ from authentik.blueprints.v1.common import (
|
||||
EntryInvalidError,
|
||||
)
|
||||
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
|
||||
from authentik.common.exceptions import NotReportedException
|
||||
from authentik.common.models import SerializerModel, excluded_models
|
||||
from authentik.common.utils.reflection import get_apps
|
||||
from authentik.core.models import User
|
||||
from authentik.core.models import (
|
||||
AuthenticatedSession,
|
||||
GroupSourceConnection,
|
||||
PropertyMapping,
|
||||
Provider,
|
||||
Source,
|
||||
User,
|
||||
UserSourceConnection,
|
||||
)
|
||||
from authentik.enterprise.license import LicenseKey
|
||||
from authentik.enterprise.models import LicenseUsage
|
||||
from authentik.enterprise.providers.google_workspace.models import (
|
||||
GoogleWorkspaceProviderGroup,
|
||||
GoogleWorkspaceProviderUser,
|
||||
)
|
||||
from authentik.enterprise.providers.microsoft_entra.models import (
|
||||
MicrosoftEntraProviderGroup,
|
||||
MicrosoftEntraProviderUser,
|
||||
)
|
||||
from authentik.enterprise.providers.rac.models import ConnectionToken
|
||||
from authentik.events.logs import LogEvent, capture_logs
|
||||
from authentik.events.models import SystemTask
|
||||
from authentik.events.utils import cleanse_dict
|
||||
from authentik.flows.models import FlowToken, Stage
|
||||
from authentik.lib.models import SerializerModel
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.lib.utils.reflection import get_apps
|
||||
from authentik.outposts.models import OutpostServiceConnection
|
||||
from authentik.policies.models import Policy, PolicyBindingModel
|
||||
from authentik.policies.reputation.models import Reputation
|
||||
from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
|
||||
from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
|
||||
from authentik.rbac.models import Role
|
||||
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
|
||||
from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
|
||||
from authentik.tenants.models import Tenant
|
||||
|
||||
# Context set when the serializer is created in a blueprint context
|
||||
# Update website/docs/customize/blueprints/v1/models.md when used
|
||||
# Update website/developer-docs/blueprints/v1/models.md when used
|
||||
SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"
|
||||
|
||||
|
||||
def excluded_models() -> list[type[Model]]:
|
||||
"""Return a list of all excluded models that shouldn't be exposed via API
|
||||
or other means (internal only, base classes, non-used objects, etc)"""
|
||||
|
||||
from django.contrib.auth.models import Group as DjangoGroup
|
||||
from django.contrib.auth.models import User as DjangoUser
|
||||
|
||||
return (
|
||||
# Django only classes
|
||||
DjangoUser,
|
||||
DjangoGroup,
|
||||
ContentType,
|
||||
Permission,
|
||||
UserObjectPermission,
|
||||
# Base classes
|
||||
Provider,
|
||||
Source,
|
||||
PropertyMapping,
|
||||
UserSourceConnection,
|
||||
GroupSourceConnection,
|
||||
Stage,
|
||||
OutpostServiceConnection,
|
||||
Policy,
|
||||
PolicyBindingModel,
|
||||
# Classes that have other dependencies
|
||||
AuthenticatedSession,
|
||||
# Classes which are only internally managed
|
||||
# FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin
|
||||
FlowToken,
|
||||
LicenseUsage,
|
||||
SCIMProviderGroup,
|
||||
SCIMProviderUser,
|
||||
Tenant,
|
||||
SystemTask,
|
||||
ConnectionToken,
|
||||
AuthorizationCode,
|
||||
AccessToken,
|
||||
RefreshToken,
|
||||
Reputation,
|
||||
WebAuthnDeviceType,
|
||||
SCIMSourceUser,
|
||||
SCIMSourceGroup,
|
||||
GoogleWorkspaceProviderUser,
|
||||
GoogleWorkspaceProviderGroup,
|
||||
MicrosoftEntraProviderUser,
|
||||
MicrosoftEntraProviderGroup,
|
||||
)
|
||||
|
||||
|
||||
def is_model_allowed(model: type[Model]) -> bool:
|
||||
"""Check if model is allowed"""
|
||||
return model not in excluded_models() and issubclass(model, SerializerModel | BaseMetaModel)
|
||||
|
||||
|
||||
class DoRollback(NotReportedException):
|
||||
class DoRollback(SentryIgnoredException):
|
||||
"""Exception to trigger a rollback"""
|
||||
|
||||
|
||||
@ -207,11 +287,7 @@ class Importer:
|
||||
|
||||
serializer_kwargs = {}
|
||||
model_instance = existing_models.first()
|
||||
if (
|
||||
not isinstance(model(), BaseMetaModel)
|
||||
and model_instance
|
||||
and entry.state != BlueprintEntryDesiredState.MUST_CREATED
|
||||
):
|
||||
if not isinstance(model(), BaseMetaModel) and model_instance:
|
||||
self.logger.debug(
|
||||
"Initialise serializer with instance",
|
||||
model=model,
|
||||
@ -221,12 +297,11 @@ class Importer:
|
||||
serializer_kwargs["instance"] = model_instance
|
||||
serializer_kwargs["partial"] = True
|
||||
elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED:
|
||||
msg = (
|
||||
f"State is set to {BlueprintEntryDesiredState.MUST_CREATED.value} "
|
||||
"and object exists already",
|
||||
)
|
||||
raise EntryInvalidError.from_entry(
|
||||
ValidationError({k: msg for k in entry.identifiers.keys()}, "unique"),
|
||||
(
|
||||
f"State is set to {BlueprintEntryDesiredState.MUST_CREATED} "
|
||||
"and object exists already",
|
||||
),
|
||||
entry,
|
||||
)
|
||||
else:
|
||||
|
@ -16,14 +16,14 @@ from requests.exceptions import RequestException
|
||||
from structlog import get_logger
|
||||
from structlog.stdlib import BoundLogger
|
||||
|
||||
from authentik.common.exceptions import NotReportedException
|
||||
from authentik.common.utils.http import authentik_user_agent
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.lib.utils.http import authentik_user_agent
|
||||
|
||||
OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
|
||||
OCI_PREFIX = "oci://"
|
||||
|
||||
|
||||
class OCIException(NotReportedException):
|
||||
class OCIException(SentryIgnoredException):
|
||||
"""OCI-related errors"""
|
||||
|
||||
|
||||
|
@ -30,11 +30,11 @@ from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, E
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
|
||||
from authentik.blueprints.v1.oci import OCI_PREFIX
|
||||
from authentik.common.config import CONFIG
|
||||
from authentik.events.logs import capture_logs
|
||||
from authentik.events.models import TaskStatus
|
||||
from authentik.events.system_tasks import SystemTask, prefill_task
|
||||
from authentik.events.utils import sanitize_dict
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.root.celery import CELERY_APP
|
||||
from authentik.tenants.models import Tenant
|
||||
|
||||
@ -159,7 +159,7 @@ def blueprints_discovery(self: SystemTask, path: str | None = None):
|
||||
check_blueprint_v1_file(blueprint)
|
||||
count += 1
|
||||
self.set_status(
|
||||
TaskStatus.SUCCESSFUL, _("Successfully imported {count} files.".format(count=count))
|
||||
TaskStatus.SUCCESSFUL, _("Successfully imported %(count)d files." % {"count": count})
|
||||
)
|
||||
|
||||
|
||||
|
@ -14,10 +14,10 @@ from rest_framework.response import Response
|
||||
from rest_framework.validators import UniqueValidator
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.api.authorization import SecretKeyFilter
|
||||
from authentik.brands.models import Brand
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
|
||||
from authentik.rbac.filters import SecretKeyFilter
|
||||
from authentik.tenants.utils import get_current_tenant
|
||||
|
||||
|
||||
@ -49,8 +49,6 @@ class BrandSerializer(ModelSerializer):
|
||||
"branding_title",
|
||||
"branding_logo",
|
||||
"branding_favicon",
|
||||
"branding_custom_css",
|
||||
"branding_default_flow_background",
|
||||
"flow_authentication",
|
||||
"flow_invalidation",
|
||||
"flow_recovery",
|
||||
@ -86,9 +84,8 @@ class CurrentBrandSerializer(PassiveSerializer):
|
||||
|
||||
matched_domain = CharField(source="domain")
|
||||
branding_title = CharField()
|
||||
branding_logo = CharField(source="branding_logo_url")
|
||||
branding_favicon = CharField(source="branding_favicon_url")
|
||||
branding_custom_css = CharField()
|
||||
branding_logo = CharField()
|
||||
branding_favicon = CharField()
|
||||
ui_footer_links = ListField(
|
||||
child=FooterLinkSerializer(),
|
||||
read_only=True,
|
||||
@ -128,7 +125,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
|
||||
"branding_title",
|
||||
"branding_logo",
|
||||
"branding_favicon",
|
||||
"branding_default_flow_background",
|
||||
"flow_authentication",
|
||||
"flow_invalidation",
|
||||
"flow_recovery",
|
||||
|
@ -4,7 +4,7 @@ from collections.abc import Callable
|
||||
|
||||
from django.http.request import HttpRequest
|
||||
from django.http.response import HttpResponse
|
||||
from django.utils.translation import override
|
||||
from django.utils.translation import activate
|
||||
|
||||
from authentik.brands.utils import get_brand_for_request
|
||||
|
||||
@ -18,14 +18,10 @@ class BrandMiddleware:
|
||||
self.get_response = get_response
|
||||
|
||||
def __call__(self, request: HttpRequest) -> HttpResponse:
|
||||
locale_to_set = None
|
||||
if not hasattr(request, "brand"):
|
||||
brand = get_brand_for_request(request)
|
||||
request.brand = brand
|
||||
locale = brand.default_locale
|
||||
if locale != "":
|
||||
locale_to_set = locale
|
||||
if locale_to_set:
|
||||
with override(locale_to_set):
|
||||
return self.get_response(request)
|
||||
activate(locale)
|
||||
return self.get_response(request)
|
||||
|
@ -5,7 +5,7 @@ import uuid
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import authentik.common.utils.time
|
||||
import authentik.lib.utils.time
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
@ -104,7 +104,7 @@ class Migration(migrations.Migration):
|
||||
"Events will be deleted after this duration.(Format:"
|
||||
" weeks=3;days=2;hours=3,seconds=2)."
|
||||
),
|
||||
validators=[authentik.common.utils.time.timedelta_string_validator],
|
||||
validators=[authentik.lib.utils.time.timedelta_string_validator],
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
|
@ -1,35 +0,0 @@
|
||||
# Generated by Django 5.0.12 on 2025-02-22 01:51
|
||||
|
||||
from pathlib import Path
|
||||
from django.db import migrations, models
|
||||
from django.apps.registry import Apps
|
||||
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
|
||||
|
||||
def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
Brand = apps.get_model("authentik_brands", "brand")
|
||||
|
||||
db_alias = schema_editor.connection.alias
|
||||
|
||||
path = Path("/web/dist/custom.css")
|
||||
if not path.exists():
|
||||
return
|
||||
css = path.read_text()
|
||||
Brand.objects.using(db_alias).all().update(branding_custom_css=css)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_brands", "0007_brand_default_application"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="brand",
|
||||
name="branding_custom_css",
|
||||
field=models.TextField(blank=True, default=""),
|
||||
),
|
||||
migrations.RunPython(migrate_custom_css),
|
||||
]
|
@ -1,18 +0,0 @@
|
||||
# Generated by Django 5.0.13 on 2025-03-19 22:54
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_brands", "0008_brand_branding_custom_css"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="brand",
|
||||
name="branding_default_flow_background",
|
||||
field=models.TextField(default="/static/dist/assets/images/flow_background.jpg"),
|
||||
),
|
||||
]
|
@ -8,10 +8,9 @@ from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework.serializers import Serializer
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.common.config import CONFIG
|
||||
from authentik.common.models import SerializerModel
|
||||
from authentik.crypto.models import CertificateKeyPair
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.lib.models import SerializerModel
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
@ -33,10 +32,6 @@ class Brand(SerializerModel):
|
||||
|
||||
branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg")
|
||||
branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png")
|
||||
branding_custom_css = models.TextField(default="", blank=True)
|
||||
branding_default_flow_background = models.TextField(
|
||||
default="/static/dist/assets/images/flow_background.jpg"
|
||||
)
|
||||
|
||||
flow_authentication = models.ForeignKey(
|
||||
Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication"
|
||||
@ -76,24 +71,6 @@ class Brand(SerializerModel):
|
||||
)
|
||||
attributes = models.JSONField(default=dict, blank=True)
|
||||
|
||||
def branding_logo_url(self) -> str:
|
||||
"""Get branding_logo with the correct prefix"""
|
||||
if self.branding_logo.startswith("/static"):
|
||||
return CONFIG.get("web.path", "/")[:-1] + self.branding_logo
|
||||
return self.branding_logo
|
||||
|
||||
def branding_favicon_url(self) -> str:
|
||||
"""Get branding_favicon with the correct prefix"""
|
||||
if self.branding_favicon.startswith("/static"):
|
||||
return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon
|
||||
return self.branding_favicon
|
||||
|
||||
def branding_default_flow_background_url(self) -> str:
|
||||
"""Get branding_default_flow_background with the correct prefix"""
|
||||
if self.branding_default_flow_background.startswith("/static"):
|
||||
return CONFIG.get("web.path", "/")[:-1] + self.branding_default_flow_background
|
||||
return self.branding_default_flow_background
|
||||
|
||||
@property
|
||||
def serializer(self) -> Serializer:
|
||||
from authentik.brands.api import BrandSerializer
|
||||
|
@ -7,7 +7,7 @@ from authentik.brands.api import Themes
|
||||
from authentik.brands.models import Brand
|
||||
from authentik.core.models import Application
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_brand
|
||||
from authentik.crypto.generators import generate_id
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.providers.oauth2.models import OAuth2Provider
|
||||
from authentik.providers.saml.models import SAMLProvider
|
||||
|
||||
@ -24,7 +24,6 @@ class TestBrands(APITestCase):
|
||||
"branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
|
||||
"branding_favicon": "/static/dist/assets/icons/icon.png",
|
||||
"branding_title": "authentik",
|
||||
"branding_custom_css": "",
|
||||
"matched_domain": brand.domain,
|
||||
"ui_footer_links": [],
|
||||
"ui_theme": Themes.AUTOMATIC,
|
||||
@ -44,7 +43,6 @@ class TestBrands(APITestCase):
|
||||
"branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
|
||||
"branding_favicon": "/static/dist/assets/icons/icon.png",
|
||||
"branding_title": "custom",
|
||||
"branding_custom_css": "",
|
||||
"matched_domain": "bar.baz",
|
||||
"ui_footer_links": [],
|
||||
"ui_theme": Themes.AUTOMATIC,
|
||||
@ -61,7 +59,6 @@ class TestBrands(APITestCase):
|
||||
"branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
|
||||
"branding_favicon": "/static/dist/assets/icons/icon.png",
|
||||
"branding_title": "authentik",
|
||||
"branding_custom_css": "",
|
||||
"matched_domain": "fallback",
|
||||
"ui_footer_links": [],
|
||||
"ui_theme": Themes.AUTOMATIC,
|
||||
@ -124,27 +121,3 @@ class TestBrands(APITestCase):
|
||||
"subject": None,
|
||||
},
|
||||
)
|
||||
|
||||
def test_branding_url(self):
|
||||
"""Test branding attributes return correct values"""
|
||||
brand = create_test_brand()
|
||||
brand.branding_default_flow_background = "https://goauthentik.io/img/icon.png"
|
||||
brand.branding_favicon = "https://goauthentik.io/img/icon.png"
|
||||
brand.branding_logo = "https://goauthentik.io/img/icon.png"
|
||||
brand.save()
|
||||
self.assertEqual(
|
||||
brand.branding_default_flow_background_url(), "https://goauthentik.io/img/icon.png"
|
||||
)
|
||||
self.assertJSONEqual(
|
||||
self.client.get(reverse("authentik_api:brand-current")).content.decode(),
|
||||
{
|
||||
"branding_logo": "https://goauthentik.io/img/icon.png",
|
||||
"branding_favicon": "https://goauthentik.io/img/icon.png",
|
||||
"branding_title": "authentik",
|
||||
"branding_custom_css": "",
|
||||
"matched_domain": brand.domain,
|
||||
"ui_footer_links": [],
|
||||
"ui_theme": Themes.AUTOMATIC,
|
||||
"default_locale": "",
|
||||
},
|
||||
)
|
||||
|
@ -1,579 +0,0 @@
|
||||
"""Test config loader"""
|
||||
|
||||
import base64
|
||||
from json import dumps
|
||||
from os import chmod, environ, unlink, write
|
||||
from tempfile import mkstemp
|
||||
from unittest import mock
|
||||
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.test import TestCase
|
||||
|
||||
from authentik.common.config import (
|
||||
ENV_PREFIX,
|
||||
UNSET,
|
||||
Attr,
|
||||
AttrEncoder,
|
||||
ConfigLoader,
|
||||
django_db_config,
|
||||
)
|
||||
|
||||
|
||||
class TestConfig(TestCase):
|
||||
"""Test config loader"""
|
||||
|
||||
check_deprecations_env_vars = {
|
||||
ENV_PREFIX + "_REDIS__BROKER_URL": "redis://myredis:8327/43",
|
||||
ENV_PREFIX + "_REDIS__BROKER_TRANSPORT_OPTIONS": "bWFzdGVybmFtZT1teW1hc3Rlcg==",
|
||||
ENV_PREFIX + "_REDIS__CACHE_TIMEOUT": "124s",
|
||||
ENV_PREFIX + "_REDIS__CACHE_TIMEOUT_FLOWS": "32m",
|
||||
ENV_PREFIX + "_REDIS__CACHE_TIMEOUT_POLICIES": "3920ns",
|
||||
ENV_PREFIX + "_REDIS__CACHE_TIMEOUT_REPUTATION": "298382us",
|
||||
}
|
||||
|
||||
@mock.patch.dict(environ, {ENV_PREFIX + "_test__test": "bar"})
|
||||
def test_env(self):
|
||||
"""Test simple instance"""
|
||||
config = ConfigLoader()
|
||||
config.update_from_env()
|
||||
self.assertEqual(config.get("test.test"), "bar")
|
||||
|
||||
def test_patch(self):
|
||||
"""Test patch decorator"""
|
||||
config = ConfigLoader()
|
||||
config.set("foo.bar", "bar")
|
||||
self.assertEqual(config.get("foo.bar"), "bar")
|
||||
with config.patch("foo.bar", "baz"):
|
||||
self.assertEqual(config.get("foo.bar"), "baz")
|
||||
self.assertEqual(config.get("foo.bar"), "bar")
|
||||
|
||||
@mock.patch.dict(environ, {"foo": "bar"})
|
||||
def test_uri_env(self):
|
||||
"""Test URI parsing (environment)"""
|
||||
config = ConfigLoader()
|
||||
foo_uri = "env://foo"
|
||||
foo_parsed = config.parse_uri(foo_uri)
|
||||
self.assertEqual(foo_parsed.value, "bar")
|
||||
self.assertEqual(foo_parsed.source_type, Attr.Source.URI)
|
||||
self.assertEqual(foo_parsed.source, foo_uri)
|
||||
foo_bar_uri = "env://foo?bar"
|
||||
foo_bar_parsed = config.parse_uri(foo_bar_uri)
|
||||
self.assertEqual(foo_bar_parsed.value, "bar")
|
||||
self.assertEqual(foo_bar_parsed.source_type, Attr.Source.URI)
|
||||
self.assertEqual(foo_bar_parsed.source, foo_bar_uri)
|
||||
|
||||
def test_uri_file(self):
|
||||
"""Test URI parsing (file load)"""
|
||||
config = ConfigLoader()
|
||||
file, file_name = mkstemp()
|
||||
write(file, b"foo")
|
||||
_, file2_name = mkstemp()
|
||||
chmod(file2_name, 0o000) # Remove all permissions so we can't read the file
|
||||
self.assertEqual(config.parse_uri(f"file://{file_name}").value, "foo")
|
||||
self.assertEqual(config.parse_uri(f"file://{file2_name}?def").value, "def")
|
||||
unlink(file_name)
|
||||
unlink(file2_name)
|
||||
|
||||
def test_uri_file_update(self):
|
||||
"""Test URI parsing (file load and update)"""
|
||||
file, file_name = mkstemp()
|
||||
write(file, b"foo")
|
||||
config = ConfigLoader(file_test=f"file://{file_name}")
|
||||
self.assertEqual(config.get("file_test"), "foo")
|
||||
|
||||
# Update config file
|
||||
write(file, b"bar")
|
||||
config.refresh("file_test")
|
||||
self.assertEqual(config.get("file_test"), "foobar")
|
||||
|
||||
unlink(file_name)
|
||||
|
||||
def test_uri_env_full(self):
|
||||
"""Test URI set as env variable"""
|
||||
environ["AUTHENTIK_TEST_VAR"] = "file:///foo?bar"
|
||||
config = ConfigLoader()
|
||||
self.assertEqual(config.get("test_var"), "bar")
|
||||
|
||||
def test_file_update(self):
|
||||
"""Test update_from_file"""
|
||||
config = ConfigLoader()
|
||||
file, file_name = mkstemp()
|
||||
write(file, b"{")
|
||||
file2, file2_name = mkstemp()
|
||||
write(file2, b"{")
|
||||
chmod(file2_name, 0o000) # Remove all permissions so we can't read the file
|
||||
with self.assertRaises(ImproperlyConfigured):
|
||||
config.update_from_file(file_name)
|
||||
config.update_from_file(file2_name)
|
||||
unlink(file_name)
|
||||
unlink(file2_name)
|
||||
|
||||
def test_get_int(self):
|
||||
"""Test get_int"""
|
||||
config = ConfigLoader()
|
||||
config.set("foo", 1234)
|
||||
self.assertEqual(config.get_int("foo"), 1234)
|
||||
|
||||
def test_get_int_invalid(self):
|
||||
"""Test get_int"""
|
||||
config = ConfigLoader()
|
||||
config.set("foo", "bar")
|
||||
self.assertEqual(config.get_int("foo", 1234), 1234)
|
||||
|
||||
def test_get_dict_from_b64_json(self):
|
||||
"""Test get_dict_from_b64_json"""
|
||||
config = ConfigLoader()
|
||||
test_value = b' { "foo": "bar" } '
|
||||
b64_value = base64.b64encode(test_value)
|
||||
config.set("foo", b64_value)
|
||||
self.assertEqual(config.get_dict_from_b64_json("foo"), {"foo": "bar"})
|
||||
|
||||
def test_get_dict_from_b64_json_missing_brackets(self):
|
||||
"""Test get_dict_from_b64_json with missing brackets"""
|
||||
config = ConfigLoader()
|
||||
test_value = b' "foo": "bar" '
|
||||
b64_value = base64.b64encode(test_value)
|
||||
config.set("foo", b64_value)
|
||||
self.assertEqual(config.get_dict_from_b64_json("foo"), {"foo": "bar"})
|
||||
|
||||
def test_get_dict_from_b64_json_invalid(self):
|
||||
"""Test get_dict_from_b64_json with invalid value"""
|
||||
config = ConfigLoader()
|
||||
config.set("foo", "bar")
|
||||
self.assertEqual(config.get_dict_from_b64_json("foo"), {})
|
||||
|
||||
def test_attr_json_encoder(self):
|
||||
"""Test AttrEncoder"""
|
||||
test_attr = Attr("foo", Attr.Source.ENV, "AUTHENTIK_REDIS__USERNAME")
|
||||
json_attr = dumps(test_attr, indent=4, cls=AttrEncoder)
|
||||
self.assertEqual(json_attr, '"foo"')
|
||||
|
||||
def test_attr_json_encoder_no_attr(self):
|
||||
"""Test AttrEncoder if no Attr is passed"""
|
||||
|
||||
class Test:
|
||||
"""Non Attr class"""
|
||||
|
||||
with self.assertRaises(TypeError):
|
||||
test_obj = Test()
|
||||
dumps(test_obj, indent=4, cls=AttrEncoder)
|
||||
|
||||
def test_get_optional_int(self):
|
||||
config = ConfigLoader()
|
||||
self.assertEqual(config.get_optional_int("foo", 21), 21)
|
||||
self.assertEqual(config.get_optional_int("foo"), None)
|
||||
config.set("foo", "21")
|
||||
self.assertEqual(config.get_optional_int("foo"), 21)
|
||||
self.assertEqual(config.get_optional_int("foo", 0), 21)
|
||||
self.assertEqual(config.get_optional_int("foo", "null"), 21)
|
||||
config.set("foo", "null")
|
||||
self.assertEqual(config.get_optional_int("foo"), None)
|
||||
self.assertEqual(config.get_optional_int("foo", 21), None)
|
||||
|
||||
@mock.patch.dict(environ, check_deprecations_env_vars)
|
||||
def test_check_deprecations(self):
|
||||
"""Test config key re-write for deprecated env vars"""
|
||||
config = ConfigLoader()
|
||||
config.update_from_env()
|
||||
config.check_deprecations()
|
||||
self.assertEqual(config.get("redis.broker_url", UNSET), UNSET)
|
||||
self.assertEqual(config.get("redis.broker_transport_options", UNSET), UNSET)
|
||||
self.assertEqual(config.get("redis.cache_timeout", UNSET), UNSET)
|
||||
self.assertEqual(config.get("redis.cache_timeout_flows", UNSET), UNSET)
|
||||
self.assertEqual(config.get("redis.cache_timeout_policies", UNSET), UNSET)
|
||||
self.assertEqual(config.get("redis.cache_timeout_reputation", UNSET), UNSET)
|
||||
self.assertEqual(config.get("broker.url"), "redis://myredis:8327/43")
|
||||
self.assertEqual(config.get("broker.transport_options"), "bWFzdGVybmFtZT1teW1hc3Rlcg==")
|
||||
self.assertEqual(config.get("cache.timeout"), "124s")
|
||||
self.assertEqual(config.get("cache.timeout_flows"), "32m")
|
||||
self.assertEqual(config.get("cache.timeout_policies"), "3920ns")
|
||||
self.assertEqual(config.get("cache.timeout_reputation"), "298382us")
|
||||
|
||||
def test_get_keys(self):
|
||||
"""Test get_keys"""
|
||||
config = ConfigLoader()
|
||||
config.set("foo.bar", "baz")
|
||||
self.assertEqual(list(config.get_keys("foo")), ["bar"])
|
||||
|
||||
def test_db_default(self):
|
||||
"""Test default DB Config"""
|
||||
config = ConfigLoader()
|
||||
config.set("postgresql.host", "foo")
|
||||
config.set("postgresql.name", "foo")
|
||||
config.set("postgresql.user", "foo")
|
||||
config.set("postgresql.password", "foo")
|
||||
config.set("postgresql.port", "foo")
|
||||
config.set("postgresql.sslmode", "foo")
|
||||
config.set("postgresql.sslrootcert", "foo")
|
||||
config.set("postgresql.sslcert", "foo")
|
||||
config.set("postgresql.sslkey", "foo")
|
||||
config.set("postgresql.test.name", "foo")
|
||||
conf = django_db_config(config)
|
||||
self.assertEqual(
|
||||
conf,
|
||||
{
|
||||
"default": {
|
||||
"ENGINE": "authentik.root.db",
|
||||
"HOST": "foo",
|
||||
"NAME": "foo",
|
||||
"OPTIONS": {
|
||||
"pool": False,
|
||||
"sslcert": "foo",
|
||||
"sslkey": "foo",
|
||||
"sslmode": "foo",
|
||||
"sslrootcert": "foo",
|
||||
},
|
||||
"PASSWORD": "foo",
|
||||
"PORT": "foo",
|
||||
"TEST": {"NAME": "foo"},
|
||||
"USER": "foo",
|
||||
"CONN_MAX_AGE": 0,
|
||||
"CONN_HEALTH_CHECKS": False,
|
||||
"DISABLE_SERVER_SIDE_CURSORS": False,
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
def test_db_conn_max_age(self):
|
||||
"""Test DB conn_max_age Config"""
|
||||
config = ConfigLoader()
|
||||
config.set("postgresql.conn_max_age", "null")
|
||||
conf = django_db_config(config)
|
||||
self.assertEqual(
|
||||
conf["default"]["CONN_MAX_AGE"],
|
||||
None,
|
||||
)
|
||||
|
||||
def test_db_read_replicas(self):
|
||||
"""Test read replicas"""
|
||||
config = ConfigLoader()
|
||||
config.set("postgresql.host", "foo")
|
||||
config.set("postgresql.name", "foo")
|
||||
config.set("postgresql.user", "foo")
|
||||
config.set("postgresql.password", "foo")
|
||||
config.set("postgresql.port", "foo")
|
||||
config.set("postgresql.sslmode", "foo")
|
||||
config.set("postgresql.sslrootcert", "foo")
|
||||
config.set("postgresql.sslcert", "foo")
|
||||
config.set("postgresql.sslkey", "foo")
|
||||
config.set("postgresql.test.name", "foo")
|
||||
# Read replica
|
||||
config.set("postgresql.read_replicas.0.host", "bar")
|
||||
conf = django_db_config(config)
|
||||
self.assertEqual(
|
||||
conf,
|
||||
{
|
||||
"default": {
|
||||
"ENGINE": "authentik.root.db",
|
||||
"HOST": "foo",
|
||||
"NAME": "foo",
|
||||
"OPTIONS": {
|
||||
"pool": False,
|
||||
"sslcert": "foo",
|
||||
"sslkey": "foo",
|
||||
"sslmode": "foo",
|
||||
"sslrootcert": "foo",
|
||||
},
|
||||
"PASSWORD": "foo",
|
||||
"PORT": "foo",
|
||||
"TEST": {"NAME": "foo"},
|
||||
"USER": "foo",
|
||||
"CONN_MAX_AGE": 0,
|
||||
"CONN_HEALTH_CHECKS": False,
|
||||
"DISABLE_SERVER_SIDE_CURSORS": False,
|
||||
},
|
||||
"replica_0": {
|
||||
"ENGINE": "authentik.root.db",
|
||||
"HOST": "bar",
|
||||
"NAME": "foo",
|
||||
"OPTIONS": {
|
||||
"pool": False,
|
||||
"sslcert": "foo",
|
||||
"sslkey": "foo",
|
||||
"sslmode": "foo",
|
||||
"sslrootcert": "foo",
|
||||
},
|
||||
"PASSWORD": "foo",
|
||||
"PORT": "foo",
|
||||
"TEST": {"NAME": "foo"},
|
||||
"USER": "foo",
|
||||
"CONN_MAX_AGE": 0,
|
||||
"CONN_HEALTH_CHECKS": False,
|
||||
"DISABLE_SERVER_SIDE_CURSORS": False,
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
def test_db_read_replicas_pgbouncer(self):
|
||||
"""Test read replicas"""
|
||||
config = ConfigLoader()
|
||||
config.set("postgresql.host", "foo")
|
||||
config.set("postgresql.name", "foo")
|
||||
config.set("postgresql.user", "foo")
|
||||
config.set("postgresql.password", "foo")
|
||||
config.set("postgresql.port", "foo")
|
||||
config.set("postgresql.sslmode", "foo")
|
||||
config.set("postgresql.sslrootcert", "foo")
|
||||
config.set("postgresql.sslcert", "foo")
|
||||
config.set("postgresql.sslkey", "foo")
|
||||
config.set("postgresql.test.name", "foo")
|
||||
config.set("postgresql.use_pgbouncer", True)
|
||||
# Read replica
|
||||
config.set("postgresql.read_replicas.0.host", "bar")
|
||||
# Override conn_max_age
|
||||
config.set("postgresql.read_replicas.0.conn_max_age", 10)
|
||||
# This isn't supported
|
||||
config.set("postgresql.read_replicas.0.use_pgbouncer", False)
|
||||
conf = django_db_config(config)
|
||||
self.assertEqual(
|
||||
conf,
|
||||
{
|
||||
"default": {
|
||||
"DISABLE_SERVER_SIDE_CURSORS": True,
|
||||
"CONN_MAX_AGE": None,
|
||||
"CONN_HEALTH_CHECKS": False,
|
||||
"ENGINE": "authentik.root.db",
|
||||
"HOST": "foo",
|
||||
"NAME": "foo",
|
||||
"OPTIONS": {
|
||||
"pool": False,
|
||||
"sslcert": "foo",
|
||||
"sslkey": "foo",
|
||||
"sslmode": "foo",
|
||||
"sslrootcert": "foo",
|
||||
},
|
||||
"PASSWORD": "foo",
|
||||
"PORT": "foo",
|
||||
"TEST": {"NAME": "foo"},
|
||||
"USER": "foo",
|
||||
},
|
||||
"replica_0": {
|
||||
"DISABLE_SERVER_SIDE_CURSORS": True,
|
||||
"CONN_MAX_AGE": 10,
|
||||
"CONN_HEALTH_CHECKS": False,
|
||||
"ENGINE": "authentik.root.db",
|
||||
"HOST": "bar",
|
||||
"NAME": "foo",
|
||||
"OPTIONS": {
|
||||
"pool": False,
|
||||
"sslcert": "foo",
|
||||
"sslkey": "foo",
|
||||
"sslmode": "foo",
|
||||
"sslrootcert": "foo",
|
||||
},
|
||||
"PASSWORD": "foo",
|
||||
"PORT": "foo",
|
||||
"TEST": {"NAME": "foo"},
|
||||
"USER": "foo",
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
def test_db_read_replicas_pgpool(self):
|
||||
"""Test read replicas"""
|
||||
config = ConfigLoader()
|
||||
config.set("postgresql.host", "foo")
|
||||
config.set("postgresql.name", "foo")
|
||||
config.set("postgresql.user", "foo")
|
||||
config.set("postgresql.password", "foo")
|
||||
config.set("postgresql.port", "foo")
|
||||
config.set("postgresql.sslmode", "foo")
|
||||
config.set("postgresql.sslrootcert", "foo")
|
||||
config.set("postgresql.sslcert", "foo")
|
||||
config.set("postgresql.sslkey", "foo")
|
||||
config.set("postgresql.test.name", "foo")
|
||||
config.set("postgresql.use_pgpool", True)
|
||||
# Read replica
|
||||
config.set("postgresql.read_replicas.0.host", "bar")
|
||||
# This isn't supported
|
||||
config.set("postgresql.read_replicas.0.use_pgpool", False)
|
||||
conf = django_db_config(config)
|
||||
self.assertEqual(
|
||||
conf,
|
||||
{
|
||||
"default": {
|
||||
"DISABLE_SERVER_SIDE_CURSORS": True,
|
||||
"CONN_MAX_AGE": 0,
|
||||
"CONN_HEALTH_CHECKS": False,
|
||||
"ENGINE": "authentik.root.db",
|
||||
"HOST": "foo",
|
||||
"NAME": "foo",
|
||||
"OPTIONS": {
|
||||
"pool": False,
|
||||
"sslcert": "foo",
|
||||
"sslkey": "foo",
|
||||
"sslmode": "foo",
|
||||
"sslrootcert": "foo",
|
||||
},
|
||||
"PASSWORD": "foo",
|
||||
"PORT": "foo",
|
||||
"TEST": {"NAME": "foo"},
|
||||
"USER": "foo",
|
||||
},
|
||||
"replica_0": {
|
||||
"DISABLE_SERVER_SIDE_CURSORS": True,
|
||||
"CONN_MAX_AGE": 0,
|
||||
"CONN_HEALTH_CHECKS": False,
|
||||
"ENGINE": "authentik.root.db",
|
||||
"HOST": "bar",
|
||||
"NAME": "foo",
|
||||
"OPTIONS": {
|
||||
"pool": False,
|
||||
"sslcert": "foo",
|
||||
"sslkey": "foo",
|
||||
"sslmode": "foo",
|
||||
"sslrootcert": "foo",
|
||||
},
|
||||
"PASSWORD": "foo",
|
||||
"PORT": "foo",
|
||||
"TEST": {"NAME": "foo"},
|
||||
"USER": "foo",
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
def test_db_read_replicas_diff_ssl(self):
|
||||
"""Test read replicas (with different SSL Settings)"""
|
||||
"""Test read replicas"""
|
||||
config = ConfigLoader()
|
||||
config.set("postgresql.host", "foo")
|
||||
config.set("postgresql.name", "foo")
|
||||
config.set("postgresql.user", "foo")
|
||||
config.set("postgresql.password", "foo")
|
||||
config.set("postgresql.port", "foo")
|
||||
config.set("postgresql.sslmode", "foo")
|
||||
config.set("postgresql.sslrootcert", "foo")
|
||||
config.set("postgresql.sslcert", "foo")
|
||||
config.set("postgresql.sslkey", "foo")
|
||||
config.set("postgresql.test.name", "foo")
|
||||
# Read replica
|
||||
config.set("postgresql.read_replicas.0.host", "bar")
|
||||
config.set("postgresql.read_replicas.0.sslcert", "bar")
|
||||
conf = django_db_config(config)
|
||||
self.assertEqual(
|
||||
conf,
|
||||
{
|
||||
"default": {
|
||||
"ENGINE": "authentik.root.db",
|
||||
"HOST": "foo",
|
||||
"NAME": "foo",
|
||||
"OPTIONS": {
|
||||
"pool": False,
|
||||
"sslcert": "foo",
|
||||
"sslkey": "foo",
|
||||
"sslmode": "foo",
|
||||
"sslrootcert": "foo",
|
||||
},
|
||||
"PASSWORD": "foo",
|
||||
"PORT": "foo",
|
||||
"TEST": {"NAME": "foo"},
|
||||
"USER": "foo",
|
||||
"DISABLE_SERVER_SIDE_CURSORS": False,
|
||||
"CONN_MAX_AGE": 0,
|
||||
"CONN_HEALTH_CHECKS": False,
|
||||
},
|
||||
"replica_0": {
|
||||
"ENGINE": "authentik.root.db",
|
||||
"HOST": "bar",
|
||||
"NAME": "foo",
|
||||
"OPTIONS": {
|
||||
"pool": False,
|
||||
"sslcert": "bar",
|
||||
"sslkey": "foo",
|
||||
"sslmode": "foo",
|
||||
"sslrootcert": "foo",
|
||||
},
|
||||
"PASSWORD": "foo",
|
||||
"PORT": "foo",
|
||||
"TEST": {"NAME": "foo"},
|
||||
"USER": "foo",
|
||||
"DISABLE_SERVER_SIDE_CURSORS": False,
|
||||
"CONN_MAX_AGE": 0,
|
||||
"CONN_HEALTH_CHECKS": False,
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
def test_db_pool(self):
|
||||
"""Test DB Config with pool"""
|
||||
config = ConfigLoader()
|
||||
config.set("postgresql.host", "foo")
|
||||
config.set("postgresql.name", "foo")
|
||||
config.set("postgresql.user", "foo")
|
||||
config.set("postgresql.password", "foo")
|
||||
config.set("postgresql.port", "foo")
|
||||
config.set("postgresql.test.name", "foo")
|
||||
config.set("postgresql.use_pool", True)
|
||||
conf = django_db_config(config)
|
||||
self.assertEqual(
|
||||
conf,
|
||||
{
|
||||
"default": {
|
||||
"ENGINE": "authentik.root.db",
|
||||
"HOST": "foo",
|
||||
"NAME": "foo",
|
||||
"OPTIONS": {
|
||||
"pool": True,
|
||||
"sslcert": None,
|
||||
"sslkey": None,
|
||||
"sslmode": None,
|
||||
"sslrootcert": None,
|
||||
},
|
||||
"PASSWORD": "foo",
|
||||
"PORT": "foo",
|
||||
"TEST": {"NAME": "foo"},
|
||||
"USER": "foo",
|
||||
"CONN_MAX_AGE": 0,
|
||||
"CONN_HEALTH_CHECKS": False,
|
||||
"DISABLE_SERVER_SIDE_CURSORS": False,
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
def test_db_pool_options(self):
|
||||
"""Test DB Config with pool"""
|
||||
config = ConfigLoader()
|
||||
config.set("postgresql.host", "foo")
|
||||
config.set("postgresql.name", "foo")
|
||||
config.set("postgresql.user", "foo")
|
||||
config.set("postgresql.password", "foo")
|
||||
config.set("postgresql.port", "foo")
|
||||
config.set("postgresql.test.name", "foo")
|
||||
config.set("postgresql.use_pool", True)
|
||||
config.set(
|
||||
"postgresql.pool_options",
|
||||
base64.b64encode(
|
||||
dumps(
|
||||
{
|
||||
"max_size": 15,
|
||||
}
|
||||
).encode()
|
||||
).decode(),
|
||||
)
|
||||
conf = django_db_config(config)
|
||||
self.assertEqual(
|
||||
conf,
|
||||
{
|
||||
"default": {
|
||||
"ENGINE": "authentik.root.db",
|
||||
"HOST": "foo",
|
||||
"NAME": "foo",
|
||||
"OPTIONS": {
|
||||
"pool": {
|
||||
"max_size": 15,
|
||||
},
|
||||
"sslcert": None,
|
||||
"sslkey": None,
|
||||
"sslmode": None,
|
||||
"sslrootcert": None,
|
||||
},
|
||||
"PASSWORD": "foo",
|
||||
"PORT": "foo",
|
||||
"TEST": {"NAME": "foo"},
|
||||
"USER": "foo",
|
||||
"CONN_MAX_AGE": 0,
|
||||
"CONN_HEALTH_CHECKS": False,
|
||||
"DISABLE_SERVER_SIDE_CURSORS": False,
|
||||
}
|
||||
},
|
||||
)
|
@ -1,7 +0,0 @@
class AuthentikException(Exception):
"""Base class for authentik exceptions"""


class NotReportedException(AuthentikException):
"""Exception base class for all errors that are suppressed,
and not sent to any kind of monitoring."""

@ -1,6 +0,0 @@
from authentik.common.exceptions import NotReportedException


class ControlFlowException(NotReportedException):
"""Exceptions used to control the flow from exceptions, not reported as a warning/
error in logs"""
Some files were not shown because too many files have changed in this diff.