Compare commits


3 Commits

SHA1 | Message | Date

4d791f4fef | release: 2022.12.3 | 2023-03-02 20:18:21 +01:00

c03a069f02 | security: fix CVE-2023-26481 (#4832) | 2023-03-02 20:16:01 +01:00
    fix CVE-2023-26481
    Signed-off-by: Jens Langhammer <jens@goauthentik.io>

040adb8ce7 | website: always show build version in version dropdown | 2023-02-16 14:40:07 +01:00
    Signed-off-by: Jens Langhammer <jens@goauthentik.io>
    #3940
    # Conflicts:
    #	website/docusaurus.config.js
2718 changed files with 173847 additions and 352422 deletions


@ -1,30 +1,18 @@
[bumpversion]
current_version = 2024.6.0
current_version = 2022.12.3
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
serialize =
{major}.{minor}.{patch}-{rc_t}{rc_n}
{major}.{minor}.{patch}
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
serialize = {major}.{minor}.{patch}
message = release: {new_version}
tag_name = version/{new_version}
[bumpversion:part:rc_t]
values =
rc
final
optional_value = final
[bumpversion:file:pyproject.toml]
[bumpversion:file:package.json]
[bumpversion:file:docker-compose.yml]
[bumpversion:file:schema.yml]
[bumpversion:file:blueprints/schema.json]
[bumpversion:file:authentik/__init__.py]
[bumpversion:file:internal/constants/constants.go]
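
The two parse/serialize pairs above differ only in release-candidate handling: one side of the compare accepts an optional `-rc<N>` suffix, the other only plain `major.minor.patch`. A minimal sketch (Python, with one made-up RC version) of how the RC-aware pattern splits a version string:

```python
import re

# RC-aware pattern from the bumpversion config above; the config shows "\\d",
# which is written here as a plain raw-string "\d".
PARSE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

for version in ("2022.12.3", "2024.6.0-rc2"):  # second one is a hypothetical example
    parts = PARSE.fullmatch(version).groupdict()
    print(version, "->", parts)
    # 2022.12.3    -> rc_t/rc_n are None, serialized as {major}.{minor}.{patch}
    # 2024.6.0-rc2 -> rc_t="rc", rc_n="2", serialized as {major}.{minor}.{patch}-{rc_t}{rc_n}
```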


@ -1,12 +1,8 @@
env
htmlcov
*.env.yml
**/node_modules
dist/**
build/**
build_docs/**
*Dockerfile
blueprints/local
.git
!gen-ts-api/node_modules
!gen-ts-api/dist/**
!gen-go-api/
Dockerfile


@ -7,14 +7,8 @@ charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.html]
[html]
indent_size = 2
[*.{yaml,yml}]
[yaml]
indent_size = 2
[*.go]
indent_style = tab
[Makefile]
indent_style = tab

.github/FUNDING.yml (vendored, 2 changes)

@ -1 +1 @@
custom: https://goauthentik.io/pricing/
github: [BeryJu]


@ -1,9 +1,10 @@
---
name: Bug report
about: Create a report to help us improve
title: ""
title: ''
labels: bug
assignees: ""
assignees: ''
---
**Describe the bug**
@ -11,7 +12,6 @@ A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
@ -27,9 +27,8 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
**Additional context**
Add any other context about the problem here.


@ -1,9 +1,10 @@
---
name: Feature request
about: Suggest an idea for this project
title: ""
title: ''
labels: enhancement
assignees: ""
assignees: ''
---
**Is your feature request related to a problem? Please describe.**


@ -1,17 +0,0 @@
---
name: Hackathon Idea
about: Propose an idea for the hackathon
title: ""
labels: hackathon
assignees: ""
---
**Describe the idea**
A clear concise description of the idea you want to implement
You're also free to work on existing GitHub issues, whether they be feature requests or bugs, just link the existing GitHub issue here.
<!-- Don't modify below here -->
If you want to help working on this idea or want to contribute in any other way, react to this issue with a :rocket:


@ -1,15 +1,16 @@
---
name: Question
about: Ask a question about a feature or specific configuration
title: ""
title: ''
labels: question
assignees: ""
assignees: ''
---
**Describe your question/**
A clear and concise description of what you're trying to do.
**Relevant info**
**Relevant infos**
i.e. Version of other software you're using, specifics of your setup
**Screenshots**
@ -19,9 +20,8 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
**Additional context**
Add any other context about the problem here.


@ -1,5 +1,5 @@
name: "Comment usage instructions on PRs"
description: "Comment usage instructions on PRs"
name: 'Comment usage instructions on PRs'
description: 'Comment usage instructions on PRs'
inputs:
tag:
@ -9,12 +9,15 @@ inputs:
runs:
using: "composite"
steps:
- name: Generate config
id: ev
uses: ./.github/actions/docker-push-variables
- name: Find Comment
uses: peter-evans/find-comment@v2
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: "github-actions[bot]"
comment-author: 'github-actions[bot]'
body-includes: authentik PR Installation instructions
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v2
@ -35,14 +38,6 @@ runs:
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
For arm64, use these values:
```shell
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
AUTHENTIK_TAG=${{ inputs.tag }}-arm64
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
<details>
@ -59,17 +54,6 @@ runs:
tag: ${{ inputs.tag }}
```
For arm64, use these values:
```yaml
authentik:
outposts:
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
image:
repository: ghcr.io/goauthentik/dev-server
tag: ${{ inputs.tag }}-arm64
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
edit-mode: replace


@ -1,47 +1,58 @@
---
name: "Prepare docker environment variables"
description: "Prepare docker environment variables"
inputs:
image-name:
required: true
description: "Docker image prefix"
image-arch:
required: false
description: "Docker image arch"
name: 'Prepare docker environment variables'
description: 'Prepare docker environment variables'
outputs:
shouldBuild:
description: "Whether to build image or not"
value: ${{ steps.ev.outputs.shouldBuild }}
branchName:
description: "Branch name"
value: ${{ steps.ev.outputs.branchName }}
branchNameContainer:
description: "Branch name (for containers)"
value: ${{ steps.ev.outputs.branchNameContainer }}
timestamp:
description: "Timestamp"
value: ${{ steps.ev.outputs.timestamp }}
sha:
description: "sha"
value: ${{ steps.ev.outputs.sha }}
version:
description: "Version"
description: "version"
value: ${{ steps.ev.outputs.version }}
prerelease:
description: "Prerelease"
value: ${{ steps.ev.outputs.prerelease }}
imageTags:
description: "Docker image tags"
value: ${{ steps.ev.outputs.imageTags }}
imageMainTag:
description: "Docker image main tag"
value: ${{ steps.ev.outputs.imageMainTag }}
versionFamily:
description: "versionFamily"
value: ${{ steps.ev.outputs.versionFamily }}
runs:
using: "composite"
steps:
- name: Generate config
id: ev
shell: bash
env:
IMAGE_NAME: ${{ inputs.image-name }}
IMAGE_ARCH: ${{ inputs.image-arch }}
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
shell: python
run: |
python3 ${{ github.action_path }}/push_vars.py
"""Helper script to get the actual branch name, docker safe"""
import configparser
import os
from time import time
parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
branch_name = os.environ["GITHUB_REF"]
if os.environ.get("GITHUB_HEAD_REF", "") != "":
branch_name = os.environ["GITHUB_HEAD_REF"]
should_build = str(os.environ.get("DOCKER_USERNAME", "") != "").lower()
version = parser.get("bumpversion", "current_version")
version_family = ".".join(version.split(".")[:-1])
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print("branchName=%s" % branch_name, file=_output)
print("branchNameContainer=%s" % safe_branch_name, file=_output)
print("timestamp=%s" % int(time()), file=_output)
print("sha=%s" % os.environ["GITHUB_SHA"], file=_output)
print("shouldBuild=%s" % should_build, file=_output)
print("version=%s" % version, file=_output)
print("versionFamily=%s" % version_family, file=_output)


@ -1,62 +0,0 @@
"""Helper script to get the actual branch name, docker safe"""
import configparser
import os
from time import time
parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
should_build = str(os.environ.get("DOCKER_USERNAME", None) is not None).lower()
branch_name = os.environ["GITHUB_REF"]
if os.environ.get("GITHUB_HEAD_REF", "") != "":
branch_name = os.environ["GITHUB_HEAD_REF"]
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-").replace("'", "-")
image_names = os.getenv("IMAGE_NAME").split(",")
image_arch = os.getenv("IMAGE_ARCH") or None
is_pull_request = bool(os.getenv("PR_HEAD_SHA"))
is_release = "dev" not in image_names[0]
sha = os.environ["GITHUB_SHA"] if not is_pull_request else os.getenv("PR_HEAD_SHA")
# 2042.1.0 or 2042.1.0-rc1
version = parser.get("bumpversion", "current_version")
# 2042.1
version_family = ".".join(version.split("-", 1)[0].split(".")[:-1])
prerelease = "-" in version
image_tags = []
if is_release:
for name in image_names:
image_tags += [
f"{name}:{version}",
]
if not prerelease:
image_tags += [
f"{name}:latest",
f"{name}:{version_family}",
]
else:
suffix = ""
if image_arch and image_arch != "amd64":
suffix = f"-{image_arch}"
for name in image_names:
image_tags += [
f"{name}:gh-{sha}{suffix}", # Used for ArgoCD and PR comments
f"{name}:gh-{safe_branch_name}{suffix}", # For convenience
f"{name}:gh-{safe_branch_name}-{int(time())}-{sha[:7]}{suffix}", # Use by FluxCD
]
image_main_tag = image_tags[0]
image_tags_rendered = ",".join(image_tags)
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print(f"shouldBuild={should_build}", file=_output)
print(f"sha={sha}", file=_output)
print(f"version={version}", file=_output)
print(f"prerelease={prerelease}", file=_output)
print(f"imageTags={image_tags_rendered}", file=_output)
print(f"imageMainTag={image_main_tag}", file=_output)


@ -1,7 +0,0 @@
#!/bin/bash -x
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
GITHUB_OUTPUT=/dev/stdout \
GITHUB_REF=ref \
GITHUB_SHA=sha \
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
python $SCRIPT_DIR/push_vars.py


@ -1,40 +1,31 @@
name: "Setup authentik testing environment"
description: "Setup authentik testing environment"
inputs:
postgresql_version:
description: "Optional postgresql image tag"
default: "16"
name: 'Setup authentik testing environment'
description: 'Setup authentik testing environment'
runs:
using: "composite"
steps:
- name: Install poetry & deps
- name: Install poetry
shell: bash
run: |
pipx install poetry || true
sudo apt-get update
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
sudo apt update
sudo apt install -y libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry
uses: actions/setup-python@v5
uses: actions/setup-python@v3
with:
python-version-file: "pyproject.toml"
cache: "poetry"
python-version: '3.11'
cache: 'poetry'
- name: Setup node
uses: actions/setup-node@v4
uses: actions/setup-node@v3.1.0
with:
node-version-file: web/package.json
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Setup go
uses: actions/setup-go@v5
with:
go-version-file: "go.mod"
- name: Setup dependencies
shell: bash
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/docker-compose.yml up -d
docker-compose -f .github/actions/setup/docker-compose.yml up -d
poetry env use python3.11
poetry install
cd web && npm ci
- name: Generate config


@ -1,19 +1,23 @@
version: '3.7'
services:
postgresql:
image: docker.io/library/postgres:${PSQL_TAG:-16}
container_name: postgres
image: library/postgres:12
volumes:
- db-data:/var/lib/postgresql/data
- db-data:/var/lib/postgresql/data
environment:
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
POSTGRES_DB: authentik
ports:
- 5432:5432
- 5432:5432
restart: always
redis:
image: docker.io/library/redis
container_name: redis
image: library/redis
ports:
- 6379:6379
- 6379:6379
restart: always
volumes:
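
The compose file above only provides the two backing services the test jobs need. A minimal connectivity check, assuming the third-party `psycopg2` and `redis` Python packages are installed, against the ports and credentials defined above:

```python
import psycopg2
import redis

# Credentials and ports from .github/actions/setup/docker-compose.yml above.
conn = psycopg2.connect(
    host="127.0.0.1",
    port=5432,
    user="authentik",
    password="EK-5jnKfjrGRm<77",
    dbname="authentik",
)
print("postgres server_version:", conn.server_version)

r = redis.Redis(host="127.0.0.1", port=6379)
print("redis ping:", r.ping())
```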


@ -1,2 +0,0 @@
enabled: true
preservePullRequestTitle: true

.github/codecov.yml (vendored, 11 changes)

@ -1,10 +1,3 @@
coverage:
status:
project:
default:
target: auto
# adjust accordingly based on how flaky your tests are
# this allows a 1% drop from the previous base commit coverage
threshold: 1%
comment:
after_n_builds: 3
precision: 2
round: up


@ -1 +0,0 @@
authentic->authentik


@ -2,6 +2,3 @@ keypair
keypairs
hass
warmup
ontext
singed
assertIn

.github/dependabot.yml (vendored, 155 changes)

@ -1,97 +1,62 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "ci:"
labels:
- dependencies
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies
- package-ecosystem: npm
directories:
- "/web"
- "/tests/wdio"
- "/web/sfe"
schedule:
interval: daily
time: "04:00"
labels:
- dependencies
open-pull-requests-limit: 10
commit-message:
prefix: "web:"
groups:
sentry:
patterns:
- "@sentry/*"
- "@spotlightjs/*"
babel:
patterns:
- "@babel/*"
- "babel-*"
eslint:
patterns:
- "@typescript-eslint/*"
- "eslint"
- "eslint-*"
storybook:
patterns:
- "@storybook/*"
- "*storybook*"
esbuild:
patterns:
- "@esbuild/*"
rollup:
patterns:
- "@rollup/*"
- "rollup-*"
wdio:
patterns:
- "@wdio/*"
- package-ecosystem: npm
directory: "/website"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "website:"
labels:
- dependencies
groups:
docusaurus:
patterns:
- "@docusaurus/*"
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies
- package-ecosystem: docker
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "ci:"
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: npm
directory: "/web"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "web:"
- package-ecosystem: npm
directory: "/website"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "website:"
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: docker
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"


@ -1,34 +1,19 @@
<!--
👋 Hi there! Welcome.
👋 Hello there! Welcome.
Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute
Please check the [Contributing guidelines](https://github.com/goauthentik/authentik/blob/main/CONTRIBUTING.md#how-can-i-contribute).
-->
## Details
# Details
* **Does this resolve an issue?**
Resolves #
<!--
Explain what this PR changes, what the rationale behind the change is, if any new requirements are introduced or any breaking changes caused by this PR.
## Changes
### New Features
* Adds feature which does x, y, and z.
Ideally also link an Issue for context that this PR will close using `closes #`
-->
REPLACE ME
### Breaking Changes
* Adds breaking change which causes \<issue\>.
---
## Checklist
- [ ] Local tests pass (`ak test authentik/`)
- [ ] The code has been formatted (`make lint-fix`)
If an API change has been made
- [ ] The API schema has been updated (`make gen-build`)
If changes to the frontend have been made
- [ ] The code has been formatted (`make web`)
If applicable
- [ ] The documentation has been updated
- [ ] The documentation has been formatted (`make website`)
## Additional
Any further notes or comments you want to make.

.github/stale.yml (vendored, new file, 18 changes)

@ -0,0 +1,18 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 60
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
- pinned
- security
- pr_wanted
- enhancement
- bug/confirmed
- enhancement/confirmed
- question
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
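
The stale-bot settings above amount to simple date arithmetic: 60 days without activity marks an issue stale, and roughly 7 further days of silence closes it. A small sketch that approximates the close rule as 60 + 7 days of total inactivity, with made-up dates:

```python
from datetime import datetime, timedelta, timezone

DAYS_UNTIL_STALE = 60  # daysUntilStale in .github/stale.yml above
DAYS_UNTIL_CLOSE = 7   # daysUntilClose

def issue_state(last_activity: datetime, now: datetime) -> str:
    idle = now - last_activity
    if idle < timedelta(days=DAYS_UNTIL_STALE):
        return "active"
    if idle < timedelta(days=DAYS_UNTIL_STALE + DAYS_UNTIL_CLOSE):
        return "stale"
    return "closed"

now = datetime(2023, 3, 2, tzinfo=timezone.utc)
print(issue_state(datetime(2023, 1, 15, tzinfo=timezone.utc), now))  # active (46 days idle)
print(issue_state(datetime(2022, 12, 28, tzinfo=timezone.utc), now)) # stale  (64 days idle)
```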


@ -2,15 +2,15 @@ git:
filters:
- filter_type: file
# all supported i18n types: https://docs.transifex.com/formats
file_format: XLIFF
file_format: PO
source_language: en
source_file: web/xliff/en.xlf
source_file: web/src/locales/en.po
# path expression to translation files, must contain <lang> placeholder
translation_files_expression: "web/xliff/<lang>.xlf"
translation_files_expression: 'web/src/locales/<lang>.po'
- filter_type: file
# all supported i18n types: https://docs.transifex.com/formats
file_format: PO
source_language: en
source_file: locale/en/LC_MESSAGES/django.po
# path expression to translation files, must contain <lang> placeholder
translation_files_expression: "locale/<lang>/LC_MESSAGES/django.po"
translation_files_expression: 'locale/<lang>/LC_MESSAGES/django.po'


@ -1,65 +0,0 @@
name: authentik-api-py-publish
on:
push:
branches: [main]
paths:
- "schema.yml"
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
permissions:
id-token: write
steps:
- id: generate_token
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v4
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Install poetry & deps
shell: bash
run: |
pipx install poetry || true
sudo apt-get update
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry
uses: actions/setup-python@v5
with:
python-version-file: "pyproject.toml"
cache: "poetry"
- name: Generate API Client
run: make gen-client-py
- name: Publish package
working-directory: gen-py-api/
run: |
poetry build
- name: Publish package to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
packages-dir: gen-py-api/dist/
# We can't easily upgrade the API client being used due to poetry being poetry
# so we'll have to rely on dependabot
# - name: Upgrade /
# run: |
# export VERSION=$(cd gen-py-api && poetry version -s)
# poetry add "authentik_client=$VERSION" --allow-prereleases --lock
# - uses: peter-evans/create-pull-request@v6
# id: cpr
# with:
# token: ${{ steps.generate_token.outputs.token }}
# branch: update-root-api-client
# commit-message: "root: bump API Client version"
# title: "root: bump API Client version"
# body: "root: bump API Client version"
# delete-branch: true
# signoff: true
# # ID from https://api.github.com/users/authentik-automation[bot]
# author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
# - uses: peter-evans/enable-pull-request-automerge@v3
# with:
# token: ${{ steps.generate_token.outputs.token }}
# pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
# merge-method: squash


@ -1,59 +0,0 @@
name: authentik-api-ts-publish
on:
push:
branches: [main]
paths:
- "schema.yml"
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v4
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/setup-node@v4
with:
node-version-file: web/package.json
registry-url: "https://registry.npmjs.org"
- name: Generate API Client
run: make gen-client-ts
- name: Publish package
working-directory: gen-ts-api/
run: |
npm ci
npm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}
- name: Upgrade /web
working-directory: web
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- name: Upgrade /web/sfe
working-directory: web/sfe
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- uses: peter-evans/create-pull-request@v6
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
branch: update-web-api-client
commit-message: "web: bump API Client version"
title: "web: bump API Client version"
body: "web: bump API Client version"
delete-branch: true
signoff: true
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash


@ -1,4 +1,3 @@
---
name: authentik-ci-main
on:
@ -7,10 +6,11 @@ on:
- main
- next
- version-*
paths-ignore:
- website
pull_request:
branches:
- main
- version-*
env:
POSTGRES_DB: authentik
@ -23,14 +23,15 @@ jobs:
fail-fast: false
matrix:
job:
- bandit
- pylint
- black
- codespell
- isort
- bandit
- pyright
- pending-migrations
- ruff
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run job
@ -38,39 +39,31 @@ jobs:
test-migrations:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run migrations
run: poetry run python -m lifecycle.migrate
test-migrations-from-stable:
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
psql:
- 15-alpine
- 16-alpine
continue-on-error: true
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Setup authentik env
uses: ./.github/actions/setup
- name: checkout stable
run: |
# Delete all poetry envs
rm -rf /home/runner/.cache/pypoetry
# Copy current, latest config to local
cp authentik/lib/default.yml local.env.yml
cp -R .github ..
cp -R scripts ..
git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1)
git checkout $(git describe --abbrev=0 --match 'version/*')
rm -rf .github/ scripts/
mv ../.github ../scripts .
- name: Setup authentik env (stable)
- name: Setup authentik env (ensure stable deps are installed)
uses: ./.github/actions/setup
with:
postgresql_version: ${{ matrix.psql }}
- name: run migrations to stable
run: poetry run python -m lifecycle.migrate
- name: checkout current code
@ -80,97 +73,52 @@ jobs:
git reset --hard HEAD
git clean -d -fx .
git checkout $GITHUB_SHA
# Delete previous poetry env
rm -rf /home/runner/.cache/pypoetry/virtualenvs/*
poetry install
- name: Setup authentik env (ensure latest deps are installed)
uses: ./.github/actions/setup
with:
postgresql_version: ${{ matrix.psql }}
- name: migrate to latest
run: |
poetry run python -m lifecycle.migrate
- name: run tests
env:
# Test in the main database that we just migrated from the previous stable version
AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
run: |
poetry run make test
run: poetry run python -m lifecycle.migrate
test-unittest:
name: test-unittest - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
psql:
- 15-alpine
- 16-alpine
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
with:
postgresql_version: ${{ matrix.psql }}
- name: run unittest
run: |
poetry run make test
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v4
uses: codecov/codecov-action@v3
with:
flags: unit
token: ${{ secrets.CODECOV_TOKEN }}
test-integration:
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Create k8s Kind Cluster
uses: helm/kind-action@v1.10.0
uses: helm/kind-action@v1.5.0
- name: run integration
run: |
poetry run coverage run manage.py test tests/integration
poetry run make test-integration
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v4
uses: codecov/codecov-action@v3
with:
flags: integration
token: ${{ secrets.CODECOV_TOKEN }}
test-e2e:
name: test-e2e (${{ matrix.job.name }})
test-e2e-provider:
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
job:
- name: proxy
glob: tests/e2e/test_provider_proxy*
- name: oauth
glob: tests/e2e/test_provider_oauth2* tests/e2e/test_source_oauth*
- name: oauth-oidc
glob: tests/e2e/test_provider_oidc*
- name: saml
glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml*
- name: ldap
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
- name: radius
glob: tests/e2e/test_provider_radius*
- name: scim
glob: tests/e2e/test_source_scim*
- name: flows
glob: tests/e2e/test_flows*
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
run: |
docker compose -f tests/e2e/docker-compose.yml up -d
docker-compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v4
uses: actions/cache@v3
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
@ -183,13 +131,41 @@ jobs:
npm run build
- name: run e2e
run: |
poetry run coverage run manage.py test ${{ matrix.job.glob }}
poetry run make test-e2e-provider
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v4
uses: codecov/codecov-action@v3
with:
flags: e2e
test-e2e-rest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
run: |
docker-compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v3
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web/
run: |
npm ci
make -C .. gen-client-ts
npm run build
- name: run e2e
run: |
poetry run make test-e2e-rest
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v3
with:
flags: e2e
token: ${{ secrets.CODECOV_TOKEN }}
ci-core-mark:
needs:
- lint
@ -197,83 +173,55 @@ jobs:
- test-migrations-from-stable
- test-unittest
- test-integration
- test-e2e
- test-e2e-rest
- test-e2e-provider
runs-on: ubuntu-latest
steps:
- run: echo mark
build:
needs: ci-core-mark
runs-on: ubuntu-latest
timeout-minutes: 120
strategy:
fail-fast: false
matrix:
arch:
- amd64
- arm64
needs: ci-core-mark
runs-on: ubuntu-latest
permissions:
# Needed to upload contianer images to ghcr.io
packages: write
timeout-minutes: 120
- 'linux/amd64'
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.1.0
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v2
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/dev-server
image-arch: ${{ matrix.arch }}
- name: Login to Container Registry
uses: docker/login-action@v2
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: generate ts client
run: make gen-client-ts
- name: Build Docker Image
uses: docker/build-push-action@v6
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
context: .
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
tags: ${{ steps.ev.outputs.imageTags }}
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache
cache-to: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max
platforms: linux/${{ matrix.arch }}
pr-comment:
needs:
- build
runs-on: ubuntu-latest
if: ${{ github.event_name == 'pull_request' }}
permissions:
# Needed to write comments on PRs
pull-requests: write
timeout-minutes: 120
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/dev-server
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: ${{ matrix.arch }}
- name: Comment on PR
if: github.event_name == 'pull_request'
continue-on-error: true
uses: ./.github/actions/comment-pr-instructions
with:
tag: gh-${{ steps.ev.outputs.imageMainTag }}
tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}


@ -1,4 +1,3 @@
---
name: authentik-ci-outpost
on:
@ -10,16 +9,15 @@ on:
pull_request:
branches:
- main
- version-*
jobs:
lint-golint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version-file: "go.mod"
go-version: "^1.17"
- name: Prepare and generate API
run: |
# Create folder structure for go embeds
@ -29,20 +27,14 @@ jobs:
- name: Generate API
run: make gen-client-go
- name: golangci-lint
uses: golangci/golangci-lint-action@v6
with:
version: v1.54.2
args: --timeout 5000s --verbose
skip-cache: true
uses: golangci/golangci-lint-action@v3
test-unittest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version-file: "go.mod"
- name: Setup authentik env
uses: ./.github/actions/setup
go-version: "^1.17"
- name: Generate API
run: make gen-client-go
- name: Go unittests
@ -55,7 +47,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- run: echo mark
build-container:
build:
timeout-minutes: 120
needs:
- ci-outpost-mark
@ -65,49 +57,43 @@ jobs:
type:
- proxy
- ldap
- radius
- rac
arch:
- 'linux/amd64'
runs-on: ubuntu-latest
permissions:
# Needed to upload contianer images to ghcr.io
packages: write
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.1.0
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v2
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
- name: Login to Container Registry
uses: docker/login-action@v2
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate API
run: make gen-client-go
- name: Build Docker Image
uses: docker/build-push-action@v6
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: ${{ matrix.type }}.Dockerfile
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
file: ${{ matrix.type }}.Dockerfile
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
platforms: linux/amd64,linux/arm64
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
cache-to: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache,mode=max
build-binary:
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: ${{ matrix.arch }}
build-outpost-binary:
timeout-minutes: 120
needs:
- ci-outpost-mark
@ -118,21 +104,17 @@ jobs:
type:
- proxy
- ldap
- radius
- rac
goos: [linux]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-go@v5
go-version: "^1.17"
- uses: actions/setup-node@v3.5.1
with:
go-version-file: "go.mod"
- uses: actions/setup-node@v4
with:
node-version-file: web/package.json
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Generate API
run: make gen-client-go
@ -146,5 +128,8 @@ jobs:
set -x
export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }}
export CGO_ENABLED=0
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- uses: actions/upload-artifact@v3
with:
name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
path: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}


@ -9,58 +9,83 @@ on:
pull_request:
branches:
- main
- version-*
jobs:
lint:
lint-eslint:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
command:
- lint
- lint:lockfile
- tsc
- prettier-check
project:
- web
- tests/wdio
include:
- command: tsc
project: web
extra_setup: |
cd sfe/ && npm ci
- command: lit-analyse
project: web
extra_setup: |
# lit-analyse doesn't understand path rewrites, so make it
# belive it's an actual module
cd node_modules/@goauthentik
ln -s ../../src/ web
exclude:
- command: lint:lockfile
project: tests/wdio
- command: tsc
project: tests/wdio
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
with:
node-version-file: ${{ matrix.project }}/package.json
cache: "npm"
cache-dependency-path: ${{ matrix.project }}/package-lock.json
- working-directory: ${{ matrix.project }}/
run: |
npm ci
${{ matrix.extra_setup }}
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: Lint
working-directory: ${{ matrix.project }}/
run: npm run ${{ matrix.command }}
- name: Eslint
working-directory: web/
run: npm run lint
lint-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
with:
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: TSC
working-directory: web/
run: npm run tsc
lint-prettier:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
with:
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: prettier
working-directory: web/
run: npm run prettier-check
lint-lit-analyse:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
with:
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: |
npm ci
# lit-analyse doesn't understand path rewrites, so make it
# belive it's an actual module
cd node_modules/@goauthentik
ln -s ../../src/ web
- name: Generate API
run: make gen-client-ts
- name: lit-analyse
working-directory: web/
run: npm run lit-analyse
ci-web-mark:
needs:
- lint
- lint-eslint
- lint-prettier
- lint-lit-analyse
- lint-build
runs-on: ubuntu-latest
steps:
- run: echo mark
@ -69,11 +94,11 @@ jobs:
- ci-web-mark
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
with:
node-version-file: web/package.json
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@ -82,21 +107,3 @@ jobs:
- name: build
working-directory: web/
run: npm run build
test:
needs:
- ci-web-mark
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: test
working-directory: web/
run: npm run test


@ -9,63 +9,25 @@ on:
pull_request:
branches:
- main
- version-*
jobs:
lint:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
command:
- lint:lockfile
- prettier-check
steps:
- uses: actions/checkout@v4
- working-directory: website/
run: npm ci
- name: Lint
working-directory: website/
run: npm run ${{ matrix.command }}
test:
lint-prettier:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
with:
node-version-file: website/package.json
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: test
- name: prettier
working-directory: website/
run: npm test
build:
runs-on: ubuntu-latest
name: ${{ matrix.job }}
strategy:
fail-fast: false
matrix:
job:
- build
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: website/package.json
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: build
working-directory: website/
run: npm run ${{ matrix.job }}
run: npm run prettier-check
ci-website-mark:
needs:
- lint
- test
- build
- lint-prettier
runs-on: ubuntu-latest
steps:
- run: echo mark


@ -2,11 +2,12 @@ name: "CodeQL"
on:
push:
branches: [main, "*", next, version*]
branches: [ main, '*', next, version* ]
pull_request:
branches: [main]
# The branches below must be a subset of the branches above
branches: [ main ]
schedule:
- cron: "30 6 * * 5"
- cron: '30 6 * * 5'
jobs:
analyze:
@ -20,17 +21,40 @@ jobs:
strategy:
fail-fast: false
matrix:
language: ["go", "javascript", "python"]
language: [ 'go', 'javascript', 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more:
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
- name: Autobuild
uses: github/codeql-action/autobuild@v3
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2


@ -1,43 +0,0 @@
name: authentik-gen-update-webauthn-mds
on:
workflow_dispatch:
schedule:
- cron: '30 1 1,15 * *'
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
build:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v4
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Setup authentik env
uses: ./.github/actions/setup
- run: poetry run ak update_webauthn_mds
- uses: peter-evans/create-pull-request@v6
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
branch: update-fido-mds-client
commit-message: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
title: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
body: "stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs"
delete-branch: true
signoff: true
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash


@ -1,38 +0,0 @@
---
# See https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#force-deleting-cache-entries
name: Cleanup cache after PR is closed
on:
pull_request:
types:
- closed
permissions:
# Permission to delete cache
actions: write
jobs:
cleanup:
runs-on: ubuntu-latest
steps:
- name: Check out code
uses: actions/checkout@v4
- name: Cleanup
run: |
gh extension install actions/gh-actions-cache
REPO=${{ github.repository }}
BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge"
echo "Fetching list of cache key"
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
# Setting this to not fail the workflow while deleting cache keys.
set +e
echo "Deleting caches..."
for cacheKey in $cacheKeysForPR; do
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
done
echo "Done"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@ -1,8 +1,8 @@
name: ghcr-retention
on:
# schedule:
# - cron: "0 0 * * *" # every day at midnight
schedule:
- cron: '0 0 * * *' # every day at midnight
workflow_dispatch:
jobs:
@ -10,18 +10,13 @@ jobs:
name: Delete old unused container images
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Delete 'dev' containers older than a week
uses: snok/container-retention-policy@v2
uses: sondrelg/container-retention-policy@v1
with:
image-names: dev-server,dev-ldap,dev-proxy
cut-off: One week ago UTC
account-type: org
org-name: goauthentik
untagged-only: false
token: ${{ steps.generate_token.outputs.token }}
token: ${{ secrets.GHCR_CLEANUP_TOKEN }}
skip-tags: gh-next,gh-main


@ -1,61 +0,0 @@
---
name: authentik-compress-images
on:
push:
branches:
- main
paths:
- "**.jpg"
- "**.jpeg"
- "**.png"
- "**.webp"
pull_request:
paths:
- "**.jpg"
- "**.jpeg"
- "**.png"
- "**.webp"
workflow_dispatch:
jobs:
compress:
name: compress
runs-on: ubuntu-latest
# Don't run on forks. Token will not be available. Will run on main and open a PR anyway
if: |
github.repository == 'goauthentik/authentik' &&
(github.event_name != 'pull_request' ||
github.event.pull_request.head.repo.full_name == github.repository)
steps:
- id: generate_token
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v4
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Compress images
id: compress
uses: calibreapp/image-actions@main
with:
githubToken: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }}
- uses: peter-evans/create-pull-request@v6
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
title: "*: Auto compress images"
branch-suffix: timestamp
commit-messsage: "*: compress images"
body: ${{ steps.compress.outputs.markdown }}
delete-branch: true
signoff: true
- uses: peter-evans/enable-pull-request-automerge@v3
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash


@ -1,31 +0,0 @@
name: authentik-publish-source-docs
on:
push:
branches:
- main
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
publish-source-docs:
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v4
- name: Setup authentik env
uses: ./.github/actions/setup
- name: generate docs
run: |
poetry run make migrate
poetry run ak build_source_docs
- name: Publish
uses: netlify/actions/cli@master
with:
args: deploy --dir=source_docs --prod
env:
NETLIFY_SITE_ID: eb246b7b-1d83-4f69-89f7-01a936b4ca59
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}


@ -1,21 +0,0 @@
name: authentik-on-release-next-branch
on:
schedule:
- cron: "0 12 * * *" # every day at noon
workflow_dispatch:
permissions:
# Needed to be able to push to the next branch
contents: write
jobs:
update-next:
runs-on: ubuntu-latest
environment: internal-production
steps:
- uses: actions/checkout@v4
with:
ref: main
- run: |
git push origin --force main:next


@ -1,4 +1,3 @@
---
name: authentik-on-release
on:
@ -8,123 +7,112 @@ on:
jobs:
build-server:
runs-on: ubuntu-latest
permissions:
# Needed to upload contianer images to ghcr.io
packages: write
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.1.0
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v2
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/server,beryju/authentik
- name: Docker Login Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: make empty clients
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: Build Docker Image
uses: docker/build-push-action@v6
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
context: .
push: true
push: ${{ github.event_name == 'release' }}
secrets:
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
tags: |
beryju/authentik:${{ steps.ev.outputs.version }},
beryju/authentik:${{ steps.ev.outputs.versionFamily }},
beryju/authentik:latest,
ghcr.io/goauthentik/server:${{ steps.ev.outputs.version }},
ghcr.io/goauthentik/server:${{ steps.ev.outputs.versionFamily }},
ghcr.io/goauthentik/server:latest
platforms: linux/amd64,linux/arm64
build-args: |
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
build-outpost:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
type:
- proxy
- ldap
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version: "^1.17"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
- name: Docker Login Registry
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik-${{ matrix.type }}:${{ steps.ev.outputs.version }},
beryju/authentik-${{ matrix.type }}:${{ steps.ev.outputs.versionFamily }},
beryju/authentik-${{ matrix.type }}:latest,
ghcr.io/goauthentik/${{ matrix.type }}:${{ steps.ev.outputs.version }},
ghcr.io/goauthentik/${{ matrix.type }}:${{ steps.ev.outputs.versionFamily }},
ghcr.io/goauthentik/${{ matrix.type }}:latest
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
tags: ${{ steps.ev.outputs.imageTags }}
platforms: linux/amd64,linux/arm64
build-outpost:
runs-on: ubuntu-latest
permissions:
# Needed to upload contianer images to ghcr.io
packages: write
strategy:
fail-fast: false
matrix:
type:
- proxy
- ldap
- radius
- rac
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with:
go-version-file: "go.mod"
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.1.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/${{ matrix.type }},beryju/authentik-${{ matrix.type }}
- name: make empty clients
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: Docker Login Registry
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v6
with:
push: true
tags: ${{ steps.ev.outputs.imageTags }}
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
context: .
build-args: |
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
build-outpost-binary:
timeout-minutes: 120
runs-on: ubuntu-latest
permissions:
# Needed to upload binaries to the release
contents: write
strategy:
fail-fast: false
matrix:
type:
- proxy
- ldap
- radius
goos: [linux, darwin]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version-file: "go.mod"
- uses: actions/setup-node@v4
go-version: "^1.17"
- uses: actions/setup-node@v3.5.1
with:
node-version-file: web/package.json
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Build web
working-directory: web/
@ -136,7 +124,6 @@ jobs:
set -x
export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }}
export CGO_ENABLED=0
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- name: Upload binaries to release
uses: svenstaro/upload-release-action@v2
@ -152,15 +139,15 @@ jobs:
- build-outpost-binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Run test suite in final docker images
run: |
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
docker compose pull -q
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker-compose pull -q
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test-all
sentry-release:
needs:
- build-server
@ -168,27 +155,24 @@ jobs:
- build-outpost-binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/server
- name: Get static files from docker image
run: |
docker pull ${{ steps.ev.outputs.imageMainTag }}
container=$(docker container create ${{ steps.ev.outputs.imageMainTag }})
docker pull ghcr.io/goauthentik/server:latest
container=$(docker container create ghcr.io/goauthentik/server:latest)
docker cp ${container}:web/ .
- name: Create a Sentry.io release
uses: getsentry/action-release@v1
continue-on-error: true
if: ${{ github.event_name == 'release' }}
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: authentik-security-inc
SENTRY_PROJECT: authentik
with:
version: authentik@${{ steps.ev.outputs.version }}
sourcemaps: "./web/dist"
url_prefix: "~/static/dist"
sourcemaps: './web/dist'
url_prefix: '~/static/dist'


@ -1,48 +1,41 @@
---
name: authentik-on-tag
on:
push:
tags:
- "version/*"
- 'version/*'
jobs:
build:
name: Create Release from Tag
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Pre-release test
run: |
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker buildx install
mkdir -p ./gen-ts-api
docker build -t testing:latest .
echo "AUTHENTIK_IMAGE=testing" >> .env
echo "AUTHENTIK_TAG=latest" >> .env
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
- id: generate_token
uses: tibdex/github-app-token@v2
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test-all
- name: Extract version number
id: get_version
uses: actions/github-script@v6
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with:
image-name: ghcr.io/goauthentik/server
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
return context.payload.ref.replace(/\/refs\/tags\/version\//, '');
- name: Create Release
id: create_release
uses: actions/create-release@v1.1.4
env:
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ github.ref }}
release_name: Release ${{ steps.ev.outputs.version }}
release_name: Release ${{ steps.get_version.outputs.result }}
draft: true
prerelease: ${{ steps.ev.outputs.prerelease == 'true' }}
prerelease: false
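
Both versions of this release workflow ultimately need the version number out of the pushed `version/*` tag: one side asks the docker-push-variables action, the other strips the ref prefix with github-script. The same extraction as a short Python sketch, with a hypothetical ref:

```python
def version_from_ref(ref: str) -> str:
    """Strip the Git ref prefix from a pushed version tag."""
    prefix = "refs/tags/version/"
    if not ref.startswith(prefix):
        raise ValueError(f"not a version tag ref: {ref}")
    return ref[len(prefix):]

print(version_from_ref("refs/tags/version/2022.12.3"))  # -> 2022.12.3
```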


@ -1,33 +0,0 @@
name: 'authentik-repo-stale'
on:
schedule:
- cron: '30 1 * * *'
workflow_dispatch:
permissions:
# Needed to update issues and PRs
issues: write
jobs:
stale:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/stale@v9
with:
repo-token: ${{ steps.generate_token.outputs.token }}
days-before-stale: 60
days-before-close: 7
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
stale-issue-label: wontfix
stale-issue-message: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Don't stale PRs, so only apply to PRs with a non-existent label
only-pr-labels: foo

View File

@ -1,39 +0,0 @@
name: authentik-translation-advice
on:
pull_request:
branches:
- main
paths:
- "!**"
- "locale/**"
- "!locale/en/**"
- "web/xliff/**"
permissions:
# Permission to write comment
pull-requests: write
jobs:
post-comment:
runs-on: ubuntu-latest
steps:
- name: Find Comment
uses: peter-evans/find-comment@v3
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: "github-actions[bot]"
body-includes: authentik translations instructions
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v4
with:
comment-id: ${{ steps.fc.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
### authentik translations instructions
Thanks for your pull request!
authentik translations are handled using [Transifex](https://explore.transifex.com/authentik/authentik/). Please edit translations over there and they'll be included automatically.

View File

@ -0,0 +1,36 @@
name: authentik-backend-translate-compile
on:
push:
branches: [ main ]
paths:
- '/locale/'
pull_request:
paths:
- '/locale/'
workflow_dispatch:
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
compile:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run compile
run: poetry run ./manage.py compilemessages
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
id: cpr
with:
token: ${{ secrets.GITHUB_TOKEN }}
branch: compile-backend-translation
commit-message: "core: compile backend translations"
title: "core: compile backend translations"
body: "core: compile backend translations"
delete-branch: true
signoff: true

View File

@ -1,43 +0,0 @@
---
name: authentik-backend-translate-extract-compile
on:
schedule:
- cron: "0 0 * * *" # every day at midnight
workflow_dispatch:
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
compile:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v4
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run extract
run: |
poetry run make i18n-extract
- name: run compile
run: |
poetry run ak compilemessages
make web-check-compile
- name: Create Pull Request
uses: peter-evans/create-pull-request@v6
with:
token: ${{ steps.generate_token.outputs.token }}
branch: extract-compile-backend-translation
commit-message: "core, web: update translations"
title: "core, web: update translations"
body: "core, web: update translations"
delete-branch: true
signoff: true

View File

@ -1,49 +0,0 @@
# Rename transifex pull requests to have a correct naming
# Also enables auto squash-merge
name: authentik-translation-transifex-rename
on:
pull_request:
types: [opened, reopened]
permissions:
# Permission to rename PR
pull-requests: write
jobs:
rename_pr:
runs-on: ubuntu-latest
if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
steps:
- id: generate_token
uses: tibdex/github-app-token@v2
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Get current title
id: title
env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: |
title=$(curl -q -L \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title)
echo "title=${title}" >> "$GITHUB_OUTPUT"
- name: Rename
env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: |
curl -L \
-X PATCH \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \
-d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}"
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ github.event.pull_request.number }}
merge-method: squash

41
.github/workflows/web-api-publish.yml vendored Normal file
View File

@ -0,0 +1,41 @@
name: authentik-web-api-publish
on:
push:
branches: [ main ]
paths:
- 'schema.yml'
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
with:
node-version: '16'
registry-url: 'https://registry.npmjs.org'
- name: Generate API Client
run: make gen-client-ts
- name: Publish package
working-directory: gen-ts-api/
run: |
npm ci
npm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}
- name: Upgrade /web
working-directory: web/
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
id: cpr
with:
token: ${{ secrets.GITHUB_TOKEN }}
branch: update-web-api-client
commit-message: "web: bump API Client version"
title: "web: bump API Client version"
body: "web: bump API Client version"
delete-branch: true
signoff: true

9
.gitignore vendored
View File

@ -166,7 +166,6 @@ dmypy.json
# SageMath parsed files
# Environments
**/.DS_Store
# Spyder project settings
@ -201,11 +200,3 @@ media/
.idea/
/gen-*/
data/
# Local Netlify folder
.netlify
.ruff_cache
source_docs/
### Golang ###
/vendor/

View File

@ -1,21 +0,0 @@
{
"recommendations": [
"bashmish.es6-string-css",
"bpruitt-goddard.mermaid-markdown-syntax-highlighting",
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"esbenp.prettier-vscode",
"github.vscode-github-actions",
"golang.go",
"Gruntfuggly.todo-tree",
"mechatroner.rainbow-csv",
"ms-python.black-formatter",
"charliermarsh.ruff",
"ms-python.python",
"ms-python.vscode-pylance",
"ms-python.black-formatter",
"redhat.vscode-yaml",
"Tobermory.es6-string-html",
"unifiedjs.vscode-mdx",
]
}

27
.vscode/launch.json vendored
View File

@ -1,27 +0,0 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Python: PDB attach Server",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 6800
},
"justMyCode": true,
"django": true
},
{
"name": "Python: PDB attach Worker",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 6900
},
"justMyCode": true,
"django": true
},
]
}

21
.vscode/settings.json vendored
View File

@ -4,24 +4,22 @@
"asgi",
"authentik",
"authn",
"entra",
"goauthentik",
"jwks",
"kubernetes",
"oidc",
"openid",
"passwordless",
"plex",
"saml",
"scim",
"slo",
"sso",
"totp",
"traefik",
"webauthn",
"traefik",
"passwordless",
"kubernetes"
],
"python.linting.pylintEnabled": true,
"todo-tree.tree.showCountsInTree": true,
"todo-tree.tree.showBadges": true,
"python.formatting.provider": "black",
"yaml.customTags": [
"!Find sequence",
"!KeyOf scalar",
@ -30,8 +28,7 @@
"!Format sequence",
"!Condition sequence",
"!Env sequence",
"!Env scalar",
"!If sequence"
"!Env scalar"
],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.importModuleSpecifierEnding": "index",
@ -47,11 +44,5 @@
"url": "https://github.com/goauthentik/authentik/issues/<num>",
"ignoreCase": false
}
],
"go.testFlags": [
"-count=1"
],
"github-actions.workflows.pinned.workflows": [
".github/workflows/ci-main.yml"
]
}

View File

@ -1,28 +0,0 @@
# Fallback
* @goauthentik/backend @goauthentik/frontend
# Backend
authentik/ @goauthentik/backend
blueprints/ @goauthentik/backend
cmd/ @goauthentik/backend
internal/ @goauthentik/backend
lifecycle/ @goauthentik/backend
schemas/ @goauthentik/backend
scripts/ @goauthentik/backend
tests/ @goauthentik/backend
pyproject.toml @goauthentik/backend
poetry.lock @goauthentik/backend
go.mod @goauthentik/backend
go.sum @goauthentik/backend
# Infrastructure
.github/ @goauthentik/infrastructure
Dockerfile @goauthentik/infrastructure
*Dockerfile @goauthentik/infrastructure
.dockerignore @goauthentik/infrastructure
docker-compose.yml @goauthentik/infrastructure
# Web
web/ @goauthentik/frontend
tests/wdio/ @goauthentik/frontend
# Docs & Website
website/ @goauthentik/docs
# Security
website/docs/security/ @goauthentik/security

View File

@ -1 +0,0 @@
website/developer-docs/index.md

178
CONTRIBUTING.md Normal file
View File

@ -0,0 +1,178 @@
# Contributing to authentik
:+1::tada: Thanks for taking the time to contribute! :tada::+1:
The following is a set of guidelines for contributing to authentik and its components, which are hosted in the [goauthentik Organization](https://github.com/goauthentik) on GitHub. These are mostly guidelines, not rules. Use your best judgment, and feel free to propose changes to this document in a pull request.
#### Table Of Contents
[Code of Conduct](#code-of-conduct)
[I don't want to read this whole thing, I just have a question!!!](#i-dont-want-to-read-this-whole-thing-i-just-have-a-question)
[What should I know before I get started?](#what-should-i-know-before-i-get-started)
- [The components](#the-components)
- [authentik's structure](#authentiks-structure)
[How Can I Contribute?](#how-can-i-contribute)
- [Reporting Bugs](#reporting-bugs)
- [Suggesting Enhancements](#suggesting-enhancements)
- [Your First Code Contribution](#your-first-code-contribution)
- [Pull Requests](#pull-requests)
[Styleguides](#styleguides)
- [Git Commit Messages](#git-commit-messages)
- [Python Styleguide](#python-styleguide)
- [Documentation Styleguide](#documentation-styleguide)
## Code of Conduct
Basically, don't be a dickhead. This is an open-source, non-profit project that is made in the free time of volunteers. If there's something you dislike or think can be done better, tell us! We'd love to hear any suggestions for improvement.
## I don't want to read this whole thing, I just have a question!!!
Either [create a question on GitHub](https://github.com/goauthentik/authentik/issues/new?assignees=&labels=question&template=question.md&title=) or join [the Discord server](https://goauthentik.io/discord).
## What should I know before I get started?
### The components
authentik consists of a few larger components:
- _authentik_, the actual application server, is described below.
- _outpost-proxy_ is a Go application based on a forked version of oauth2_proxy, which does identity-aware reverse proxying.
- _outpost-ldap_ is a Go LDAP server that uses the _authentik_ application server as its backend.
- _web_ is the web frontend, both for administrating and using authentik. It is written in TypeScript using lit-html and the PatternFly CSS Library.
- _website_ is the Website/documentation, which uses docusaurus.
### authentik's structure
authentik is at its very core a Django project. It consists of many individual Django applications. These applications are intended to separate concerns, and they may share code with each other.
These are the current packages:
<a id="authentik-packages"/>
```
authentik
├── admin - Administrative tasks and APIs, no models (Version updates, Metrics, system tasks)
├── api - General API Configuration (Routes, Schema and general API utilities)
├── core - Core authentik functionality, central routes, core Models
├── crypto - Cryptography, currently used to generate and hold Certificates and Private Keys
├── events - Event Log, middleware and signals to generate events
├── flows - Flows, the FlowPlanner and the FlowExecutor, used for all flows for authentication, authorization, etc
├── lib - Generic library of functions, few dependencies on other packages.
├── managed - Handle managed models and their state.
├── outposts - Configure and deploy outposts on kubernetes and docker.
├── policies - General PolicyEngine
│   ├── dummy - A Dummy policy used for testing
│   ├── event_matcher - Match events based on different criteria
│   ├── expiry - Check when a user's password was last set
│   ├── expression - Execute any arbitrary python code
│   ├── hibp - Check a password against HaveIBeenPwned
│   ├── password - Check a password against several rules
│   └── reputation - Check the user's/client's reputation
├── providers
│   ├── ldap - Provide LDAP access to authentik users/groups using an outpost
│   ├── oauth2 - OIDC-compliant OAuth2 provider
│   ├── proxy - Provides an identity-aware proxy using an outpost
│   └── saml - SAML2 Provider
├── recovery - Generate keys to use in case you lock yourself out
├── root - Root django application, contains global settings and routes
├── sources
│   ├── ldap - Sync LDAP users from OpenLDAP or Active Directory into authentik
│   ├── oauth - OAuth1 and OAuth2 Source
│   ├── plex - Plex source
│   └── saml - SAML2 Source
├── stages
│   ├── authenticator_duo - Configure a DUO authenticator
│   ├── authenticator_static - Configure TOTP backup keys
│   ├── authenticator_totp - Configure a TOTP authenticator
│   ├── authenticator_validate - Validate any authenticator
│   ├── authenticator_webauthn - Configure a WebAuthn authenticator
│   ├── captcha - Make the user pass a captcha
│   ├── consent - Let the user decide if they want to consent to an action
│   ├── deny - Static deny, can be used with policies
│   ├── dummy - Dummy stage to test
│   ├── email - Send the user an email and block execution until they click the link
│   ├── identification - Identify a user with any combination of fields
│   ├── invitation - Invitation system to limit flows to certain users
│   ├── password - Password authentication
│   ├── prompt - Arbitrary prompts
│   ├── user_delete - Delete the currently pending user
│   ├── user_login - Login the currently pending user
│   ├── user_logout - Logout the currently pending user
│   └── user_write - Write any currently pending data to the user.
└── tenants - Soft tenancy, configure defaults and branding per domain
```
This Django project runs in gunicorn, which spawns multiple workers and threads. Gunicorn is run from a lightweight Go application which reverse-proxies it, handles static files and will eventually gain more functionality as more code is migrated to Go.
There are also several background tasks which run in Celery; the root Celery application is defined in `authentik.root.celery`.
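As a minimal sketch (editorial illustration, assuming a configured development environment), the cached status of those background tasks can be inspected from a Django shell (for example `./manage.py shell`) through the `TaskInfo` helper from `authentik.events.monitored_tasks`, which the admin Tasks API further down in this diff builds on:
```python
# Minimal sketch: list monitored background tasks and their last result.
# Assumes a working development setup; TaskInfo comes from
# authentik.events.monitored_tasks, as used by the Tasks API in this diff.
from authentik.events.monitored_tasks import TaskInfo

for task in sorted(TaskInfo.all().values(), key=lambda task: task.task_name):
    print(task.task_name, task.result.status.name, task.result.messages)
```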
## How Can I Contribute?
### Reporting Bugs
This section guides you through submitting a bug report for authentik. Following these guidelines helps maintainers and the community understand your report, reproduce the behavior, and find related reports.
Whenever authentik encounters an error, it will be logged as an Event with the type `system_exception`. This event type has a button to directly open a pre-filled GitHub issue form.
This form will have the full stack trace of the error that occurred and shouldn't contain any sensitive data.
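A minimal sketch (editorial illustration; the exact `Event` model fields are an assumption here, not documented behaviour) of listing such events from a Django shell:
```python
# Sketch only: assumes authentik.events.models.Event / EventAction and the
# fields `action`, `created` and `context`; adjust to the actual model if needed.
from authentik.events.models import Event, EventAction

recent_errors = Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).order_by("-created")[:10]
for event in recent_errors:
    print(event.created, event.context)
```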
### Suggesting Enhancements
This section guides you through submitting an enhancement suggestion for authentik, including completely new features and minor improvements to existing functionality. Following these guidelines helps maintainers and the community understand your suggestion and find related suggestions.
When you are creating an enhancement suggestion, please fill in [the template](https://github.com/goauthentik/authentik/issues/new?assignees=&labels=enhancement&template=feature_request.md&title=), including the steps that you imagine you would take if the feature you're requesting existed.
### Your First Code Contribution
#### Local development
authentik can be run locally, although depending on which part you want to work on, different prerequisites are required.
This is documented in the [developer docs](https://goauthentik.io/developer-docs/?utm_source=github)
### Pull Requests
The process described here has several goals:
- Maintain authentik's quality
- Fix problems that are important to users
- Engage the community in working toward the best possible authentik
- Enable a sustainable system for authentik's maintainers to review contributions
Please follow these steps to have your contribution considered by the maintainers:
1. Follow the [styleguides](#styleguides)
2. After you submit your pull request, verify that all [status checks](https://help.github.com/articles/about-status-checks/) are passing <details><summary>What if the status checks are failing?</summary>If a status check is failing, and you believe that the failure is unrelated to your change, please leave a comment on the pull request explaining why you believe the failure is unrelated. A maintainer will re-run the status check for you. If we conclude that the failure was a false positive, then we will open an issue to track that problem with our status check suite.</details>
3. Ensure your code has tests. While it is not always possible to test every single case, the majority of the code should be tested.
While the prerequisites above must be satisfied prior to having your pull request reviewed, the reviewer(s) may ask you to complete additional design work, tests, or other changes before your pull request can be ultimately accepted.
## Styleguides
### Git Commit Messages
- Use the format of `<package>: <verb> <description>`
- See [here](#authentik-packages) for `package`
- Example: `providers/saml2: fix parsing of requests`
- Reference issues and pull requests liberally after the first line
### Python Styleguide
All Python code is linted with [black](https://black.readthedocs.io/en/stable/), [PyLint](https://www.pylint.org/) and [isort](https://pycqa.github.io/isort/).
authentik runs on Python 3.9 at the time of writing this.
- Use native type-annotations wherever possible (a short example follows this list).
- Add meaningful docstrings when possible.
- Ensure any database migrations work properly from the last stable version (this is checked via CI)
- If your code changes central functions, make sure nothing else is broken.
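To make the conventions above concrete, here is a short, purely illustrative snippet (the function is hypothetical and not part of authentik): native type annotations, a meaningful docstring, and formatting that black and isort accept.
```python
from typing import Optional


def mask_email(address: str, placeholder: Optional[str] = None) -> str:
    """Return the email address with its local part replaced by a placeholder."""
    local, _, domain = address.partition("@")
    return f"{placeholder or '*' * len(local)}@{domain}"
```
For example, `mask_email("jens@goauthentik.io")` returns `****@goauthentik.io`.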
### Documentation Styleguide
- Use [MDX](https://mdxjs.com/) whenever appropriate.

View File

@ -1,186 +1,117 @@
# syntax=docker/dockerfile:1
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/node:22 as website-builder
ENV NODE_ENV=production
WORKDIR /work/website
RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \
--mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \
--mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \
npm ci --include=dev
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as website-builder
COPY ./website /work/website/
COPY ./blueprints /work/blueprints/
COPY ./schema.yml /work/
COPY ./SECURITY.md /work/
RUN npm run build-bundled
ENV NODE_ENV=production
WORKDIR /work/website
RUN npm ci && npm run build-docs-only
# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/node:22 as web-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as web-builder
ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
ENV NODE_ENV=production
WORKDIR /work/web
RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
--mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
--mount=type=bind,target=/work/web/sfe/package.json,src=./web/sfe/package.json \
--mount=type=bind,target=/work/web/sfe/package-lock.json,src=./web/sfe/package-lock.json \
--mount=type=bind,target=/work/web/scripts,src=./web/scripts \
--mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \
npm ci --include=dev && \
cd sfe && \
npm ci --include=dev
COPY ./package.json /work
COPY ./web /work/web/
COPY ./website /work/website/
COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
RUN npm run build && \
cd sfe && \
npm run build
ENV NODE_ENV=production
WORKDIR /work/web
RUN npm ci && npm run build
# Stage 3: Build go proxy
FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/oss/go/microsoft/golang:1.22-fips-bookworm AS go-builder
# Stage 3: Poetry to requirements.txt export
FROM docker.io/python:3.11.1-slim-bullseye AS poetry-locker
ARG TARGETOS
ARG TARGETARCH
ARG TARGETVARIANT
WORKDIR /work
COPY ./pyproject.toml /work
COPY ./poetry.lock /work
ARG GOOS=$TARGETOS
ARG GOARCH=$TARGETARCH
RUN pip install --no-cache-dir poetry && \
poetry export -f requirements.txt --output requirements.txt && \
poetry export -f requirements.txt --dev --output requirements-dev.txt
WORKDIR /go/src/goauthentik.io
# Stage 4: Build go proxy
FROM docker.io/golang:1.19.4-bullseye AS go-builder
RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
dpkg --add-architecture arm64 && \
apt-get update && \
apt-get install -y --no-install-recommends crossbuild-essential-arm64 gcc-aarch64-linux-gnu
WORKDIR /work
RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
--mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \
--mount=type=cache,target=/go/pkg/mod \
go mod download
COPY --from=web-builder /work/web/robots.txt /work/web/robots.txt
COPY --from=web-builder /work/web/security.txt /work/web/security.txt
COPY ./cmd /go/src/goauthentik.io/cmd
COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib
COPY ./web/static.go /go/src/goauthentik.io/web/static.go
COPY --from=web-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
COPY --from=web-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt
COPY ./internal /go/src/goauthentik.io/internal
COPY ./go.mod /go/src/goauthentik.io/go.mod
COPY ./go.sum /go/src/goauthentik.io/go.sum
COPY ./cmd /work/cmd
COPY ./web/static.go /work/web/static.go
COPY ./internal /work/internal
COPY ./go.mod /work/go.mod
COPY ./go.sum /work/go.sum
RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
--mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
if [ "$TARGETARCH" = "arm64" ]; then export CC=aarch64-linux-gnu-gcc && export CC_FOR_TARGET=gcc-aarch64-linux-gnu; fi && \
CGO_ENABLED=1 GOEXPERIMENT="systemcrypto" GOFLAGS="-tags=requirefips" GOARM="${TARGETVARIANT#v}" \
go build -o /go/authentik ./cmd/server
RUN go build -o /work/authentik ./cmd/server/
# Stage 4: MaxMind GeoIP
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.0.1 as geoip
# Stage 5: MaxMind GeoIP
FROM docker.io/maxmindinc/geoipupdate:v4.10 as geoip
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
ENV GEOIPUPDATE_VERBOSE="1"
ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
USER root
RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
--mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
mkdir -p /usr/share/GeoIP && \
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
# Stage 5: Python dependencies
FROM ghcr.io/goauthentik/fips-python:3.12.3-slim-bookworm-fips-full AS python-deps
WORKDIR /ak-root/poetry
ENV VENV_PATH="/ak-root/venv" \
POETRY_VIRTUALENVS_CREATE=false \
PATH="/ak-root/venv/bin:$PATH"
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
apt-get update && \
# Required for installing pip packages
apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev
RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
--mount=type=bind,target=./poetry.lock,src=./poetry.lock \
--mount=type=cache,target=/root/.cache/pip \
--mount=type=cache,target=/root/.cache/pypoetry \
python -m venv /ak-root/venv/ && \
bash -c "source ${VENV_PATH}/bin/activate && \
pip3 install --upgrade pip && \
pip3 install poetry && \
poetry install --only=main --no-ansi --no-interaction --no-root && \
pip install --force-reinstall /wheels/*"
/bin/sh -c "\
export GEOIPUPDATE_ACCOUNT_ID=$(cat /run/secrets/GEOIPUPDATE_ACCOUNT_ID); \
export GEOIPUPDATE_LICENSE_KEY=$(cat /run/secrets/GEOIPUPDATE_LICENSE_KEY); \
/usr/bin/entry.sh || exit 0 \
"
# Stage 6: Run
FROM ghcr.io/goauthentik/fips-python:3.12.3-slim-bookworm-fips-full AS final-image
ARG GIT_BUILD_HASH
ARG VERSION
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
FROM docker.io/python:3.11.1-slim-bullseye AS final-image
LABEL org.opencontainers.image.url https://goauthentik.io
LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik
LABEL org.opencontainers.image.version ${VERSION}
LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH}
WORKDIR /
# We cannot cache this layer otherwise we'll end up with a bigger image
ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
COPY --from=poetry-locker /work/requirements.txt /
COPY --from=poetry-locker /work/requirements-dev.txt /
COPY --from=geoip /usr/share/GeoIP /geoip
RUN apt-get update && \
# Required for installing pip packages
apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev && \
# Required for runtime
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates && \
apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
# Required for bootstrap & healthcheck
apt-get install -y --no-install-recommends runit && \
apt-get install -y --no-install-recommends curl runit && \
pip install --no-cache-dir -r /requirements.txt && \
apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev && \
apt-get autoremove --purge -y && \
apt-get clean && \
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
mkdir -p /certs /media /blueprints && \
mkdir -p /authentik/.ssh && \
mkdir -p /ak-root && \
chown authentik:authentik /certs /media /authentik/.ssh /ak-root
chown authentik:authentik /certs /media /authentik/.ssh
COPY ./authentik/ /authentik
COPY ./pyproject.toml /
COPY ./poetry.lock /
COPY ./schemas /schemas
COPY ./xml /xml
COPY ./locale /locale
COPY ./tests /tests
COPY ./manage.py /
COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle
COPY --from=go-builder /go/authentik /bin/authentik
COPY --from=python-deps /ak-root/venv /ak-root/venv
COPY --from=go-builder /work/authentik /authentik-proxy
COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=website-builder /work/website/build/ /website/help/
COPY --from=geoip /usr/share/GeoIP /geoip
COPY --from=website-builder /work/website/help/ /website/help/
USER 1000
ENV TMPDIR=/dev/shm/ \
PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
VENV_PATH="/ak-root/venv" \
POETRY_VIRTUALENVS_CREATE=false
ENV TMPDIR /dev/shm/
ENV PYTHONUNBUFFERED 1
ENV PATH "/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/lifecycle"
ENV GOFIPS=1
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "/lifecycle/ak", "healthcheck" ]
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]
ENTRYPOINT [ "dumb-init", "--", "ak" ]
ENTRYPOINT [ "/usr/local/bin/dumb-init", "--", "/lifecycle/ak" ]

View File

@ -1,11 +1,6 @@
Copyright (c) 2023 Jens Langhammer
MIT License
Portions of this software are licensed as follows:
* All content residing under the "website/" directory of this repository is licensed under "Creative Commons: CC BY-SA 4.0 license".
* All content that resides under the "authentik/enterprise/" directory of this repository, if that directory exists, is licensed under the license defined in "authentik/enterprise/LICENSE".
* All client-side JavaScript (when served directly or after being compiled, arranged, augmented, or combined), is licensed under the "MIT Expat" license.
* All third party components incorporated into the authentik are licensed under the original license provided by the owner of the applicable component.
* Content outside of the above mentioned directories or restrictions above is available under the "MIT" license as defined below.
Copyright (c) 2022 Jens Langhammer
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

267
Makefile
View File

@ -1,224 +1,142 @@
.PHONY: gen dev-reset all clean test web website
.SHELLFLAGS += ${SHELLFLAGS} -e
.SHELLFLAGS += -x -e
PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.npm_version)
PY_SOURCES = authentik tests scripts lifecycle .github
DOCKER_IMAGE ?= "authentik:test"
GEN_API_TS = "gen-ts-api"
GEN_API_PY = "gen-py-api"
GEN_API_GO = "gen-go-api"
all: lint-fix lint test gen web
pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
test-integration:
coverage run manage.py test tests/integration
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
-I .github/codespell-words.txt \
-S 'web/src/locales/**' \
-S 'website/developer-docs/api/reference/**' \
test-e2e-provider:
coverage run manage.py test tests/e2e/test_provider*
test-e2e-rest:
coverage run manage.py test tests/e2e/test_flows* tests/e2e/test_source*
test-go:
go test -timeout 0 -v -race -cover ./...
test-docker:
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker-compose pull -q
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test
rm -f .env
test:
coverage run manage.py test --keepdb authentik
coverage html
coverage report
lint-fix:
isort authentik tests scripts lifecycle
black authentik tests scripts lifecycle
codespell -I .github/codespell-words.txt -S 'web/src/locales/**' -w \
authentik \
internal \
cmd \
web/src \
website/src \
website/blog \
website/developer-docs \
website/docs \
website/integrations \
website/src
website/developer-docs
all: lint-fix lint test gen web ## Lint, build, and test everything
HELP_WIDTH := $(shell grep -h '^[a-z][^ ]*:.*\#\#' $(MAKEFILE_LIST) 2>/dev/null | \
cut -d':' -f1 | awk '{printf "%d\n", length}' | sort -rn | head -1)
help: ## Show this help
@echo "\nSpecify a command. The choices are:\n"
@grep -Eh '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | \
awk 'BEGIN {FS = ":.*?## "}; {printf " \033[0;36m%-$(HELP_WIDTH)s \033[m %s\n", $$1, $$2}' | \
sort
@echo ""
test-go:
go test -timeout 0 -v -race -cover ./...
test-docker: ## Run all tests in a docker-compose
echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env
docker compose pull -q
docker compose up --no-start
docker compose start postgresql redis
docker compose run -u root server test-all
rm -f .env
test: ## Run the server tests and produce a coverage report (locally)
coverage run manage.py test --keepdb authentik
coverage html
coverage report
lint-fix: lint-codespell ## Lint and automatically fix errors in the python source code. Reports spelling errors.
black $(PY_SOURCES)
ruff check --fix $(PY_SOURCES)
lint-codespell: ## Reports spelling errors.
codespell -w $(CODESPELL_ARGS)
lint: ## Lint the python and golang sources
bandit -r $(PY_SOURCES) -x web/node_modules -x tests/wdio/node_modules -x website/node_modules
lint:
pylint authentik tests lifecycle
bandit -r authentik tests lifecycle -x node_modules
golangci-lint run -v
core-install:
poetry install
migrate: ## Run the Authentik Django server's migrations
migrate:
python -m lifecycle.migrate
i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service
run:
go run -v ./cmd/server/
core-i18n-extract:
ak makemessages \
--add-location file \
--no-obsolete \
--ignore web \
--ignore internal \
--ignore ${GEN_API_TS} \
--ignore ${GEN_API_GO} \
--ignore website \
-l en
i18n-extract: i18n-extract-core web-extract
install: web-install website-install core-install ## Install all requires dependencies for `web`, `website` and `core`
dev-drop-db:
dropdb -U ${pg_user} -h ${pg_host} ${pg_name}
# Also remove the test-db if it exists
dropdb -U ${pg_user} -h ${pg_host} test_${pg_name} || true
redis-cli -n 0 flushall
dev-create-db:
createdb -U ${pg_user} -h ${pg_host} ${pg_name}
dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.
i18n-extract-core:
ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en
#########################
## API Schema
#########################
gen-build: ## Extract the schema from the database
AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
ak make_blueprint_schema > blueprints/schema.json
AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
ak spectacular --file schema.yml
gen-build:
AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
AUTHENTIK_DEBUG=true ak spectacular --file schema.yml
gen-changelog: ## (Release) generate the changelog based on the commits since the last tag
git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
npx prettier --write changelog.md
gen-diff: ## (Release) generate the changelog diff between the current schema and the last tag
git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > old_schema.yml
gen-diff:
git show $(shell git describe --abbrev=0):schema.yml > old_schema.yml
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-diff:2.1.0-beta.8 \
docker.io/openapitools/openapi-diff:2.1.0-beta.3 \
--markdown /local/diff.md \
/local/old_schema.yml /local/schema.yml
rm old_schema.yml
sed -i 's/{/&#123;/g' diff.md
sed -i 's/}/&#125;/g' diff.md
npx prettier --write diff.md
gen-clean-ts: ## Remove generated API client for Typescript
rm -rf ./${GEN_API_TS}/
rm -rf ./web/node_modules/@goauthentik/api/
gen-clean:
rm -rf web/api/src/
rm -rf api/
gen-clean-go: ## Remove generated API client for Go
rm -rf ./${GEN_API_GO}/
gen-clean-py: ## Remove generated API client for Python
rm -rf ./${GEN_API_PY}/
gen-clean: gen-clean-ts gen-clean-go gen-clean-py ## Remove generated API clients
gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescript into the authentik UI Application
gen-client-ts:
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/${GEN_API_TS} \
-o /local/gen-ts-api \
-c /local/scripts/api-ts-config.yaml \
--additional-properties=npmVersion=${NPM_VERSION} \
--git-repo-id authentik \
--git-user-id goauthentik
mkdir -p web/node_modules/@goauthentik/api
cd ./${GEN_API_TS} && npm i
\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api
cd gen-ts-api && npm i
\cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api
gen-client-py: gen-clean-py ## Build and install the authentik API for Python
gen-client-go:
wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O config.yaml
mkdir -p templates
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O templates/README.mustache
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O templates/go.mod.mustache
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \
-i /local/schema.yml \
-g python \
-o /local/${GEN_API_PY} \
-c /local/scripts/api-py-config.yaml \
--additional-properties=packageVersion=${NPM_VERSION} \
--git-repo-id authentik \
--git-user-id goauthentik
pip install ./${GEN_API_PY}
gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates
wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache
cp schema.yml ./${GEN_API_GO}/
docker run \
--rm -v ${PWD}/${GEN_API_GO}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
-i /local/schema.yml \
-g go \
-o /local/ \
-o /local/gen-go-api \
-c /local/config.yaml
go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO}
rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/
go mod edit -replace goauthentik.io/api/v3=./gen-go-api
rm -rf config.yaml ./templates/
gen-dev-config: ## Generate a local development config file
gen-dev-config:
python -m scripts.generate_config
gen: gen-build gen-client-ts
gen: gen-build gen-clean gen-client-ts
#########################
## Web
#########################
web-build: web-install ## Build the Authentik UI
web-build: web-install
cd web && npm run build
web: web-lint-fix web-lint web-check-compile ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
web: web-lint-fix web-lint web-check-compile
web-install: ## Install the necessary libraries to build the Authentik UI
web-install:
cd web && npm ci
web-watch: ## Build and watch the Authentik UI for changes, updating automatically
web-watch:
rm -rf web/dist/
mkdir web/dist/
touch web/dist/.gitkeep
cd web && npm run watch
web-storybook-watch: ## Build and run the storybook documentation server
cd web && npm run storybook
web-lint-fix:
cd web && npm run prettier
@ -229,56 +147,57 @@ web-lint:
web-check-compile:
cd web && npm run tsc
web-i18n-extract:
cd web && npm run extract-locales
web-extract:
cd web && npm run extract
#########################
## Website
#########################
website: website-lint-fix website-build ## Automatically fix formatting issues in the Authentik website/docs source code, lint the code, and compile it
website: website-lint-fix website-build
website-install:
cd website && npm ci
website-lint-fix: lint-codespell
website-lint-fix:
cd website && npm run prettier
website-build:
cd website && npm run build
website-watch: ## Build and watch the documentation website, updating automatically
website-watch:
cd website && npm run watch
#########################
## Docker
#########################
docker: ## Build a docker image of the current source tree
mkdir -p ${GEN_API_TS}
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
#########################
## CI
#########################
# These targets are used by GitHub actions to allow usage of matrix
# which makes the YAML file a lot smaller
PY_SOURCES=authentik tests lifecycle
ci--meta-debug:
python -V
node --version
ci-pylint: ci--meta-debug
pylint $(PY_SOURCES)
ci-black: ci--meta-debug
black --check $(PY_SOURCES)
ci-ruff: ci--meta-debug
ruff check $(PY_SOURCES)
ci-codespell: ci--meta-debug
codespell $(CODESPELL_ARGS) -s
ci-isort: ci--meta-debug
isort --check $(PY_SOURCES)
ci-bandit: ci--meta-debug
bandit -r $(PY_SOURCES)
ci-pyright: ci--meta-debug
./web/node_modules/.bin/pyright $(PY_SOURCES)
ci-pending-migrations: ci--meta-debug
ak makemigrations --check
install: web-install website-install
poetry install
dev-reset:
dropdb -U postgres -h localhost authentik
createdb -U postgres -h localhost authentik
redis-cli -n 0 flushall
make migrate

View File

@ -15,29 +15,45 @@
## What is authentik?
authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them.
authentik is an open-source Identity Provider focused on flexibility and versatility. You can use authentik in an existing environment to add support for new protocols. authentik is also a great solution for implementing signup/recovery/etc in your application, so you don't have to deal with it.
## Installation
For small/test setups it is recommended to use Docker Compose; refer to the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github).
For small/test setups it is recommended to use docker-compose, see the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github)
For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github).
For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github)
## Screenshots
| Light | Dark |
| ----------------------------------------------------------- | ---------------------------------------------------------- |
| ![](https://docs.goauthentik.io/img/screen_apps_light.jpg) | ![](https://docs.goauthentik.io/img/screen_apps_dark.jpg) |
| ![](https://docs.goauthentik.io/img/screen_admin_light.jpg) | ![](https://docs.goauthentik.io/img/screen_admin_dark.jpg) |
| Light | Dark |
| ------------------------------------------------------ | ----------------------------------------------------- |
| ![](https://goauthentik.io/img/screen_apps_light.jpg) | ![](https://goauthentik.io/img/screen_apps_dark.jpg) |
| ![](https://goauthentik.io/img/screen_admin_light.jpg) | ![](https://goauthentik.io/img/screen_admin_dark.jpg) |
## Development
See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github)
See [Development Documentation](https://goauthentik.io/developer-docs/?utm_source=github)
## Security
See [SECURITY.md](SECURITY.md)
## Adoption and Contributions
## Sponsors
Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).
This project is proudly sponsored by:
<p>
<a href="https://www.digitalocean.com/?utm_medium=opensource&utm_source=goauthentik.io">
<img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" width="201px">
</a>
</p>
DigitalOcean provides development and testing resources for authentik.
<p>
<a href="https://www.netlify.com">
<img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" />
</a>
</p>
Netlify hosts the [goauthentik.io](https://goauthentik.io) site.

View File

@ -1,54 +1,44 @@
authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce its existence and release the fixed version.
## Independent audits and pentests
In May/June of 2023 [Cure53](https://cure53.de) conducted an audit and pentest. The [results](https://cure53.de/pentest-report_authentik.pdf) are published on the [Cure53 website](https://cure53.de/#publications-2023). For more details about authentik's response to the findings of the audit refer to [2023-06 Cure53 Code audit](https://goauthentik.io/docs/security/2023-06-cure53).
## What authentik classifies as a CVE
CVE (Common Vulnerabilities and Exposures) is a system designed to aggregate all vulnerabilities. As such, a CVE will be issued when there is either a vulnerability or an exposure. Per NIST, a vulnerability is:
“Weakness in an information system, system security procedures, internal controls, or implementation that could be exploited or triggered by a threat source.”
If it is determined that the issue does qualify as a CVE, a CVE number will be issued to the reporter by GitHub.
Even if the issue is not a CVE, we still greatly appreciate your help in hardening authentik.
Authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce its existence and release the fixed version.
## Supported Versions
(.x being the latest patch release for each version)
| Version | Supported |
| -------- | --------- |
| 2024.4.x | ✅ |
| 2024.6.x | ✅ |
| Version | Supported |
| --------- | ------------------ |
| 2022.11.x | :white_check_mark: |
| 2022.12.x | :white_check_mark: |
## Reporting a Vulnerability
To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io). Be sure to include relevant information like which version you've found the issue in, instructions on how to reproduce the issue, and anything else that might make it easier for us to find the issue.
To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io). Be sure to include relevant information like which version you've found the issue in, instructions on how to reproduce the issue, and anything else that might make it easier for us to find the bug.
## Severity levels
## Criticality levels
authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories:
### High
| Score | Severity |
| ---------- | -------- |
| 0.0 | None |
| 0.1 - 3.9 | Low |
| 4.0 - 6.9 | Medium |
| 7.0 - 8.9 | High |
| 9.0 - 10.0 | Critical |
- Authorization bypass
- Circumvention of policies
### Moderate
- Denial-of-Service attacks
### Low
- Unvalidated redirects
- Issues requiring uncommon setups
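For illustration only, a minimal sketch (not part of authentik) of the score-to-severity mapping from the table above:
```python
def cvss_to_severity(score: float) -> str:
    """Map a CVSS v3 base score to the severity buckets from the table above."""
    if not 0.0 <= score <= 10.0:
        raise ValueError("CVSS base scores range from 0.0 to 10.0")
    if score == 0.0:
        return "None"
    if score <= 3.9:
        return "Low"
    if score <= 6.9:
        return "Medium"
    if score <= 8.9:
        return "High"
    return "Critical"
```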
## Disclosure process
1. The issue is reported via GitHub or via the email listed above.
1. Issue is reported via Email as listed above.
2. The authentik Security team will try to reproduce the issue and ask for more information if required.
3. A severity level is assigned.
3. A criticality level is assigned.
4. A fix is created, and if possible tested by the issue reporter.
5. The fix is backported to other supported versions, and if possible a workaround for other versions is created.
6. An announcement is sent out with a fixed release date and severity level of the issue. The announcement will be sent at least 24 hours before the release of the security fix.
6. An announcement is sent out with a fixed release date and criticality level of the issue. The announcement will be sent at least 24 hours before the release of the fix
7. The fixed version is released for the supported versions.
## Getting security notifications
To get security notifications, subscribe to the mailing list [here](https://groups.google.com/g/authentik-security-announcements) or join the [discord](https://goauthentik.io/discord) server.
To get security notifications, subscribe to the mailing list [here](https://groups.google.com/g/authentik-security-announcements) or join the [discord](https://goauthentik.io/discord) server.

View File

@ -1,12 +1,12 @@
"""authentik root module"""
"""authentik"""
from os import environ
from typing import Optional
__version__ = "2024.6.0"
__version__ = "2022.12.3"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
def get_build_hash(fallback: str | None = None) -> str:
def get_build_hash(fallback: Optional[str] = None) -> str:
"""Get build hash"""
build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "")
return fallback if build_hash == "" and fallback else build_hash
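A brief usage sketch of `get_build_hash()` above (editorial illustration; the hash value is invented): the hash is read from the `GIT_BUILD_HASH` environment variable, and the fallback only applies when that variable is unset or empty.
```python
import os

os.environ["GIT_BUILD_HASH"] = "abc1234"  # hypothetical value, for illustration only
assert get_build_hash() == "abc1234"

del os.environ["GIT_BUILD_HASH"]
assert get_build_hash("dev") == "dev"  # fallback used when the variable is unset
assert get_build_hash() == ""  # no fallback: empty string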

View File

@ -1,15 +1,13 @@
"""Meta API"""
from drf_spectacular.utils import extend_schema
from rest_framework.fields import CharField
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
from authentik.core.api.utils import PassiveSerializer
from authentik.lib.utils.reflection import get_apps
from authentik.policies.event_matcher.models import model_choices
class AppSerializer(PassiveSerializer):
@ -22,7 +20,7 @@ class AppSerializer(PassiveSerializer):
class AppsViewSet(ViewSet):
"""Read-only view list all installed apps"""
permission_classes = [IsAuthenticated]
permission_classes = [IsAdminUser]
@extend_schema(responses={200: AppSerializer(many=True)})
def list(self, request: Request) -> Response:
@ -31,17 +29,3 @@ class AppsViewSet(ViewSet):
for app in sorted(get_apps(), key=lambda app: app.name):
data.append({"name": app.name, "label": app.verbose_name})
return Response(AppSerializer(data, many=True).data)
class ModelViewSet(ViewSet):
"""Read-only view list all installed models"""
permission_classes = [IsAuthenticated]
@extend_schema(responses={200: AppSerializer(many=True)})
def list(self, request: Request) -> Response:
"""Read-only view list all installed models"""
data = []
for name, label in model_choices():
data.append({"name": name, "label": label})
return Response(AppSerializer(data, many=True).data)

View File

@ -1,12 +1,8 @@
"""authentik administration metrics"""
from datetime import timedelta
from django.db.models.functions import ExtractHour
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import IntegerField, SerializerMethodField
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
@ -25,51 +21,45 @@ class CoordinateSerializer(PassiveSerializer):
class LoginMetricsSerializer(PassiveSerializer):
"""Login Metrics per 1h"""
logins = SerializerMethodField()
logins_failed = SerializerMethodField()
authorizations = SerializerMethodField()
logins_per_1h = SerializerMethodField()
logins_failed_per_1h = SerializerMethodField()
authorizations_per_1h = SerializerMethodField()
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins(self, _):
"""Get successful logins per 8 hours for the last 7 days"""
def get_logins_per_1h(self, _):
"""Get successful logins per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.LOGIN)
.get_events_per_hour()
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins_failed(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
def get_logins_failed_per_1h(self, _):
"""Get failed logins per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN_FAILED
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.LOGIN_FAILED)
.get_events_per_hour()
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_authorizations(self, _):
"""Get successful authorizations per 8 hours for the last 7 days"""
def get_authorizations_per_1h(self, _):
"""Get successful authorizations per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.AUTHORIZE_APPLICATION
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.AUTHORIZE_APPLICATION)
.get_events_per_hour()
)
class AdministrationMetricsViewSet(APIView):
"""Login Metrics per 1h"""
permission_classes = [IsAuthenticated]
permission_classes = [IsAdminUser]
@extend_schema(responses={200: LoginMetricsSerializer(many=False)})
def get(self, request: Request) -> Response:

View File

@ -1,54 +1,52 @@
"""authentik administration overview"""
import os
import platform
from datetime import datetime
from ssl import OPENSSL_VERSION
from sys import version as python_version
from typing import TypedDict
from cryptography.hazmat.backends.openssl.backend import backend
from django.utils.timezone import now
from drf_spectacular.utils import extend_schema
from gunicorn import version_info as gunicorn_version
from rest_framework.fields import SerializerMethodField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik import get_full_version
from authentik.core.api.utils import PassiveSerializer
from authentik.enterprise.license import LicenseKey
from authentik.lib.config import CONFIG
from authentik.lib.utils.reflection import get_env
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost
from authentik.rbac.permissions import HasPermission
class RuntimeDict(TypedDict):
"""Runtime information"""
python_version: str
gunicorn_version: str
environment: str
architecture: str
platform: str
uname: str
openssl_version: str
openssl_fips_enabled: bool | None
authentik_version: str
class SystemInfoSerializer(PassiveSerializer):
class SystemSerializer(PassiveSerializer):
"""Get system information."""
env = SerializerMethodField()
http_headers = SerializerMethodField()
http_host = SerializerMethodField()
http_is_secure = SerializerMethodField()
runtime = SerializerMethodField()
brand = SerializerMethodField()
tenant = SerializerMethodField()
server_time = SerializerMethodField()
embedded_outpost_disabled = SerializerMethodField()
embedded_outpost_host = SerializerMethodField()
def get_env(self, request: Request) -> dict[str, str]:
"""Get Environment"""
return os.environ.copy()
def get_http_headers(self, request: Request) -> dict[str, str]:
"""Get HTTP Request headers"""
headers = {}
@ -69,30 +67,22 @@ class SystemInfoSerializer(PassiveSerializer):
def get_runtime(self, request: Request) -> RuntimeDict:
"""Get versions"""
return {
"architecture": platform.machine(),
"authentik_version": get_full_version(),
"environment": get_env(),
"openssl_fips_enabled": (
backend._fips_enabled if LicenseKey.get_total().is_valid() else None
),
"openssl_version": OPENSSL_VERSION,
"platform": platform.platform(),
"python_version": python_version,
"gunicorn_version": ".".join(str(x) for x in gunicorn_version),
"environment": get_env(),
"architecture": platform.machine(),
"platform": platform.platform(),
"uname": " ".join(platform.uname()),
}
def get_brand(self, request: Request) -> str:
"""Currently active brand"""
return str(request._request.brand)
def get_tenant(self, request: Request) -> str:
"""Currently active tenant"""
return str(request._request.tenant)
def get_server_time(self, request: Request) -> datetime:
"""Current server time"""
return now()
def get_embedded_outpost_disabled(self, request: Request) -> bool:
"""Whether the embedded outpost is disabled"""
return CONFIG.get_bool("outposts.disable_embedded_outpost", False)
def get_embedded_outpost_host(self, request: Request) -> str:
"""Get the FQDN configured on the embedded outpost"""
outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
@ -104,17 +94,11 @@ class SystemInfoSerializer(PassiveSerializer):
class SystemView(APIView):
"""Get system information."""
permission_classes = [HasPermission("authentik_rbac.view_system_info")]
permission_classes = [IsAdminUser]
pagination_class = None
filter_backends = []
serializer_class = SystemInfoSerializer
@extend_schema(responses={200: SystemInfoSerializer(many=False)})
@extend_schema(responses={200: SystemSerializer(many=False)})
def get(self, request: Request) -> Response:
"""Get system information."""
return Response(SystemInfoSerializer(request).data)
@extend_schema(responses={200: SystemInfoSerializer(many=False)})
def post(self, request: Request) -> Response:
"""Get system information."""
return Response(SystemInfoSerializer(request).data)
return Response(SystemSerializer(request).data)
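As a hedged aside on the pattern above: the serializer is read-only, so every field is a SerializerMethodField computed from the request object, which is passed in as the serializer instance. A minimal, self-contained sketch of that pattern (the field names and view below are illustrative, not part of authentik's API):

from django.utils.timezone import now
from rest_framework import serializers
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView


class RuntimeInfoSerializer(serializers.Serializer):
    """Read-only serializer: every value is derived from the request"""

    http_host = serializers.SerializerMethodField()
    server_time = serializers.SerializerMethodField()

    def get_http_host(self, request: Request) -> str:
        # the "instance" handed to the serializer is the request itself
        return request.get_host()

    def get_server_time(self, request: Request):
        return now()


class RuntimeInfoView(APIView):
    def get(self, request: Request) -> Response:
        # serialize the request, exactly like the system view above does
        return Response(RuntimeInfoSerializer(request).data)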

View File

@ -0,0 +1,122 @@
"""Tasks API"""
from importlib import import_module
from django.contrib import messages
from django.http.response import Http404
from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
from rest_framework.decorators import action
from rest_framework.fields import CharField, ChoiceField, DateTimeField, ListField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
from structlog.stdlib import get_logger
from authentik.core.api.utils import PassiveSerializer
from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
LOGGER = get_logger()
class TaskSerializer(PassiveSerializer):
"""Serialize TaskInfo and TaskResult"""
task_name = CharField()
task_description = CharField()
task_finish_timestamp = DateTimeField(source="finish_time")
status = ChoiceField(
source="result.status.name",
choices=[(x.name, x.name) for x in TaskResultStatus],
)
messages = ListField(source="result.messages")
def to_representation(self, instance):
"""When a new version of authentik adds fields to TaskInfo,
the API will fail with an AttributeError, as the classes
are pickled in cache. In that case, just delete the info"""
try:
return super().to_representation(instance)
except AttributeError: # pragma: no cover
if isinstance(self.instance, list):
for inst in self.instance:
inst.delete()
else:
self.instance.delete()
return {}
class TaskViewSet(ViewSet):
"""Read-only view set that returns all background tasks"""
permission_classes = [IsAdminUser]
serializer_class = TaskSerializer
@extend_schema(
responses={
200: TaskSerializer(many=False),
404: OpenApiResponse(description="Task not found"),
},
parameters=[
OpenApiParameter(
"id",
type=OpenApiTypes.STR,
location=OpenApiParameter.PATH,
required=True,
),
],
)
# pylint: disable=invalid-name
def retrieve(self, request: Request, pk=None) -> Response:
"""Get a single system task"""
task = TaskInfo.by_name(pk)
if not task:
raise Http404
return Response(TaskSerializer(task, many=False).data)
@extend_schema(responses={200: TaskSerializer(many=True)})
def list(self, request: Request) -> Response:
"""List system tasks"""
tasks = sorted(TaskInfo.all().values(), key=lambda task: task.task_name)
return Response(TaskSerializer(tasks, many=True).data)
@extend_schema(
request=OpenApiTypes.NONE,
responses={
204: OpenApiResponse(description="Task retried successfully"),
404: OpenApiResponse(description="Task not found"),
500: OpenApiResponse(description="Failed to retry task"),
},
parameters=[
OpenApiParameter(
"id",
type=OpenApiTypes.STR,
location=OpenApiParameter.PATH,
required=True,
),
],
)
@action(detail=True, methods=["post"])
# pylint: disable=invalid-name
def retry(self, request: Request, pk=None) -> Response:
"""Retry task"""
task = TaskInfo.by_name(pk)
if not task:
raise Http404
try:
task_module = import_module(task.task_call_module)
task_func = getattr(task_module, task.task_call_func)
LOGGER.debug("Running task", task=task_func)
task_func.delay(*task.task_call_args, **task.task_call_kwargs)
messages.success(
self.request,
_("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}),
)
return Response(status=204)
except (ImportError, AttributeError): # pragma: no cover
LOGGER.warning("Failed to run task, remove state", task=task)
# if we get an import error, the module path has probably changed
task.delete()
return Response(status=500)
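The retry action above re-dispatches a task purely from its stored call metadata (module path, function name, args, kwargs). A hedged sketch of that dynamic re-dispatch, with placeholder module and function names:

from importlib import import_module


def requeue(module_path: str, func_name: str, args: list, kwargs: dict):
    """Look a Celery task up by its dotted path and queue it again"""
    task_module = import_module(module_path)      # e.g. "authentik.core.tasks" (illustrative)
    task_func = getattr(task_module, func_name)   # raises AttributeError if the name moved
    return task_func.delay(*args, **kwargs)       # hand it back to the worker queue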

View File

@ -1,5 +1,4 @@
"""authentik administration overview"""
from django.core.cache import cache
from drf_spectacular.utils import extend_schema
from packaging.version import parse
@ -10,7 +9,7 @@ from rest_framework.response import Response
from rest_framework.views import APIView
from authentik import __version__, get_build_hash
from authentik.admin.tasks import VERSION_CACHE_KEY, VERSION_NULL, update_latest_version
from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version
from authentik.core.api.utils import PassiveSerializer
@ -19,7 +18,6 @@ class VersionSerializer(PassiveSerializer):
version_current = SerializerMethodField()
version_latest = SerializerMethodField()
version_latest_valid = SerializerMethodField()
build_hash = SerializerMethodField()
outdated = SerializerMethodField()
@ -39,10 +37,6 @@ class VersionSerializer(PassiveSerializer):
return __version__
return version_in_cache
def get_version_latest_valid(self, _) -> bool:
"""Check if latest version is valid"""
return cache.get(VERSION_CACHE_KEY) != VERSION_NULL
def get_outdated(self, instance) -> bool:
"""Check if we're running the latest version"""
return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance))
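The outdated check relies on packaging's Version objects comparing numerically across release segments, not lexicographically. A small worked example (the version strings are arbitrary):

from packaging.version import parse

assert parse("2022.12.3") < parse("2023.1.0")        # numeric comparison per segment
assert not parse("2022.12.3") < parse("2022.12.3")   # equal versions are not "outdated"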

View File

@ -1,26 +1,25 @@
"""authentik administration overview"""
from django.conf import settings
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.fields import IntegerField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik.rbac.permissions import HasPermission
from authentik.root.celery import CELERY_APP
class WorkerView(APIView):
"""Get currently connected worker count."""
permission_classes = [HasPermission("authentik_rbac.view_system_info")]
permission_classes = [IsAdminUser]
@extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()}))
def get(self, request: Request) -> Response:
"""Get currently connected worker count."""
count = len(CELERY_APP.control.ping(timeout=0.5))
# In debug we run with `task_always_eager`, so tasks are run on the main process
# In debug we run with `CELERY_TASK_ALWAYS_EAGER`, so tasks are run on the main process
if settings.DEBUG: # pragma: no cover
count += 1
return Response({"count": count})

View File

@ -1,5 +1,4 @@
"""authentik admin app config"""
from prometheus_client import Gauge, Info
from authentik.blueprints.apps import ManagedAppConfig
@ -15,3 +14,7 @@ class AuthentikAdminConfig(ManagedAppConfig):
label = "authentik_admin"
verbose_name = "authentik Admin"
default = True
def reconcile_load_admin_signals(self):
"""Load admin signals"""
self.import_module("authentik.admin.signals")

View File

@ -1,5 +1,4 @@
"""authentik admin settings"""
from celery.schedules import crontab
from authentik.lib.utils.time import fqdn_rand

View File

@ -1,14 +1,23 @@
"""admin signals"""
from django.dispatch import receiver
from authentik.admin.api.tasks import TaskInfo
from authentik.admin.apps import GAUGE_WORKERS
from authentik.root.celery import CELERY_APP
from authentik.root.monitoring import monitoring_set
@receiver(monitoring_set)
# pylint: disable=unused-argument
def monitoring_set_workers(sender, **kwargs):
"""Set worker gauge"""
count = len(CELERY_APP.control.ping(timeout=0.5))
GAUGE_WORKERS.set(count)
@receiver(monitoring_set)
# pylint: disable=unused-argument
def monitoring_set_tasks(sender, **kwargs):
"""Set task gauges"""
for task in TaskInfo.all().values():
task.set_prom_metrics()
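Both receivers above funnel into Prometheus gauges when the monitoring endpoint is scraped. A hedged sketch of the gauge side using prometheus_client (the metric name here is illustrative; authentik defines GAUGE_WORKERS in authentik.admin.apps):

from prometheus_client import Gauge

GAUGE_WORKERS = Gauge("admin_workers_example", "Currently connected workers")  # illustrative name


def set_worker_gauge(count: int) -> None:
    GAUGE_WORKERS.set(count)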

View File

@ -1,5 +1,4 @@
"""authentik admin tasks"""
import re
from django.core.cache import cache
@ -12,13 +11,17 @@ from structlog.stdlib import get_logger
from authentik import __version__, get_build_hash
from authentik.admin.apps import PROM_INFO
from authentik.events.models import Event, EventAction, Notification
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
from authentik.events.monitored_tasks import (
MonitoredTask,
TaskResult,
TaskResultStatus,
prefill_task,
)
from authentik.lib.config import CONFIG
from authentik.lib.utils.http import get_http_session
from authentik.root.celery import CELERY_APP
LOGGER = get_logger()
VERSION_NULL = "0.0.0"
VERSION_CACHE_KEY = "authentik_latest_version"
VERSION_CACHE_TIMEOUT = 8 * 60 * 60 # 8 hours
# Chop off the first ^ because we want to search the entire string
@ -51,13 +54,13 @@ def clear_update_notifications():
notification.delete()
@CELERY_APP.task(bind=True, base=SystemTask)
@CELERY_APP.task(bind=True, base=MonitoredTask)
@prefill_task
def update_latest_version(self: SystemTask):
def update_latest_version(self: MonitoredTask):
"""Update latest version info"""
if CONFIG.get_bool("disable_update_check"):
cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
self.set_status(TaskStatus.WARNING, "Version check disabled.")
if CONFIG.y_bool("disable_update_check"):
cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
self.set_status(TaskResult(TaskResultStatus.WARNING, messages=["Version check disabled."]))
return
try:
response = get_http_session().get(
@ -67,7 +70,9 @@ def update_latest_version(self: SystemTask):
data = response.json()
upstream_version = data.get("stable", {}).get("version")
cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT)
self.set_status(TaskStatus.SUCCESSFUL, "Successfully updated latest Version")
self.set_status(
TaskResult(TaskResultStatus.SUCCESSFUL, ["Successfully updated latest Version"])
)
_set_prom_info()
# Check if upstream version is newer than what we're running,
# and if no event exists yet, create one.
@ -83,8 +88,8 @@ def update_latest_version(self: SystemTask):
event_dict["message"] = f"Changelog: {match.group()}"
Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
except (RequestException, IndexError) as exc:
cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
self.set_error(exc)
cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
_set_prom_info()
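In both versions the result of the version check is cached under a fixed key with an 8-hour TTL, with "0.0.0" (VERSION_NULL in the newer code) as the sentinel for "unknown". A small sketch of that caching pattern, reusing the constants shown above:

from django.core.cache import cache

VERSION_CACHE_KEY = "authentik_latest_version"
VERSION_CACHE_TIMEOUT = 8 * 60 * 60  # 8 hours, in seconds
VERSION_NULL = "0.0.0"


def remember_latest_version(version: str | None) -> None:
    # fall back to the sentinel when the upstream check failed or was disabled
    cache.set(VERSION_CACHE_KEY, version or VERSION_NULL, VERSION_CACHE_TIMEOUT)


def latest_known_version() -> str:
    return cache.get(VERSION_CACHE_KEY, VERSION_NULL)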

View File

@ -1,5 +1,4 @@
"""test admin api"""
from json import loads
from django.test import TestCase
@ -8,7 +7,8 @@ from django.urls import reverse
from authentik import __version__
from authentik.blueprints.tests import reconcile_app
from authentik.core.models import Group, User
from authentik.lib.generators import generate_id
from authentik.core.tasks import clean_expired_models
from authentik.events.monitored_tasks import TaskResultStatus
class TestAdminAPI(TestCase):
@ -16,12 +16,59 @@ class TestAdminAPI(TestCase):
def setUp(self) -> None:
super().setUp()
self.user = User.objects.create(username=generate_id())
self.group = Group.objects.create(name=generate_id(), is_superuser=True)
self.user = User.objects.create(username="test-user")
self.group = Group.objects.create(name="superusers", is_superuser=True)
self.group.users.add(self.user)
self.group.save()
self.client.force_login(self.user)
def test_tasks(self):
"""Test Task API"""
clean_expired_models.delay()
response = self.client.get(reverse("authentik_api:admin_system_tasks-list"))
self.assertEqual(response.status_code, 200)
body = loads(response.content)
self.assertTrue(any(task["task_name"] == "clean_expired_models" for task in body))
def test_tasks_single(self):
"""Test Task API (read single)"""
clean_expired_models.delay()
response = self.client.get(
reverse(
"authentik_api:admin_system_tasks-detail",
kwargs={"pk": "clean_expired_models"},
)
)
self.assertEqual(response.status_code, 200)
body = loads(response.content)
self.assertEqual(body["status"], TaskResultStatus.SUCCESSFUL.name)
self.assertEqual(body["task_name"], "clean_expired_models")
response = self.client.get(
reverse("authentik_api:admin_system_tasks-detail", kwargs={"pk": "qwerqwer"})
)
self.assertEqual(response.status_code, 404)
def test_tasks_retry(self):
"""Test Task API (retry)"""
clean_expired_models.delay()
response = self.client.post(
reverse(
"authentik_api:admin_system_tasks-retry",
kwargs={"pk": "clean_expired_models"},
)
)
self.assertEqual(response.status_code, 204)
def test_tasks_retry_404(self):
"""Test Task API (retry, 404)"""
response = self.client.post(
reverse(
"authentik_api:admin_system_tasks-retry",
kwargs={"pk": "qwerqewrqrqewrqewr"},
)
)
self.assertEqual(response.status_code, 404)
def test_version(self):
"""Test Version API"""
response = self.client.get(reverse("authentik_api:admin_version"))
@ -46,11 +93,6 @@ class TestAdminAPI(TestCase):
response = self.client.get(reverse("authentik_api:apps-list"))
self.assertEqual(response.status_code, 200)
def test_models(self):
"""Test models API"""
response = self.client.get(reverse("authentik_api:models-list"))
self.assertEqual(response.status_code, 200)
@reconcile_app("authentik_outposts")
def test_system(self):
"""Test system API"""

View File

@ -1,5 +1,4 @@
"""test admin tasks"""
from django.core.cache import cache
from django.test import TestCase
from requests_mock import Mocker

View File

@ -1,22 +0,0 @@
"""API URLs"""
from django.urls import path
from authentik.admin.api.meta import AppsViewSet, ModelViewSet
from authentik.admin.api.metrics import AdministrationMetricsViewSet
from authentik.admin.api.system import SystemView
from authentik.admin.api.version import VersionView
from authentik.admin.api.workers import WorkerView
api_urlpatterns = [
("admin/apps", AppsViewSet, "apps"),
("admin/models", ModelViewSet, "models"),
path(
"admin/metrics/",
AdministrationMetricsViewSet.as_view(),
name="admin_metrics",
),
path("admin/version/", VersionView.as_view(), name="admin_version"),
path("admin/workers/", WorkerView.as_view(), name="admin_workers"),
path("admin/system/", SystemView.as_view(), name="admin_system"),
]

View File

@ -10,3 +10,26 @@ class AuthentikAPIConfig(AppConfig):
label = "authentik_api"
mountpoint = "api/"
verbose_name = "authentik API"
def ready(self) -> None:
from drf_spectacular.extensions import OpenApiAuthenticationExtension
from authentik.api.authentication import TokenAuthentication
# Class is defined here as it needs to be created early enough that drf-spectacular will
# find it, but also won't cause any import issues
# pylint: disable=unused-variable
class TokenSchema(OpenApiAuthenticationExtension):
"""Auth schema"""
target_class = TokenAuthentication
name = "authentik"
def get_security_definition(self, auto_schema):
"""Auth schema"""
return {
"type": "apiKey",
"in": "header",
"name": "Authorization",
"scheme": "bearer",
}

View File

@ -1,10 +1,7 @@
"""API Authentication"""
from hmac import compare_digest
from typing import Any
from typing import Any, Optional
from django.conf import settings
from drf_spectacular.extensions import OpenApiAuthenticationExtension
from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.exceptions import AuthenticationFailed
from rest_framework.request import Request
@ -18,7 +15,7 @@ from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
LOGGER = get_logger()
def validate_auth(header: bytes) -> str | None:
def validate_auth(header: bytes) -> Optional[str]:
"""Validate that the header is in a correct format,
returns type and credentials"""
auth_credentials = header.decode().strip()
@ -33,19 +30,9 @@ def validate_auth(header: bytes) -> str | None:
return auth_credentials
def bearer_auth(raw_header: bytes) -> User | None:
def bearer_auth(raw_header: bytes) -> Optional[User]:
"""raw_header in the Format of `Bearer ....`"""
user = auth_user_lookup(raw_header)
if not user:
return None
if not user.is_active:
raise AuthenticationFailed("Token invalid/expired")
return user
def auth_user_lookup(raw_header: bytes) -> User | None:
"""raw_header in the Format of `Bearer ....`"""
from authentik.providers.oauth2.models import AccessToken
from authentik.providers.oauth2.models import RefreshToken
auth_credentials = validate_auth(raw_header)
if not auth_credentials:
@ -58,8 +45,8 @@ def auth_user_lookup(raw_header: bytes) -> User | None:
CTX_AUTH_VIA.set("api_token")
return key_token.user
# then try to auth via JWT
jwt_token = AccessToken.filter_not_expired(
token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
jwt_token = RefreshToken.filter_not_expired(
refresh_token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
).first()
if jwt_token:
# Double-check scopes, since they are saved in a single string
@ -76,12 +63,12 @@ def auth_user_lookup(raw_header: bytes) -> User | None:
raise AuthenticationFailed("Token invalid/expired")
def token_secret_key(value: str) -> User | None:
def token_secret_key(value: str) -> Optional[User]:
"""Check if the token is the secret key
and return the service account for the managed outpost"""
from authentik.outposts.apps import MANAGED_OUTPOST
if not compare_digest(value, settings.SECRET_KEY):
if value != settings.SECRET_KEY:
return None
outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
if not outposts:
@ -103,14 +90,3 @@ class TokenAuthentication(BaseAuthentication):
return None
return (user, None) # pragma: no cover
class TokenSchema(OpenApiAuthenticationExtension):
"""Auth schema"""
target_class = TokenAuthentication
name = "authentik"
def get_security_definition(self, auto_schema):
"""Auth schema"""
return {"type": "http", "scheme": "bearer"}

View File

@ -1,5 +1,4 @@
"""API Authorization"""
from django.conf import settings
from django.db.models import Model
from django.db.models.query import QuerySet
@ -8,9 +7,9 @@ from rest_framework.authentication import get_authorization_header
from rest_framework.filters import BaseFilterBackend
from rest_framework.permissions import BasePermission
from rest_framework.request import Request
from rest_framework_guardian.filters import ObjectPermissionsFilter
from authentik.api.authentication import validate_auth
from authentik.rbac.filters import ObjectFilter
class OwnerFilter(BaseFilterBackend):
@ -27,14 +26,14 @@ class OwnerFilter(BaseFilterBackend):
class SecretKeyFilter(DjangoFilterBackend):
"""Allow access to all objects when authenticated with secret key as token.
Replaces both DjangoFilterBackend and ObjectFilter"""
Replaces both DjangoFilterBackend and ObjectPermissionsFilter"""
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
auth_header = get_authorization_header(request)
token = validate_auth(auth_header)
if token and token == settings.SECRET_KEY:
return queryset
queryset = ObjectFilter().filter_queryset(request, queryset, view)
queryset = ObjectPermissionsFilter().filter_queryset(request, queryset, view)
return super().filter_queryset(request, queryset, view)

View File

@ -0,0 +1,35 @@
"""API Decorators"""
from functools import wraps
from typing import Callable, Optional
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
LOGGER = get_logger()
def permission_required(perm: Optional[str] = None, other_perms: Optional[list[str]] = None):
"""Check permissions for a single custom action"""
def wrapper_outter(func: Callable):
"""Check permissions for a single custom action"""
@wraps(func)
def wrapper(self: ModelViewSet, request: Request, *args, **kwargs) -> Response:
if perm:
obj = self.get_object()
if not request.user.has_perm(perm, obj):
LOGGER.debug("denying access for object", user=request.user, perm=perm, obj=obj)
return self.permission_denied(request)
if other_perms:
for other_perm in other_perms:
if not request.user.has_perm(other_perm):
LOGGER.debug("denying access for other", user=request.user, perm=perm)
return self.permission_denied(request)
return func(self, request, *args, **kwargs)
return wrapper
return wrapper_outter
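A hedged usage sketch for the decorator above, mirroring how the application metrics action is protected in the tests further down (the viewset body and return value are illustrative). Because the wrapper uses functools.wraps, the attributes @action sets on the function survive the wrapping, so permission_required goes on the outside:

from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet

from authentik.api.decorators import permission_required


class ApplicationViewSet(ModelViewSet):
    # queryset/serializer omitted for brevity; this is a sketch, not authentik's viewset

    @permission_required("authentik_core.view_application")
    @action(detail=True, methods=["get"])
    def metrics(self, request, slug=None):
        # the object-level check ran against self.get_object() before we got here
        return Response({"logins": 0})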

View File

@ -1,45 +1,7 @@
"""Pagination which includes total pages and current page"""
from rest_framework import pagination
from rest_framework.response import Response
PAGINATION_COMPONENT_NAME = "Pagination"
PAGINATION_SCHEMA = {
"type": "object",
"properties": {
"next": {
"type": "number",
},
"previous": {
"type": "number",
},
"count": {
"type": "number",
},
"current": {
"type": "number",
},
"total_pages": {
"type": "number",
},
"start_index": {
"type": "number",
},
"end_index": {
"type": "number",
},
},
"required": [
"next",
"previous",
"count",
"current",
"total_pages",
"start_index",
"end_index",
],
}
class Pagination(pagination.PageNumberPagination):
"""Pagination which includes total pages and current page"""
@ -73,15 +35,42 @@ class Pagination(pagination.PageNumberPagination):
return {
"type": "object",
"properties": {
"pagination": {"$ref": f"#/components/schemas/{PAGINATION_COMPONENT_NAME}"},
"pagination": {
"type": "object",
"properties": {
"next": {
"type": "number",
},
"previous": {
"type": "number",
},
"count": {
"type": "number",
},
"current": {
"type": "number",
},
"total_pages": {
"type": "number",
},
"start_index": {
"type": "number",
},
"end_index": {
"type": "number",
},
},
"required": [
"next",
"previous",
"count",
"current",
"total_pages",
"start_index",
"end_index",
],
},
"results": schema,
},
"required": ["pagination", "results"],
}
class SmallerPagination(Pagination):
"""Smaller pagination for objects which might require a lot of queries
to retrieve all data for."""
max_page_size = 10
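For reference, a list response under this pagination class carries the envelope described by the schema above; the values and result items below are illustrative:

example_page = {
    "pagination": {
        "next": 2,
        "previous": 0,
        "count": 42,
        "current": 1,
        "total_pages": 3,
        "start_index": 1,
        "end_index": 20,
    },
    "results": [
        {"pk": "placeholder", "name": "placeholder"},
    ],
}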

View File

@ -1,7 +1,5 @@
"""Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.generators import SchemaGenerator
from drf_spectacular.plumbing import (
ResolvedComponent,
build_array_type,
@ -10,10 +8,6 @@ from drf_spectacular.plumbing import (
)
from drf_spectacular.settings import spectacular_settings
from drf_spectacular.types import OpenApiTypes
from rest_framework.settings import api_settings
from authentik.api.apps import AuthentikAPIConfig
from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA
def build_standard_type(obj, **kwargs):
@ -34,7 +28,7 @@ GENERIC_ERROR = build_object_type(
VALIDATION_ERROR = build_object_type(
description=_("Validation Error"),
properties={
api_settings.NON_FIELD_ERRORS_KEY: build_array_type(build_standard_type(OpenApiTypes.STR)),
"non_field_errors": build_array_type(build_standard_type(OpenApiTypes.STR)),
"code": build_standard_type(OpenApiTypes.STR),
},
required=[],
@ -42,19 +36,7 @@ VALIDATION_ERROR = build_object_type(
)
def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedComponent.SCHEMA):
"""Register a component and return a reference to it."""
component = ResolvedComponent(
name=name,
type=type_,
schema=schema,
object=name,
)
generator.registry.register_on_missing(component)
return component
def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): # noqa: W0613
def postprocess_schema_responses(result, generator, **kwargs): # noqa: W0613
"""Workaround to set a default response for endpoints.
Workaround suggested at
<https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357>
@ -62,10 +44,19 @@ def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
<https://github.com/tfranzel/drf-spectacular/issues/101>.
"""
create_component(generator, PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA)
def create_component(name, schema, type_=ResolvedComponent.SCHEMA):
"""Register a component and return a reference to it."""
component = ResolvedComponent(
name=name,
type=type_,
schema=schema,
object=name,
)
generator.registry.register_on_missing(component)
return component
generic_error = create_component(generator, "GenericError", GENERIC_ERROR)
validation_error = create_component(generator, "ValidationError", VALIDATION_ERROR)
generic_error = create_component("GenericError", GENERIC_ERROR)
validation_error = create_component("ValidationError", VALIDATION_ERROR)
for path in result["paths"].values():
for method in path.values():
@ -102,12 +93,3 @@ def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
comp = result["components"]["schemas"][component]
comp["additionalProperties"] = {}
return result
def preprocess_schema_exclude_non_api(endpoints, **kwargs):
"""Filter out all API Views which are not mounted under /api"""
return [
(path, path_regex, method, callback)
for path, path_regex, method, callback in endpoints
if path.startswith("/" + AuthentikAPIConfig.mountpoint)
]

View File

@ -1,17 +1,88 @@
{% extends "base/skeleton.html" %}
{% load authentik_core %}
{% load static %}
{% block title %}
API Browser - {{ brand.branding_title }}
API Browser - {{ tenant.branding_title }}
{% endblock %}
{% block head %}
{% versioned_script "dist/standalone/api-browser/index-%v.js" %}
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
<script type="module" src="{% static 'dist/rapidoc-min.js' %}"></script>
<script>
function getCookie(name) {
let cookieValue = "";
if (document.cookie && document.cookie !== "") {
const cookies = document.cookie.split(";");
for (let i = 0; i < cookies.length; i++) {
const cookie = cookies[i].trim();
// Does this cookie string begin with the name we want?
if (cookie.substring(0, name.length + 1) === name + "=") {
cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
break;
}
}
}
return cookieValue;
}
window.addEventListener('DOMContentLoaded', (event) => {
const rapidocEl = document.querySelector('rapi-doc');
rapidocEl.addEventListener('before-try', (e) => {
e.detail.request.headers.append('X-authentik-CSRF', getCookie("authentik_csrf"));
});
});
</script>
<style>
img.logo {
width: 100%;
padding: 1rem 0.5rem 1.5rem 0.5rem;
min-height: 48px;
}
</style>
{% endblock %}
{% block body %}
<ak-api-browser schemaPath="{{ path }}"></ak-api-browser>
<rapi-doc
spec-url="{{ path }}"
heading-text=""
theme="light"
render-style="read"
default-schema-tab="schema"
primary-color="#fd4b2d"
nav-bg-color="#212427"
bg-color="#000000"
text-color="#000000"
nav-text-color="#ffffff"
nav-hover-bg-color="#3c3f42"
nav-accent-color="#4f5255"
nav-hover-text-color="#ffffff"
use-path-in-nav-bar="true"
nav-item-spacing="relaxed"
allow-server-selection="false"
show-header="false"
allow-spec-url-load="false"
allow-spec-file-load="false">
<div slot="nav-logo">
<img class="logo" src="{% static 'dist/assets/icons/icon_left_brand.png' %}" />
</div>
</rapi-doc>
<script>
const rapidoc = document.querySelector("rapi-doc");
const matcher = window.matchMedia("(prefers-color-scheme: light)");
const changer = (ev) => {
const style = getComputedStyle(document.documentElement);
let bg, text = "";
if (matcher.matches) {
bg = style.getPropertyValue('--pf-global--BackgroundColor--light-300');
text = style.getPropertyValue('--pf-global--Color--300');
} else {
bg = style.getPropertyValue('--ak-dark-background');
text = style.getPropertyValue('--ak-dark-foreground');
}
rapidoc.attributes.getNamedItem("bg-color").value = bg.trim();
rapidoc.attributes.getNamedItem("text-color").value = text.trim();
rapidoc.requestUpdate();
};
matcher.addEventListener("change", changer);
window.addEventListener("load", changer);
</script>
{% endblock %}

View File

@ -1,22 +1,18 @@
"""Test API Authentication"""
import json
from base64 import b64encode
from django.conf import settings
from django.test import TestCase
from django.utils import timezone
from guardian.shortcuts import get_anonymous_user
from rest_framework.exceptions import AuthenticationFailed
from authentik.api.authentication import bearer_auth
from authentik.blueprints.tests import reconcile_app
from authentik.core.models import Token, TokenIntents, User, UserTypes
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.core.models import USER_ATTRIBUTE_SA, Token, TokenIntents
from authentik.core.tests.utils import create_test_flow
from authentik.lib.generators import generate_id
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
from authentik.providers.oauth2.models import OAuth2Provider, RefreshToken
class TestAPIAuth(TestCase):
@ -25,75 +21,58 @@ class TestAPIAuth(TestCase):
def test_invalid_type(self):
"""Test invalid type"""
with self.assertRaises(AuthenticationFailed):
bearer_auth(b"foo bar")
bearer_auth("foo bar".encode())
def test_invalid_empty(self):
"""Test invalid type"""
self.assertIsNone(bearer_auth(b"Bearer "))
self.assertIsNone(bearer_auth(b""))
self.assertIsNone(bearer_auth("Bearer ".encode()))
self.assertIsNone(bearer_auth("".encode()))
def test_invalid_no_token(self):
"""Test invalid with no token"""
with self.assertRaises(AuthenticationFailed):
auth = b64encode(b":abc").decode()
auth = b64encode(":abc".encode()).decode()
self.assertIsNone(bearer_auth(f"Basic :{auth}".encode()))
def test_bearer_valid(self):
"""Test valid token"""
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=create_test_admin_user())
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=get_anonymous_user())
self.assertEqual(bearer_auth(f"Bearer {token.key}".encode()), token.user)
def test_bearer_valid_deactivated(self):
"""Test valid token"""
user = create_test_admin_user()
user.is_active = False
user.save()
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=user)
with self.assertRaises(AuthenticationFailed):
bearer_auth(f"Bearer {token.key}".encode())
@reconcile_app("authentik_outposts")
def test_managed_outpost_fail(self):
def test_managed_outpost(self):
"""Test managed outpost"""
outpost = Outpost.objects.filter(managed=MANAGED_OUTPOST).first()
outpost.user.delete()
outpost.delete()
with self.assertRaises(AuthenticationFailed):
bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
@reconcile_app("authentik_outposts")
def test_managed_outpost_success(self):
"""Test managed outpost"""
user: User = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
self.assertEqual(user.type, UserTypes.INTERNAL_SERVICE_ACCOUNT)
user = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
self.assertEqual(user.attributes[USER_ATTRIBUTE_SA], True)
def test_jwt_valid(self):
"""Test valid JWT"""
provider = OAuth2Provider.objects.create(
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
)
refresh = AccessToken.objects.create(
user=create_test_admin_user(),
refresh = RefreshToken.objects.create(
user=get_anonymous_user(),
provider=provider,
token=generate_id(),
auth_time=timezone.now(),
refresh_token=generate_id(),
_scope=SCOPE_AUTHENTIK_API,
_id_token=json.dumps({}),
)
self.assertEqual(bearer_auth(f"Bearer {refresh.token}".encode()), refresh.user)
self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)
def test_jwt_missing_scope(self):
"""Test valid JWT"""
provider = OAuth2Provider.objects.create(
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
)
refresh = AccessToken.objects.create(
user=create_test_admin_user(),
refresh = RefreshToken.objects.create(
user=get_anonymous_user(),
provider=provider,
token=generate_id(),
auth_time=timezone.now(),
refresh_token=generate_id(),
_scope="",
_id_token=json.dumps({}),
)
with self.assertRaises(AuthenticationFailed):
self.assertEqual(bearer_auth(f"Bearer {refresh.token}".encode()), refresh.user)
self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)

View File

@ -1,5 +1,4 @@
"""Test config API"""
from json import loads
from django.urls import reverse

View File

@ -0,0 +1,33 @@
"""test decorators api"""
from django.urls import reverse
from guardian.shortcuts import assign_perm
from rest_framework.test import APITestCase
from authentik.core.models import Application, User
class TestAPIDecorators(APITestCase):
"""test decorators api"""
def setUp(self) -> None:
super().setUp()
self.user = User.objects.create(username="test-user")
def test_obj_perm_denied(self):
"""Test object perm denied"""
self.client.force_login(self.user)
app = Application.objects.create(name="denied", slug="denied")
response = self.client.get(
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
)
self.assertEqual(response.status_code, 403)
def test_other_perm_denied(self):
"""Test other perm denied"""
self.client.force_login(self.user)
app = Application.objects.create(name="denied", slug="denied")
assign_perm("authentik_core.view_application", self.user, app)
response = self.client.get(
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
)
self.assertEqual(response.status_code, 403)

View File

@ -1,5 +1,4 @@
"""Schema generation tests"""
from django.urls import reverse
from rest_framework.test import APITestCase
from yaml import safe_load

View File

@ -1,6 +1,5 @@
"""authentik API Modelviewset tests"""
from collections.abc import Callable
from typing import Callable
from django.test import TestCase
from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet
@ -17,7 +16,6 @@ def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable:
def tester(self: TestModelViewSets):
self.assertIsNotNone(getattr(test_viewset, "search_fields", None))
self.assertIsNotNone(getattr(test_viewset, "ordering", None))
filterset_class = getattr(test_viewset, "filterset_class", None)
if not filterset_class:
self.assertIsNotNone(getattr(test_viewset, "filterset_fields", None))
@ -26,6 +24,6 @@ def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable:
for _, viewset, _ in router.registry:
if not issubclass(viewset, ModelViewSet | ReadOnlyModelViewSet):
if not issubclass(viewset, (ModelViewSet, ReadOnlyModelViewSet)):
continue
setattr(TestModelViewSets, f"test_viewset_{viewset.__name__}", viewset_tester_factory(viewset))

View File

@ -1,5 +1,4 @@
"""authentik api urls"""
from django.urls import include, path
from authentik.api.v3.urls import urlpatterns as v3_urls

View File

@ -1,10 +1,8 @@
"""core Configs API"""
from pathlib import Path
from os import path
from django.conf import settings
from django.db import models
from django.dispatch import Signal
from drf_spectacular.utils import extend_schema
from rest_framework.fields import (
BooleanField,
@ -20,21 +18,17 @@ from rest_framework.response import Response
from rest_framework.views import APIView
from authentik.core.api.utils import PassiveSerializer
from authentik.events.context_processors.base import get_context_processors
from authentik.events.geo import GEOIP_READER
from authentik.lib.config import CONFIG
capabilities = Signal()
class Capabilities(models.TextChoices):
"""Define capabilities which influence which APIs can/should be used"""
CAN_SAVE_MEDIA = "can_save_media"
CAN_GEO_IP = "can_geo_ip"
CAN_ASN = "can_asn"
CAN_IMPERSONATE = "can_impersonate"
CAN_DEBUG = "can_debug"
IS_ENTERPRISE = "is_enterprise"
class ErrorReportingConfigSerializer(PassiveSerializer):
@ -68,24 +62,14 @@ class ConfigView(APIView):
"""Get all capabilities this server instance supports"""
caps = []
deb_test = settings.DEBUG or settings.TEST
if (
CONFIG.get("storage.media.backend", "file") == "s3"
or Path(settings.STORAGES["default"]["OPTIONS"]["location"]).is_mount()
or deb_test
):
if path.ismount(settings.MEDIA_ROOT) or deb_test:
caps.append(Capabilities.CAN_SAVE_MEDIA)
for processor in get_context_processors():
if cap := processor.capability():
caps.append(cap)
if self.request.tenant.impersonation:
if GEOIP_READER.enabled:
caps.append(Capabilities.CAN_GEO_IP)
if CONFIG.y_bool("impersonation"):
caps.append(Capabilities.CAN_IMPERSONATE)
if settings.DEBUG: # pragma: no cover
caps.append(Capabilities.CAN_DEBUG)
if "authentik.enterprise" in settings.INSTALLED_APPS:
caps.append(Capabilities.IS_ENTERPRISE)
for _, result in capabilities.send(sender=self):
if result:
caps.append(result)
return caps
def get_config(self) -> ConfigSerializer:
@ -93,17 +77,17 @@ class ConfigView(APIView):
return ConfigSerializer(
{
"error_reporting": {
"enabled": CONFIG.get("error_reporting.enabled"),
"sentry_dsn": CONFIG.get("error_reporting.sentry_dsn"),
"environment": CONFIG.get("error_reporting.environment"),
"send_pii": CONFIG.get("error_reporting.send_pii"),
"traces_sample_rate": float(CONFIG.get("error_reporting.sample_rate", 0.4)),
"enabled": CONFIG.y("error_reporting.enabled"),
"sentry_dsn": CONFIG.y("error_reporting.sentry_dsn"),
"environment": CONFIG.y("error_reporting.environment"),
"send_pii": CONFIG.y("error_reporting.send_pii"),
"traces_sample_rate": float(CONFIG.y("error_reporting.sample_rate", 0.4)),
},
"capabilities": self.get_capabilities(),
"cache_timeout": CONFIG.get_int("cache.timeout"),
"cache_timeout_flows": CONFIG.get_int("cache.timeout_flows"),
"cache_timeout_policies": CONFIG.get_int("cache.timeout_policies"),
"cache_timeout_reputation": CONFIG.get_int("cache.timeout_reputation"),
"cache_timeout": int(CONFIG.y("redis.cache_timeout")),
"cache_timeout_flows": int(CONFIG.y("redis.cache_timeout_flows")),
"cache_timeout_policies": int(CONFIG.y("redis.cache_timeout_policies")),
"cache_timeout_reputation": int(CONFIG.y("redis.cache_timeout_reputation")),
}
)

View File

@ -1,58 +1,259 @@
"""api v3 urls"""
from importlib import import_module
from django.urls import path
from django.urls.resolvers import URLPattern
from django.views.decorators.cache import cache_page
from drf_spectacular.views import SpectacularAPIView
from rest_framework import routers
from structlog.stdlib import get_logger
from authentik.admin.api.meta import AppsViewSet
from authentik.admin.api.metrics import AdministrationMetricsViewSet
from authentik.admin.api.system import SystemView
from authentik.admin.api.tasks import TaskViewSet
from authentik.admin.api.version import VersionView
from authentik.admin.api.workers import WorkerView
from authentik.api.v3.config import ConfigView
from authentik.api.views import APIBrowserView
from authentik.lib.utils.reflection import get_apps
LOGGER = get_logger()
from authentik.blueprints.api import BlueprintInstanceViewSet
from authentik.core.api.applications import ApplicationViewSet
from authentik.core.api.authenticated_sessions import AuthenticatedSessionViewSet
from authentik.core.api.devices import AdminDeviceViewSet, DeviceViewSet
from authentik.core.api.groups import GroupViewSet
from authentik.core.api.propertymappings import PropertyMappingViewSet
from authentik.core.api.providers import ProviderViewSet
from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
from authentik.core.api.tokens import TokenViewSet
from authentik.core.api.users import UserViewSet
from authentik.crypto.api import CertificateKeyPairViewSet
from authentik.events.api.events import EventViewSet
from authentik.events.api.notification_mappings import NotificationWebhookMappingViewSet
from authentik.events.api.notification_rules import NotificationRuleViewSet
from authentik.events.api.notification_transports import NotificationTransportViewSet
from authentik.events.api.notifications import NotificationViewSet
from authentik.flows.api.bindings import FlowStageBindingViewSet
from authentik.flows.api.flows import FlowViewSet
from authentik.flows.api.stages import StageViewSet
from authentik.flows.views.executor import FlowExecutorView
from authentik.flows.views.inspector import FlowInspectorView
from authentik.outposts.api.outposts import OutpostViewSet
from authentik.outposts.api.service_connections import (
DockerServiceConnectionViewSet,
KubernetesServiceConnectionViewSet,
ServiceConnectionViewSet,
)
from authentik.policies.api.bindings import PolicyBindingViewSet
from authentik.policies.api.policies import PolicyViewSet
from authentik.policies.dummy.api import DummyPolicyViewSet
from authentik.policies.event_matcher.api import EventMatcherPolicyViewSet
from authentik.policies.expiry.api import PasswordExpiryPolicyViewSet
from authentik.policies.expression.api import ExpressionPolicyViewSet
from authentik.policies.hibp.api import HaveIBeenPwendPolicyViewSet
from authentik.policies.password.api import PasswordPolicyViewSet
from authentik.policies.reputation.api import ReputationPolicyViewSet, ReputationViewSet
from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet
from authentik.providers.oauth2.api.providers import OAuth2ProviderViewSet
from authentik.providers.oauth2.api.scopes import ScopeMappingViewSet
from authentik.providers.oauth2.api.tokens import AuthorizationCodeViewSet, RefreshTokenViewSet
from authentik.providers.proxy.api import ProxyOutpostConfigViewSet, ProxyProviderViewSet
from authentik.providers.saml.api.property_mapping import SAMLPropertyMappingViewSet
from authentik.providers.saml.api.providers import SAMLProviderViewSet
from authentik.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
from authentik.sources.oauth.api.source import OAuthSourceViewSet
from authentik.sources.oauth.api.source_connection import UserOAuthSourceConnectionViewSet
from authentik.sources.plex.api.source import PlexSourceViewSet
from authentik.sources.plex.api.source_connection import PlexSourceConnectionViewSet
from authentik.sources.saml.api.source import SAMLSourceViewSet
from authentik.sources.saml.api.source_connection import UserSAMLSourceConnectionViewSet
from authentik.stages.authenticator_duo.api import (
AuthenticatorDuoStageViewSet,
DuoAdminDeviceViewSet,
DuoDeviceViewSet,
)
from authentik.stages.authenticator_sms.api import (
AuthenticatorSMSStageViewSet,
SMSAdminDeviceViewSet,
SMSDeviceViewSet,
)
from authentik.stages.authenticator_static.api import (
AuthenticatorStaticStageViewSet,
StaticAdminDeviceViewSet,
StaticDeviceViewSet,
)
from authentik.stages.authenticator_totp.api import (
AuthenticatorTOTPStageViewSet,
TOTPAdminDeviceViewSet,
TOTPDeviceViewSet,
)
from authentik.stages.authenticator_validate.api import AuthenticatorValidateStageViewSet
from authentik.stages.authenticator_webauthn.api import (
AuthenticateWebAuthnStageViewSet,
WebAuthnAdminDeviceViewSet,
WebAuthnDeviceViewSet,
)
from authentik.stages.captcha.api import CaptchaStageViewSet
from authentik.stages.consent.api import ConsentStageViewSet, UserConsentViewSet
from authentik.stages.deny.api import DenyStageViewSet
from authentik.stages.dummy.api import DummyStageViewSet
from authentik.stages.email.api import EmailStageViewSet
from authentik.stages.identification.api import IdentificationStageViewSet
from authentik.stages.invitation.api import InvitationStageViewSet, InvitationViewSet
from authentik.stages.password.api import PasswordStageViewSet
from authentik.stages.prompt.api import PromptStageViewSet, PromptViewSet
from authentik.stages.user_delete.api import UserDeleteStageViewSet
from authentik.stages.user_login.api import UserLoginStageViewSet
from authentik.stages.user_logout.api import UserLogoutStageViewSet
from authentik.stages.user_write.api import UserWriteStageViewSet
from authentik.tenants.api import TenantViewSet
router = routers.DefaultRouter()
router.include_format_suffixes = False
_other_urls = []
for _authentik_app in get_apps():
try:
api_urls = import_module(f"{_authentik_app.name}.urls")
except ModuleNotFoundError:
continue
except ImportError as exc:
LOGGER.warning("Could not import app's URLs", app_name=_authentik_app.name, exc=exc)
continue
if not hasattr(api_urls, "api_urlpatterns"):
LOGGER.debug(
"App does not define API URLs",
app_name=_authentik_app.name,
)
continue
urls: list = api_urls.api_urlpatterns
for url in urls:
if isinstance(url, URLPattern):
_other_urls.append(url)
else:
router.register(*url)
LOGGER.debug(
"Mounted API URLs",
app_name=_authentik_app.name,
)
router.register("admin/system_tasks", TaskViewSet, basename="admin_system_tasks")
router.register("admin/apps", AppsViewSet, basename="apps")
router.register("core/authenticated_sessions", AuthenticatedSessionViewSet)
router.register("core/applications", ApplicationViewSet)
router.register("core/groups", GroupViewSet)
router.register("core/users", UserViewSet)
router.register("core/user_consent", UserConsentViewSet)
router.register("core/tokens", TokenViewSet)
router.register("core/tenants", TenantViewSet)
router.register("outposts/instances", OutpostViewSet)
router.register("outposts/service_connections/all", ServiceConnectionViewSet)
router.register("outposts/service_connections/docker", DockerServiceConnectionViewSet)
router.register("outposts/service_connections/kubernetes", KubernetesServiceConnectionViewSet)
router.register("outposts/proxy", ProxyOutpostConfigViewSet)
router.register("outposts/ldap", LDAPOutpostConfigViewSet)
router.register("flows/instances", FlowViewSet)
router.register("flows/bindings", FlowStageBindingViewSet)
router.register("crypto/certificatekeypairs", CertificateKeyPairViewSet)
router.register("events/events", EventViewSet)
router.register("events/notifications", NotificationViewSet)
router.register("events/transports", NotificationTransportViewSet)
router.register("events/rules", NotificationRuleViewSet)
router.register("managed/blueprints", BlueprintInstanceViewSet)
router.register("sources/all", SourceViewSet)
router.register("sources/user_connections/all", UserSourceConnectionViewSet)
router.register("sources/user_connections/oauth", UserOAuthSourceConnectionViewSet)
router.register("sources/user_connections/plex", PlexSourceConnectionViewSet)
router.register("sources/user_connections/saml", UserSAMLSourceConnectionViewSet)
router.register("sources/ldap", LDAPSourceViewSet)
router.register("sources/saml", SAMLSourceViewSet)
router.register("sources/oauth", OAuthSourceViewSet)
router.register("sources/plex", PlexSourceViewSet)
router.register("policies/all", PolicyViewSet)
router.register("policies/bindings", PolicyBindingViewSet)
router.register("policies/expression", ExpressionPolicyViewSet)
router.register("policies/event_matcher", EventMatcherPolicyViewSet)
router.register("policies/haveibeenpwned", HaveIBeenPwendPolicyViewSet)
router.register("policies/password_expiry", PasswordExpiryPolicyViewSet)
router.register("policies/password", PasswordPolicyViewSet)
router.register("policies/reputation/scores", ReputationViewSet)
router.register("policies/reputation", ReputationPolicyViewSet)
router.register("providers/all", ProviderViewSet)
router.register("providers/ldap", LDAPProviderViewSet)
router.register("providers/proxy", ProxyProviderViewSet)
router.register("providers/oauth2", OAuth2ProviderViewSet)
router.register("providers/saml", SAMLProviderViewSet)
router.register("oauth2/authorization_codes", AuthorizationCodeViewSet)
router.register("oauth2/refresh_tokens", RefreshTokenViewSet)
router.register("propertymappings/all", PropertyMappingViewSet)
router.register("propertymappings/ldap", LDAPPropertyMappingViewSet)
router.register("propertymappings/saml", SAMLPropertyMappingViewSet)
router.register("propertymappings/scope", ScopeMappingViewSet)
router.register("propertymappings/notification", NotificationWebhookMappingViewSet)
router.register("authenticators/all", DeviceViewSet, basename="device")
router.register("authenticators/duo", DuoDeviceViewSet)
router.register("authenticators/sms", SMSDeviceViewSet)
router.register("authenticators/static", StaticDeviceViewSet)
router.register("authenticators/totp", TOTPDeviceViewSet)
router.register("authenticators/webauthn", WebAuthnDeviceViewSet)
router.register(
"authenticators/admin/all",
AdminDeviceViewSet,
basename="admin-device",
)
router.register(
"authenticators/admin/duo",
DuoAdminDeviceViewSet,
basename="admin-duodevice",
)
router.register(
"authenticators/admin/sms",
SMSAdminDeviceViewSet,
basename="admin-smsdevice",
)
router.register(
"authenticators/admin/static",
StaticAdminDeviceViewSet,
basename="admin-staticdevice",
)
router.register("authenticators/admin/totp", TOTPAdminDeviceViewSet, basename="admin-totpdevice")
router.register(
"authenticators/admin/webauthn",
WebAuthnAdminDeviceViewSet,
basename="admin-webauthndevice",
)
router.register("stages/all", StageViewSet)
router.register("stages/authenticator/duo", AuthenticatorDuoStageViewSet)
router.register("stages/authenticator/sms", AuthenticatorSMSStageViewSet)
router.register("stages/authenticator/static", AuthenticatorStaticStageViewSet)
router.register("stages/authenticator/totp", AuthenticatorTOTPStageViewSet)
router.register("stages/authenticator/validate", AuthenticatorValidateStageViewSet)
router.register("stages/authenticator/webauthn", AuthenticateWebAuthnStageViewSet)
router.register("stages/captcha", CaptchaStageViewSet)
router.register("stages/consent", ConsentStageViewSet)
router.register("stages/deny", DenyStageViewSet)
router.register("stages/email", EmailStageViewSet)
router.register("stages/identification", IdentificationStageViewSet)
router.register("stages/invitation/invitations", InvitationViewSet)
router.register("stages/invitation/stages", InvitationStageViewSet)
router.register("stages/password", PasswordStageViewSet)
router.register("stages/prompt/prompts", PromptViewSet)
router.register("stages/prompt/stages", PromptStageViewSet)
router.register("stages/user_delete", UserDeleteStageViewSet)
router.register("stages/user_login", UserLoginStageViewSet)
router.register("stages/user_logout", UserLogoutStageViewSet)
router.register("stages/user_write", UserWriteStageViewSet)
router.register("stages/dummy", DummyStageViewSet)
router.register("policies/dummy", DummyPolicyViewSet)
urlpatterns = (
[
path("", APIBrowserView.as_view(), name="schema-browser"),
]
+ router.urls
+ _other_urls
+ [
path(
"admin/metrics/",
AdministrationMetricsViewSet.as_view(),
name="admin_metrics",
),
path("admin/version/", VersionView.as_view(), name="admin_version"),
path("admin/workers/", WorkerView.as_view(), name="admin_workers"),
path("admin/system/", SystemView.as_view(), name="admin_system"),
path("root/config/", ConfigView.as_view(), name="config"),
path(
"flows/executor/<slug:flow_slug>/",
FlowExecutorView.as_view(),
name="flow-executor",
),
path(
"flows/inspector/<slug:flow_slug>/",
FlowInspectorView.as_view(),
name="flow-inspector",
),
path("schema/", cache_page(86400)(SpectacularAPIView.as_view()), name="schema"),
]
)
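The newer loader at the top of this file discovers each app's `api_urlpatterns` instead of keeping the explicit registry above: tuples are handed to router.register(*url), plain path() objects are mounted as-is. A hedged sketch of what an app would expose for it, with invented module, viewset, and path names:

from django.urls import path
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.viewsets import ViewSet


class WidgetViewSet(ViewSet):  # placeholder viewset
    def list(self, request):
        return Response([])


class WidgetStatusView(APIView):  # placeholder view
    def get(self, request):
        return Response({"ok": True})


api_urlpatterns = [
    ("example/widgets", WidgetViewSet, "widget"),  # registered via router.register(*url)
    path("example/status/", WidgetStatusView.as_view(), name="example_status"),  # appended as a URLPattern
]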

View File

@ -1,5 +1,4 @@
"""General API Views"""
from typing import Any
from django.urls import reverse

View File

@ -1,22 +1,19 @@
"""Serializer mixin for managed models"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, DateTimeField
from rest_framework.fields import CharField, DateTimeField, JSONField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer, ModelSerializer
from rest_framework.viewsets import ModelViewSet
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.api.decorators import permission_required
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import JSONDictField, PassiveSerializer
from authentik.rbac.decorators import permission_required
from authentik.core.api.utils import PassiveSerializer
class ManagedSerializer:
@ -29,40 +26,22 @@ class MetadataSerializer(PassiveSerializer):
"""Serializer for blueprint metadata"""
name = CharField()
labels = JSONDictField()
labels = JSONField()
class BlueprintInstanceSerializer(ModelSerializer):
"""Info about a single blueprint instance file"""
def validate_path(self, path: str) -> str:
"""Ensure the path (if set) specified is retrievable"""
if path == "" or path.startswith(OCI_PREFIX):
return path
files: list[dict] = blueprints_find_dict.delay().get()
if path not in [file["path"] for file in files]:
raise ValidationError(_("Blueprint file does not exist"))
"""Ensure the path specified is retrievable"""
try:
BlueprintInstance(path=path).retrieve()
except BlueprintRetrievalFailed as exc:
raise ValidationError(exc) from exc
return path
def validate_content(self, content: str) -> str:
"""Ensure content (if set) is a valid blueprint"""
if content == "":
return content
context = self.instance.context if self.instance else {}
valid, logs = Importer.from_string(content, context).validate()
if not valid:
text_logs = "\n".join([x["event"] for x in logs])
raise ValidationError(
_("Failed to validate blueprint: {logs}".format_map({"logs": text_logs}))
)
return content
def validate(self, attrs: dict) -> dict:
if attrs.get("path", "") == "" and attrs.get("content", "") == "":
raise ValidationError(_("Either path or content must be set."))
return super().validate(attrs)
class Meta:
model = BlueprintInstance
fields = [
"pk",
@ -75,7 +54,6 @@ class BlueprintInstanceSerializer(ModelSerializer):
"enabled",
"managed_models",
"metadata",
"content",
]
extra_kwargs = {
"status": {"read_only": True},
@ -89,11 +67,11 @@ class BlueprintInstanceSerializer(ModelSerializer):
class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
"""Blueprint instances"""
permission_classes = [IsAdminUser]
serializer_class = BlueprintInstanceSerializer
queryset = BlueprintInstance.objects.all()
search_fields = ["name", "path"]
filterset_fields = ["name", "path"]
ordering = ["name"]
@extend_schema(
responses={

View File

@ -1,6 +1,5 @@
"""authentik Blueprints app"""
from collections.abc import Callable
from importlib import import_module
from inspect import ismethod
@ -8,100 +7,40 @@ from django.apps import AppConfig
from django.db import DatabaseError, InternalError, ProgrammingError
from structlog.stdlib import BoundLogger, get_logger
from authentik.root.signals import startup
class ManagedAppConfig(AppConfig):
"""Basic reconciliation logic for apps"""
logger: BoundLogger
RECONCILE_GLOBAL_CATEGORY: str = "global"
RECONCILE_TENANT_CATEGORY: str = "tenant"
_logger: BoundLogger
def __init__(self, app_name: str, *args, **kwargs) -> None:
super().__init__(app_name, *args, **kwargs)
self.logger = get_logger().bind(app_name=app_name)
self._logger = get_logger().bind(app_name=app_name)
def ready(self) -> None:
self.import_related()
startup.connect(self._on_startup_callback, dispatch_uid=self.label)
self.reconcile()
return super().ready()
def _on_startup_callback(self, sender, **_):
self._reconcile_global()
self._reconcile_tenant()
def import_related(self):
"""Automatically import related modules which rely on just being imported
to register themselves (mainly django signals and celery tasks)"""
def import_relative(rel_module: str):
try:
module_name = f"{self.name}.{rel_module}"
import_module(module_name)
self.logger.info("Imported related module", module=module_name)
except ModuleNotFoundError:
pass
import_relative("checks")
import_relative("tasks")
import_relative("signals")
def import_module(self, path: str):
"""Load module"""
import_module(path)
def _reconcile(self, prefix: str) -> None:
def reconcile(self) -> None:
"""reconcile ourselves"""
prefix = "reconcile_"
for meth_name in dir(self):
meth = getattr(self, meth_name)
if not ismethod(meth):
continue
category = getattr(meth, "_authentik_managed_reconcile", None)
if category != prefix:
if not meth_name.startswith(prefix):
continue
name = meth_name.replace(prefix, "")
try:
self.logger.debug("Starting reconciler", name=name)
self._logger.debug("Starting reconciler", name=name)
meth()
self.logger.debug("Successfully reconciled", name=name)
self._logger.debug("Successfully reconciled", name=name)
except (DatabaseError, ProgrammingError, InternalError) as exc:
self.logger.warning("Failed to run reconcile", name=name, exc=exc)
@staticmethod
def reconcile_tenant(func: Callable):
"""Mark a function to be called on startup (for each tenant)"""
func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_TENANT_CATEGORY
return func
@staticmethod
def reconcile_global(func: Callable):
"""Mark a function to be called on startup (globally)"""
func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_GLOBAL_CATEGORY
return func
def _reconcile_tenant(self) -> None:
"""reconcile ourselves for tenanted methods"""
from authentik.tenants.models import Tenant
try:
tenants = list(Tenant.objects.filter(ready=True))
except (DatabaseError, ProgrammingError, InternalError) as exc:
self.logger.debug("Failed to get tenants to run reconcile", exc=exc)
return
for tenant in tenants:
with tenant:
self._reconcile(self.RECONCILE_TENANT_CATEGORY)
def _reconcile_global(self) -> None:
"""
reconcile ourselves for global methods.
Used for signals, tasks, etc. Database queries should not be made in here.
"""
from django_tenants.utils import get_public_schema_name, schema_context
with schema_context(get_public_schema_name()):
self._reconcile(self.RECONCILE_GLOBAL_CATEGORY)
self._logger.debug("Failed to run reconcile", name=name, exc=exc)
class AuthentikBlueprintsConfig(ManagedAppConfig):
@ -112,18 +51,15 @@ class AuthentikBlueprintsConfig(ManagedAppConfig):
verbose_name = "authentik Blueprints"
default = True
@ManagedAppConfig.reconcile_global
def load_blueprints_v1_tasks(self):
def reconcile_load_blueprints_v1_tasks(self):
"""Load v1 tasks"""
self.import_module("authentik.blueprints.v1.tasks")
@ManagedAppConfig.reconcile_tenant
def blueprints_discovery(self):
def reconcile_blueprints_discover(self):
"""Run blueprint discovery"""
from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints
from authentik.blueprints.v1.tasks import blueprints_discover
blueprints_discovery.delay()
clear_failed_blueprints.delay()
blueprints_discover.delay()
def import_models(self):
super().import_models()
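
Note (not part of this diff): a minimal sketch of how an app config might use the decorator-based reconciler API shown on one side of the hunk above. The app name, module paths, and model are hypothetical; only the ManagedAppConfig decorators and import_module helper come from the code above.

# Sketch only: assumes the decorator-based ManagedAppConfig API shown above.
from authentik.blueprints.apps import ManagedAppConfig


class ExampleAppConfig(ManagedAppConfig):
    """Hypothetical app config demonstrating the two reconciler categories."""

    name = "authentik.example"  # hypothetical module path
    label = "authentik_example"
    verbose_name = "authentik Example"
    default = True

    @ManagedAppConfig.reconcile_global
    def register_tasks(self):
        # Runs against the public schema on startup; no tenant-specific
        # database queries belong here.
        self.import_module("authentik.example.tasks")  # hypothetical module

    @ManagedAppConfig.reconcile_tenant
    def ensure_defaults(self):
        # Runs once per ready tenant, inside that tenant's context.
        from authentik.example.models import Default  # hypothetical model

        Default.objects.get_or_create(name="default")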

View File

@ -1,5 +1,4 @@
"""Apply blueprint from commandline"""
from sys import exit as sys_exit
from django.core.management.base import BaseCommand, no_translations
@ -7,7 +6,6 @@ from structlog.stdlib import get_logger
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.importer import Importer
from authentik.tenants.models import Tenant
LOGGER = get_logger()
@ -18,16 +16,16 @@ class Command(BaseCommand):
@no_translations
def handle(self, *args, **options):
"""Apply all blueprints in order, abort when one fails to import"""
for tenant in Tenant.objects.filter(ready=True):
with tenant:
for blueprint_path in options.get("blueprints", []):
content = BlueprintInstance(path=blueprint_path).retrieve()
importer = Importer.from_string(content)
valid, _ = importer.validate()
if not valid:
self.stderr.write("blueprint invalid")
sys_exit(1)
importer.apply()
for blueprint_path in options.get("blueprints", []):
content = BlueprintInstance(path=blueprint_path).retrieve()
importer = Importer(content)
valid, logs = importer.validate()
if not valid:
for log in logs:
getattr(LOGGER, log.pop("log_level"))(**log)
self.stderr.write("blueprint invalid")
sys_exit(1)
importer.apply()
def add_arguments(self, parser):
parser.add_argument("blueprints", nargs="+", type=str)
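
Note (not part of this diff): a minimal sketch of invoking this command programmatically, e.g. from a Django shell. It assumes the command is registered as "apply_blueprint"; the path is hypothetical and resolved relative to the configured blueprints_dir.

# Sketch only: assumes the command name "apply_blueprint" and a configured
# Django environment; the blueprint path below is hypothetical.
from django.core.management import call_command

call_command("apply_blueprint", "example/my-blueprint.yaml")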

View File

@ -1,19 +1,17 @@
"""Export blueprint of current authentik install"""
from django.core.management.base import no_translations
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
from authentik.blueprints.v1.exporter import Exporter
from authentik.tenants.management import TenantCommand
LOGGER = get_logger()
class Command(TenantCommand):
class Command(BaseCommand):
"""Export blueprint of current authentik install"""
@no_translations
def handle_per_tenant(self, *args, **options):
def handle(self, *args, **options):
"""Export blueprint of current authentik install"""
exporter = Exporter()
self.stdout.write(exporter.export_to_string())

View File

@ -1,187 +1,36 @@
"""Generate JSON Schema for blueprints"""
from json import dumps
from typing import Any
from json import dumps, loads
from pathlib import Path
from django.core.management.base import BaseCommand, no_translations
from django.db.models import Model, fields
from drf_jsonschema_serializer.convert import converter, field_to_converter
from rest_framework.fields import Field, JSONField, UUIDField
from rest_framework.relations import PrimaryKeyRelatedField
from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger
from authentik import __version__
from authentik.blueprints.v1.common import BlueprintEntryDesiredState
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
from authentik.lib.models import SerializerModel
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.blueprints.v1.meta.registry import registry
LOGGER = get_logger()
@converter
class PrimaryKeyRelatedFieldConverter:
"""Custom primary key field converter which is aware of non-integer based PKs
This is not an exhaustive fix for other non-int PKs, however in authentik we either
use UUIDs or ints"""
field_class = PrimaryKeyRelatedField
def convert(self, field: PrimaryKeyRelatedField):
model: Model = field.queryset.model
pk_field = model._meta.pk
if isinstance(pk_field, fields.UUIDField):
return {"type": "string", "format": "uuid"}
return {"type": "integer"}
class Command(BaseCommand):
"""Generate JSON Schema for blueprints"""
schema: dict
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.schema = {
"$schema": "http://json-schema.org/draft-07/schema",
"$id": "https://goauthentik.io/blueprints/schema.json",
"type": "object",
"title": f"authentik {__version__} Blueprint schema",
"required": ["version", "entries"],
"properties": {
"version": {
"$id": "#/properties/version",
"type": "integer",
"title": "Blueprint version",
"default": 1,
},
"metadata": {
"$id": "#/properties/metadata",
"type": "object",
"required": ["name"],
"properties": {
"name": {"type": "string"},
"labels": {"type": "object", "additionalProperties": {"type": "string"}},
},
},
"context": {
"$id": "#/properties/context",
"type": "object",
"additionalProperties": True,
},
"entries": {
"type": "array",
"items": {
"oneOf": [],
},
},
},
"$defs": {},
}
@no_translations
def handle(self, *args, **options):
"""Generate JSON Schema for blueprints"""
self.build()
self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default))
path = Path(__file__).parent.joinpath("./schema_template.json")
with open(path, "r", encoding="utf-8") as _template_file:
self.schema = loads(_template_file.read())
self.set_model_allowed()
self.stdout.write(dumps(self.schema, indent=4))
@staticmethod
def json_default(value: Any) -> Any:
"""Helper that handles gettext_lazy strings that JSON doesn't handle"""
return str(value)
def build(self):
"""Build all models into the schema"""
def set_model_allowed(self):
"""Set model enum"""
model_names = []
for model in registry.get_models():
if issubclass(model, BaseMetaModel):
serializer_class = model.serializer()
else:
if model._meta.abstract:
continue
if not is_model_allowed(model):
continue
model_instance: Model = model()
if not isinstance(model_instance, SerializerModel):
continue
serializer_class = model_instance.serializer
serializer = serializer_class(
context={
SERIALIZER_CONTEXT_BLUEPRINT: False,
}
)
model_path = f"{model._meta.app_label}.{model._meta.model_name}"
self.schema["properties"]["entries"]["items"]["oneOf"].append(
self.template_entry(model_path, serializer)
)
def template_entry(self, model_path: str, serializer: Serializer) -> dict:
"""Template entry for a single model"""
model_schema = self.to_jsonschema(serializer)
model_schema["required"] = []
def_name = f"model_{model_path}"
def_path = f"#/$defs/{def_name}"
self.schema["$defs"][def_name] = model_schema
return {
"type": "object",
"required": ["model", "identifiers"],
"properties": {
"model": {"const": model_path},
"id": {"type": "string"},
"state": {
"type": "string",
"enum": [s.value for s in BlueprintEntryDesiredState],
"default": "present",
},
"conditions": {"type": "array", "items": {"type": "boolean"}},
"attrs": {"$ref": def_path},
"identifiers": {"$ref": def_path},
},
}
def field_to_jsonschema(self, field: Field) -> dict:
"""Convert a single field to json schema"""
if isinstance(field, Serializer):
result = self.to_jsonschema(field)
else:
try:
converter = field_to_converter[field]
result = converter.convert(field)
except KeyError:
if isinstance(field, JSONField):
result = {"type": "object", "additionalProperties": True}
elif isinstance(field, UUIDField):
result = {"type": "string", "format": "uuid"}
else:
raise
if field.label:
result["title"] = field.label
if field.help_text:
result["description"] = field.help_text
return self.clean_result(result)
def clean_result(self, result: dict) -> dict:
"""Remove enumNames from result, recursively"""
result.pop("enumNames", None)
for key, value in result.items():
if isinstance(value, dict):
result[key] = self.clean_result(value)
return result
def to_jsonschema(self, serializer: Serializer) -> dict:
"""Convert serializer to json schema"""
properties = {}
required = []
for name, field in serializer.fields.items():
if field.read_only:
if not is_model_allowed(model):
continue
sub_schema = self.field_to_jsonschema(field)
if field.required:
required.append(name)
properties[name] = sub_schema
result = {"type": "object", "properties": properties}
if required:
result["required"] = required
return result
model_names.append(f"{model._meta.app_label}.{model._meta.model_name}")
model_names.sort()
self.schema["properties"]["entries"]["items"]["properties"]["model"]["enum"] = model_names
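
Note (not part of this diff): a minimal sketch of validating a blueprint document against the schema this command emits. It assumes the jsonschema package and a schema.json generated beforehand; the model name and identifiers below are only illustrative.

# Sketch only: validate a minimal blueprint document against the generated
# schema. Assumes the `jsonschema` package and a schema.json produced by the
# management command above; the entry below is illustrative.
import json

from jsonschema import validate

with open("blueprints/schema.json", encoding="utf-8") as schema_file:
    schema = json.load(schema_file)

blueprint = {
    "version": 1,
    "metadata": {"name": "example"},
    "entries": [
        {
            "model": "authentik_core.user",
            "identifiers": {"username": "example"},
            "attrs": {"name": "Example"},
        }
    ],
}

validate(instance=blueprint, schema=schema)  # raises ValidationError on mismatch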

View File

@ -0,0 +1,105 @@
{
"$schema": "http://json-schema.org/draft-07/schema",
"$id": "http://example.com/example.json",
"type": "object",
"title": "authentik Blueprint schema",
"default": {},
"required": [
"version",
"entries"
],
"properties": {
"version": {
"$id": "#/properties/version",
"type": "integer",
"title": "Blueprint version",
"default": 1
},
"metadata": {
"$id": "#/properties/metadata",
"type": "object",
"required": [
"name"
],
"properties": {
"name": {
"type": "string"
},
"labels": {
"type": "object"
}
}
},
"context": {
"$id": "#/properties/context",
"type": "object",
"additionalProperties": true
},
"entries": {
"type": "array",
"items": {
"$id": "#entry",
"type": "object",
"required": [
"model"
],
"properties": {
"model": {
"type": "string",
"enum": [
"placeholder"
]
},
"id": {
"type": "string"
},
"state": {
"type": "string",
"enum": [
"absent",
"present",
"created"
],
"default": "present"
},
"conditions": {
"type": "array",
"items": {
"type": "boolean"
}
},
"attrs": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "Commonly available field, may not exist on all models"
}
},
"default": {},
"additionalProperties": true
},
"identifiers": {
"type": "object",
"default": {},
"properties": {
"pk": {
"description": "Commonly available field, may not exist on all models",
"anyOf": [
{
"type": "number"
},
{
"type": "string",
"format": "uuid"
}
]
}
},
"additionalProperties": true
}
}
}
}
}
}

View File

@ -6,6 +6,7 @@ from pathlib import Path
import django.contrib.postgres.fields
from dacite.core import from_dict
from django.apps.registry import Apps
from django.conf import settings
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from yaml import load
@ -14,7 +15,7 @@ from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM
from authentik.lib.config import CONFIG
def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
def check_blueprint_v1_file(BlueprintInstance: type["BlueprintInstance"], path: Path):
"""Check if blueprint should be imported"""
from authentik.blueprints.models import BlueprintInstanceStatus
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
@ -29,17 +30,15 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
if version != 1:
return
blueprint_file.seek(0)
instance: BlueprintInstance = (
BlueprintInstance.objects.using(db_alias).filter(path=path).first()
)
rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
instance: BlueprintInstance = BlueprintInstance.objects.filter(path=path).first()
rel_path = path.relative_to(Path(CONFIG.y("blueprints_dir")))
meta = None
if metadata:
meta = from_dict(BlueprintMetadata, metadata)
if meta.labels.get(LABEL_AUTHENTIK_INSTANTIATE, "").lower() == "false":
return
if not instance:
BlueprintInstance.objects.using(db_alias).create(
instance = BlueprintInstance(
name=meta.name if meta else str(rel_path),
path=str(rel_path),
context={},
@ -47,8 +46,9 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
enabled=True,
managed_models=[],
last_applied_hash="",
metadata=metadata or {},
metadata=metadata,
)
instance.save()
def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
@ -56,8 +56,8 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEdit
Flow = apps.get_model("authentik_flows", "Flow")
db_alias = schema_editor.connection.alias
for file in glob(f"{CONFIG.get('blueprints_dir')}/**/*.yaml", recursive=True):
check_blueprint_v1_file(BlueprintInstance, db_alias, Path(file))
for file in glob(f"{CONFIG.y('blueprints_dir')}/**/*.yaml", recursive=True):
check_blueprint_v1_file(BlueprintInstance, Path(file))
for blueprint in BlueprintInstance.objects.using(db_alias).all():
# If we already have flows (and we should always run before flow migrations)
@ -71,6 +71,7 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEdit
class Migration(migrations.Migration):
initial = True
dependencies = [("authentik_flows", "0001_initial")]
@ -85,12 +86,7 @@ class Migration(migrations.Migration):
"managed",
models.TextField(
default=None,
help_text=(
"Objects which are managed by authentik. These objects are created and"
" updated automatically. This is flag only indicates that an object can"
" be overwritten by migrations. You can still modify the objects via"
" the API, but expect changes to be overwritten in a later update."
),
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
null=True,
unique=True,
verbose_name="Managed by authentik",

View File

@ -1,22 +0,0 @@
# Generated by Django 4.1.5 on 2023-01-10 19:48
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_blueprints", "0001_initial"),
]
operations = [
migrations.AddField(
model_name="blueprintinstance",
name="content",
field=models.TextField(blank=True, default=""),
),
migrations.AlterField(
model_name="blueprintinstance",
name="path",
field=models.TextField(blank=True, default=""),
),
]

View File

@ -1,31 +0,0 @@
# Generated by Django 4.1.7 on 2023-04-28 10:49
from django.db import migrations, models
from authentik.lib.migrations import fallback_names
class Migration(migrations.Migration):
dependencies = [
("authentik_blueprints", "0002_blueprintinstance_content"),
]
operations = [
migrations.RunPython(fallback_names("authentik_blueprints", "blueprintinstance", "name")),
migrations.AlterField(
model_name="blueprintinstance",
name="name",
field=models.TextField(unique=True),
),
migrations.AlterField(
model_name="blueprintinstance",
name="managed",
field=models.TextField(
default=None,
help_text="Objects that are managed by authentik. These objects are created and updated automatically. This flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
null=True,
unique=True,
verbose_name="Managed by authentik",
),
),
]

View File

@ -1,44 +1,58 @@
"""blueprint models"""
from pathlib import Path
from urllib.parse import urlparse
from uuid import uuid4
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils.translation import gettext_lazy as _
from opencontainers.distribution.reggie import (
NewClient,
WithDebug,
WithDefaultName,
WithDigest,
WithReference,
WithUserAgent,
WithUsernamePassword,
)
from requests.exceptions import RequestException
from rest_framework.serializers import Serializer
from structlog import get_logger
from authentik.blueprints.v1.oci import OCI_PREFIX, BlueprintOCIClient, OCIException
from authentik.lib.config import CONFIG
from authentik.lib.models import CreatedUpdatedModel, SerializerModel
from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.http import authentik_user_agent
OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
LOGGER = get_logger()
class BlueprintRetrievalFailed(SentryIgnoredException):
"""Error raised when we are unable to fetch the blueprint contents, whether it be HTTP files
"""Error raised when we're unable to fetch the blueprint contents, whether it be HTTP files
not being accessible or local files not being readable"""
class ManagedModel(models.Model):
"""Model that can be managed by authentik exclusively"""
"""Model which can be managed by authentik exclusively"""
managed = models.TextField(
default=None,
null=True,
verbose_name=_("Managed by authentik"),
help_text=_(
"Objects that are managed by authentik. These objects are created and updated "
"automatically. This flag only indicates that an object can be overwritten by "
"migrations. You can still modify the objects via the API, but expect changes "
"to be overwritten in a later update."
(
"Objects which are managed by authentik. These objects are created and updated "
"automatically. This is flag only indicates that an object can be overwritten by "
"migrations. You can still modify the objects via the API, but expect changes "
"to be overwritten in a later update."
)
),
unique=True,
)
class Meta:
abstract = True
@ -58,10 +72,9 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
instance_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
name = models.TextField(unique=True)
name = models.TextField()
metadata = models.JSONField(default=dict)
path = models.TextField(default="", blank=True)
content = models.TextField(default="", blank=True)
path = models.TextField()
context = models.JSONField(default=dict)
last_applied = models.DateTimeField(auto_now=True)
last_applied_hash = models.TextField()
@ -71,7 +84,74 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
enabled = models.BooleanField(default=True)
managed_models = ArrayField(models.TextField(), default=list)
def retrieve_oci(self) -> str:
"""Get blueprint from an OCI registry"""
url = urlparse(self.path)
ref = "latest"
path = url.path[1:]
if ":" in url.path:
path, _, ref = path.partition(":")
client = NewClient(
f"https://{url.hostname}",
WithUserAgent(authentik_user_agent()),
WithUsernamePassword(url.username, url.password),
WithDefaultName(path),
WithDebug(True),
)
LOGGER.info("Fetching OCI manifests for blueprint", instance=self)
manifest_request = client.NewRequest(
"GET",
"/v2/<name>/manifests/<reference>",
WithReference(ref),
).SetHeader("Accept", "application/vnd.oci.image.manifest.v1+json")
try:
manifest_response = client.Do(manifest_request)
manifest_response.raise_for_status()
except RequestException as exc:
raise BlueprintRetrievalFailed(exc) from exc
manifest = manifest_response.json()
if "errors" in manifest:
raise BlueprintRetrievalFailed(manifest["errors"])
blob = None
for layer in manifest.get("layers", []):
if layer.get("mediaType", "") == OCI_MEDIA_TYPE:
blob = layer.get("digest")
LOGGER.debug("Found layer with matching media type", instance=self, blob=blob)
if not blob:
raise BlueprintRetrievalFailed("Blob not found")
blob_request = client.NewRequest(
"GET",
"/v2/<name>/blobs/<digest>",
WithDigest(blob),
)
try:
blob_response = client.Do(blob_request)
blob_response.raise_for_status()
return blob_response.text
except RequestException as exc:
raise BlueprintRetrievalFailed(exc) from exc
def retrieve(self) -> str:
"""Retrieve blueprint contents"""
if self.path.startswith("oci://"):
return self.retrieve_oci()
full_path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
with full_path.open("r", encoding="utf-8") as _file:
return _file.read()
@property
def serializer(self) -> Serializer:
from authentik.blueprints.api import BlueprintInstanceSerializer
return BlueprintInstanceSerializer
def __str__(self) -> str:
return f"Blueprint Instance {self.name}"
class Meta:
verbose_name = _("Blueprint Instance")
verbose_name_plural = _("Blueprint Instances")
unique_together = (
@ -80,41 +160,3 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
"path",
),
)
def __str__(self) -> str:
return f"Blueprint Instance {self.name}"
def retrieve_oci(self) -> str:
"""Get blueprint from an OCI registry"""
client = BlueprintOCIClient(self.path.replace(OCI_PREFIX, "https://"))
try:
manifests = client.fetch_manifests()
return client.fetch_blobs(manifests)
except OCIException as exc:
raise BlueprintRetrievalFailed(exc) from exc
def retrieve_file(self) -> str:
"""Get blueprint from path"""
try:
base = Path(CONFIG.get("blueprints_dir"))
full_path = base.joinpath(Path(self.path)).resolve()
if not str(full_path).startswith(str(base.resolve())):
raise BlueprintRetrievalFailed("Invalid blueprint path")
with full_path.open("r", encoding="utf-8") as _file:
return _file.read()
except OSError as exc:
raise BlueprintRetrievalFailed(exc) from exc
def retrieve(self) -> str:
"""Retrieve blueprint contents"""
if self.path.startswith(OCI_PREFIX):
return self.retrieve_oci()
if self.path != "":
return self.retrieve_file()
return self.content
@property
def serializer(self) -> Serializer:
from authentik.blueprints.api import BlueprintInstanceSerializer
return BlueprintInstanceSerializer
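
Note (not part of this diff): a minimal sketch of how the path format selects between the retrieval methods shown above. The file path and registry reference are hypothetical; the oci:// form may carry user:password@ basic-auth credentials and a :tag reference, which are parsed out of the URL.

# Sketch only: the path format decides how retrieve() fetches content.
# Both values below are hypothetical.
from authentik.blueprints.models import BlueprintInstance

# Plain path: read from a file underneath the configured blueprints_dir.
local = BlueprintInstance(name="local-example", path="example/local.yaml")

# oci:// path: fetched from an OCI registry via manifest and blob requests.
remote = BlueprintInstance(
    name="remote-example",
    path="oci://ghcr.io/example/my-blueprint:latest",
)

local_content = local.retrieve()   # local file read
remote_content = remote.retrieve()  # OCI manifest + blob fetch over HTTP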

View File

@ -1,18 +1,12 @@
"""blueprint Settings"""
from celery.schedules import crontab
from authentik.lib.utils.time import fqdn_rand
CELERY_BEAT_SCHEDULE = {
"blueprints_v1_discover": {
"task": "authentik.blueprints.v1.tasks.blueprints_discovery",
"task": "authentik.blueprints.v1.tasks.blueprints_discover",
"schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"),
"options": {"queue": "authentik_scheduled"},
},
"blueprints_v1_cleanup": {
"task": "authentik.blueprints.v1.tasks.clear_failed_blueprints",
"schedule": crontab(minute=fqdn_rand("blueprints_v1_cleanup"), hour="*"),
"options": {"queue": "authentik_scheduled"},
},
}

View File

@ -1,7 +1,7 @@
"""Blueprint helpers"""
from collections.abc import Callable
from functools import wraps
from pathlib import Path
from typing import Callable
from django.apps import apps
@ -21,7 +21,7 @@ def apply_blueprint(*files: str):
def wrapper(*args, **kwargs):
for file in files:
content = BlueprintInstance(path=file).retrieve()
Importer.from_string(content).apply()
Importer(content).apply()
return func(*args, **kwargs)
return wrapper
@ -39,9 +39,19 @@ def reconcile_app(app_name: str):
def wrapper(*args, **kwargs):
config = apps.get_app_config(app_name)
if isinstance(config, ManagedAppConfig):
config._on_startup_callback(None)
config.reconcile()
return func(*args, **kwargs)
return wrapper
return wrapper_outer
def load_yaml_fixture(path: str, **kwargs) -> str:
"""Load yaml fixture, optionally formatting it with kwargs"""
with open(Path(__file__).resolve().parent / Path(path), "r", encoding="utf-8") as _fixture:
fixture = _fixture.read()
try:
return fixture % kwargs
except TypeError:
return fixture
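
Note (not part of this diff): a minimal sketch of combining these test helpers in a test case, assuming they are importable from authentik.blueprints.tests. The blueprint path and test case are hypothetical.

# Sketch only: both decorators run before the test body, so the blueprint has
# been imported and the blueprints app's reconcilers have executed.
from django.test import TestCase

from authentik.blueprints.tests import apply_blueprint, reconcile_app


class TestExampleBlueprint(TestCase):  # hypothetical test case
    @apply_blueprint("example/fixture.yaml")  # hypothetical blueprint path
    @reconcile_app("authentik_blueprints")
    def test_blueprint_applied(self):
        self.assertTrue(True)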

View File

@ -1,52 +0,0 @@
version: 1
metadata:
name: test conditional fields
labels:
blueprints.goauthentik.io/description: |
Some models have conditional fields that are only allowed in blueprint contexts
- Token (key)
- Application (icon)
- Source (icon)
- Flow (background)
entries:
- model: authentik_core.token
identifiers:
identifier: "%(uid)s-token"
attrs:
key: "%(uid)s"
user: "%(user)s"
intent: api
- model: authentik_core.application
identifiers:
slug: "%(uid)s-app"
attrs:
name: "%(uid)s-app"
icon: https://goauthentik.io/img/icon.png
- model: authentik_sources_oauth.oauthsource
identifiers:
slug: "%(uid)s-source"
attrs:
name: "%(uid)s-source"
provider_type: azuread
consumer_key: "%(uid)s"
consumer_secret: "%(uid)s"
icon: https://goauthentik.io/img/icon.png
- model: authentik_flows.flow
identifiers:
slug: "%(uid)s-flow"
attrs:
name: "%(uid)s-flow"
title: "%(uid)s-flow"
designation: authentication
background: https://goauthentik.io/img/icon.png
- model: authentik_core.user
identifiers:
username: "%(uid)s"
attrs:
name: "%(uid)s"
password: "%(uid)s"
- model: authentik_core.user
identifiers:
username: "%(uid)s-no-password"
attrs:
name: "%(uid)s"

View File

@ -7,5 +7,7 @@ entries:
state: absent
- identifiers:
name: "%(id)s"
expression: |
return True
model: authentik_policies_expression.expressionpolicy
state: absent

View File

@ -4,7 +4,6 @@ entries:
pk: cb954fd4-65a5-4ad9-b1ee-180ee9559cf4
model: authentik_stages_prompt.prompt
attrs:
name: qwerweqrq
field_key: username
label: Username
type: username

Some files were not shown because too many files have changed in this diff.