Compare commits

..

2 Commits

Author SHA1 Message Date
5a7508d2e0 core: fix token expiration not being updated upon key rotation
Signed-off-by: Jens Langhammer <jens.langhammer@beryju.org>
2021-08-12 17:24:19 +02:00
9c31ea1aa6 core: fix expired tokens not being returned by API
Signed-off-by: Jens Langhammer <jens.langhammer@beryju.org>
2021-08-12 17:24:19 +02:00
1528 changed files with 53088 additions and 189977 deletions


@@ -1,20 +1,36 @@
[bumpversion]
current_version = 2022.8.2
current_version = 2021.7.3
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
serialize = {major}.{minor}.{patch}
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)
serialize =
{major}.{minor}.{patch}-{release}
{major}.{minor}.{patch}
message = release: {new_version}
tag_name = version/{new_version}
[bumpversion:file:pyproject.toml]
[bumpversion:part:release]
optional_value = stable
first_value = beta
values =
alpha
beta
stable
[bumpversion:file:website/docs/installation/docker-compose.md]
[bumpversion:file:docker-compose.yml]
[bumpversion:file:schema.yml]
[bumpversion:file:.github/workflows/release.yml]
[bumpversion:file:authentik/__init__.py]
[bumpversion:file:internal/constants/constants.go]
[bumpversion:file:web/src/constants.ts]
[bumpversion:file:website/docs/outposts/manual-deploy-docker-compose.md]
[bumpversion:file:website/docs/outposts/manual-deploy-kubernetes.md]
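For context on the `.bumpversion.cfg` hunk above: the 2021.7.3 side of the hunk adds a `release` part (alpha/beta/stable) so that pre-release versions such as `2021.7.3-beta` can be parsed and re-serialized, while plain stable versions drop the suffix. A minimal standalone sketch of that round trip, using only the regex and templates shown above (illustrative, not code from the repository):

```python
import re

# Regex copied from the `parse` option in the hunk above.
PARSE = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)")


def serialize(parts: dict) -> str:
    """Use the longer template when a release part is present, else the short one."""
    if parts["release"]:
        return "{major}.{minor}.{patch}-{release}".format(**parts)
    return "{major}.{minor}.{patch}".format(**parts)


for version in ("2021.7.3", "2021.7.3-beta"):
    parts = PARSE.match(version).groupdict()
    assert serialize(parts) == version  # both forms round-trip
```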


@@ -27,7 +27,7 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 2021.8.5]
- authentik version: [e.g. 0.10.0-stable]
- Deployment: [e.g. docker-compose, helm]
**Additional context**


@@ -20,7 +20,7 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 2021.8.5]
- authentik version: [e.g. 0.10.0-stable]
- Deployment: [e.g. docker-compose, helm]
**Additional context**


@@ -1,92 +0,0 @@
name: 'Comment usage instructions on PRs'
description: 'Comment usage instructions on PRs'
inputs:
tag:
description: "Image tag to pull"
required: true
runs:
using: "composite"
steps:
- name: Generate config
id: ev
shell: python
run: |
"""Helper script to get the actual branch name, docker safe"""
import os
from time import time
env_pr_branch = "GITHUB_HEAD_REF"
default_branch = "GITHUB_REF"
sha = "GITHUB_SHA"
branch_name = os.environ[default_branch]
if os.environ.get(env_pr_branch, "") != "":
branch_name = os.environ[env_pr_branch]
should_build = str(os.environ.get("DOCKER_USERNAME", "") != "").lower()
print("##[set-output name=branchName]%s" % branch_name)
print(
"##[set-output name=branchNameContainer]%s"
% branch_name.replace("refs/heads/", "").replace("/", "-")
)
print("##[set-output name=timestamp]%s" % int(time()))
print("##[set-output name=sha]%s" % os.environ[sha])
print("##[set-output name=shouldBuild]%s" % should_build)
import configparser
parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
version = parser.get("bumpversion", "current_version")
version_family = ".".join(version.split(".")[:-1])
print("##[set-output name=version]%s" % version)
print("##[set-output name=versionFamily]%s" % version_family)
- name: Find Comment
uses: peter-evans/find-comment@v2
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: authentik PR Installation instructions
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v2
with:
comment-id: ${{ steps.fc.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
body: |
authentik PR Installation instructions
<details>
<summary>Instructions for docker-compose</summary>
Add the following block to your `.env` file:
```shell
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
AUTHENTIK_TAG=${{ inputs.tag }}
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
<details>
<summary>Instructions for Kubernetes</summary>
Add the following block to your `values.yml` file:
```yaml
authentik:
outposts:
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
image:
repository: ghcr.io/goauthentik/dev-server
tag: ${{ inputs.tag }}
# pullPolicy: Always to ensure you always get the latest version
pullPolicy: Always
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
edit-mode: replace


@@ -1,63 +0,0 @@
name: 'Prepare docker environment variables'
description: 'Prepare docker environment variables'
outputs:
shouldBuild:
description: "Whether to build image or not"
value: ${{ steps.ev.outputs.shouldBuild }}
branchName:
description: "Branch name"
value: ${{ steps.ev.outputs.branchName }}
branchNameContainer:
description: "Branch name (for containers)"
value: ${{ steps.ev.outputs.branchNameContainer }}
timestamp:
description: "Timestamp"
value: ${{ steps.ev.outputs.timestamp }}
sha:
description: "sha"
value: ${{ steps.ev.outputs.sha }}
version:
description: "version"
value: ${{ steps.ev.outputs.version }}
versionFamily:
description: "versionFamily"
value: ${{ steps.ev.outputs.versionFamily }}
runs:
using: "composite"
steps:
- name: Generate config
id: ev
shell: python
run: |
"""Helper script to get the actual branch name, docker safe"""
import os
from time import time
env_pr_branch = "GITHUB_HEAD_REF"
default_branch = "GITHUB_REF"
sha = "GITHUB_SHA"
branch_name = os.environ[default_branch]
if os.environ.get(env_pr_branch, "") != "":
branch_name = os.environ[env_pr_branch]
should_build = str(os.environ.get("DOCKER_USERNAME", "") != "").lower()
print("##[set-output name=branchName]%s" % branch_name)
print(
"##[set-output name=branchNameContainer]%s"
% branch_name.replace("refs/heads/", "").replace("/", "-")
)
print("##[set-output name=timestamp]%s" % int(time()))
print("##[set-output name=sha]%s" % os.environ[sha])
print("##[set-output name=shouldBuild]%s" % should_build)
import configparser
parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
version = parser.get("bumpversion", "current_version")
version_family = ".".join(version.split(".")[:-1])
print("##[set-output name=version]%s" % version)
print("##[set-output name=versionFamily]%s" % version_family)
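For reference: the inline script in this composite action publishes its results as step outputs through the legacy `##[set-output name=…]` logging command, and the two non-trivial values it derives are the container-safe branch name and the version family read from `.bumpversion.cfg`. A standalone sketch of those two transformations (hypothetical helper functions mirroring the logic above, not part of the repository):

```python
def container_safe_branch(ref: str) -> str:
    # "refs/heads/web/fix-build" -> "web-fix-build"; slashes become dashes
    return ref.replace("refs/heads/", "").replace("/", "-")


def version_family(version: str) -> str:
    # "2021.7.3" -> "2021.7" (drop the patch component)
    return ".".join(version.split(".")[:-1])


assert container_safe_branch("refs/heads/web/fix-build") == "web-fix-build"
assert version_family("2021.7.3") == "2021.7"
```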


@@ -1,45 +0,0 @@
name: 'Setup authentik testing environemnt'
description: 'Setup authentik testing environemnt'
runs:
using: "composite"
steps:
- name: Install poetry
shell: bash
run: |
pipx install poetry || true
sudo apt update
sudo apt install -y libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry
uses: actions/setup-python@v3
with:
python-version: '3.10'
cache: 'poetry'
- name: Setup node
uses: actions/setup-node@v3.1.0
with:
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Setup dependencies
shell: bash
run: |
docker-compose -f .github/actions/setup/docker-compose.yml up -d
poetry env use python3.10
poetry install
npm install -g pyright@1.1.136
- name: Generate config
shell: poetry run python {0}
run: |
from authentik.lib.generators import generate_id
from yaml import safe_dump
with open("local.env.yml", "w") as _config:
safe_dump(
{
"log_level": "debug",
"secret_key": generate_id(),
},
_config,
default_flow_style=False,
)


@@ -1,3 +0,0 @@
keypair
keypairs
hass

.github/dependabot.yml

@@ -1,62 +1,50 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "ci:"
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: npm
directory: "/web"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "web:"
- package-ecosystem: npm
directory: "/website"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "website:"
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: docker
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
assignees:
- BeryJu
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
assignees:
- BeryJu
- package-ecosystem: npm
directory: "/web"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
assignees:
- BeryJu
- package-ecosystem: npm
directory: "/website"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
assignees:
- BeryJu
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
assignees:
- BeryJu
- package-ecosystem: docker
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
assignees:
- BeryJu


@@ -1,7 +1,7 @@
<!--
👋 Hello there! Welcome.
Please check the [Contributing guidelines](https://github.com/goauthentik/authentik/blob/main/CONTRIBUTING.md#how-can-i-contribute).
Please check the [Contributing guidelines](https://github.com/goauthentik/authentik/blob/master/CONTRIBUTING.md#how-can-i-contribute).
-->
# Details

.github/stale.yml

@@ -7,10 +7,6 @@ exemptLabels:
- pinned
- security
- pr_wanted
- enhancement
- bug/confirmed
- enhancement/confirmed
- question
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had


@@ -1,243 +0,0 @@
name: authentik-ci-main
on:
push:
branches:
- main
- next
- version-*
paths-ignore:
- website
pull_request:
branches:
- main
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
lint:
strategy:
fail-fast: false
matrix:
job:
- pylint
- black
- isort
- bandit
- pyright
- pending-migrations
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run job
run: poetry run make ci-${{ matrix.job }}
test-migrations:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run migrations
run: poetry run python -m lifecycle.migrate
test-migrations-from-stable:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Setup authentik env
uses: ./.github/actions/setup
- name: checkout stable
run: |
# Copy current, latest config to local
cp authentik/lib/default.yml local.env.yml
cp -R .github ..
cp -R scripts ..
git checkout $(git describe --abbrev=0 --match 'version/*')
rm -rf .github/ scripts/
mv ../.github ../scripts .
- name: Setup authentik env (ensure stable deps are installed)
uses: ./.github/actions/setup
- name: run migrations to stable
run: poetry run python -m lifecycle.migrate
- name: checkout current code
run: |
set -x
git fetch
git reset --hard HEAD
git clean -d -fx .
git checkout $GITHUB_SHA
poetry install
- name: Setup authentik env (ensure latest deps are installed)
uses: ./.github/actions/setup
- name: migrate to latest
run: poetry run python -m lifecycle.migrate
test-unittest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: testspace-com/setup-testspace@v1
with:
domain: ${{github.repository_owner}}
- name: run unittest
run: |
poetry run make test
poetry run coverage xml
- name: run testspace
if: ${{ always() }}
run: |
testspace [unittest]unittest.xml --link=codecov
- if: ${{ always() }}
uses: codecov/codecov-action@v3
test-integration:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: testspace-com/setup-testspace@v1
with:
domain: ${{github.repository_owner}}
- name: Create k8s Kind Cluster
uses: helm/kind-action@v1.3.0
- name: run integration
run: |
poetry run make test-integration
poetry run coverage xml
- name: run testspace
if: ${{ always() }}
run: |
testspace [integration]unittest.xml --link=codecov
- if: ${{ always() }}
uses: codecov/codecov-action@v3
test-e2e-provider:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: testspace-com/setup-testspace@v1
with:
domain: ${{github.repository_owner}}
- name: Setup authentik env
run: |
docker-compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v3
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web
run: |
npm ci
make -C .. gen-client-ts
npm run build
- name: run e2e
run: |
poetry run make test-e2e-provider
poetry run coverage xml
- name: run testspace
if: ${{ always() }}
run: |
testspace [e2e-provider]unittest.xml --link=codecov
- if: ${{ always() }}
uses: codecov/codecov-action@v3
test-e2e-rest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: testspace-com/setup-testspace@v1
with:
domain: ${{github.repository_owner}}
- name: Setup authentik env
run: |
docker-compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v3
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web/
run: |
npm ci
make -C .. gen-client-ts
npm run build
- name: run e2e
run: |
poetry run make test-e2e-rest
poetry run coverage xml
- name: run testspace
if: ${{ always() }}
run: |
testspace [e2e-rest]unittest.xml --link=codecov
- if: ${{ always() }}
uses: codecov/codecov-action@v3
ci-core-mark:
needs:
- lint
- test-migrations
- test-migrations-from-stable
- test-unittest
- test-integration
- test-e2e-rest
- test-e2e-provider
runs-on: ubuntu-latest
steps:
- run: echo mark
build:
needs: ci-core-mark
runs-on: ubuntu-latest
timeout-minutes: 120
strategy:
fail-fast: false
matrix:
arch:
- 'linux/amd64'
steps:
- uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
- name: Login to Container Registry
uses: docker/login-action@v2
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: ${{ matrix.arch }}
- name: Comment on PR
if: github.event_name == 'pull_request'
continue-on-error: true
uses: ./.github/actions/comment-pr-instructions
with:
tag: gh-${{ steps.ev.outputs.branchNameContainer }}


@@ -1,135 +0,0 @@
name: authentik-ci-outpost
on:
push:
branches:
- main
- next
- version-*
pull_request:
branches:
- main
jobs:
lint-golint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version: "^1.17"
- name: Prepare and generate API
run: |
# Create folder structure for go embeds
mkdir -p web/dist
mkdir -p website/help
touch web/dist/test website/help/test
- name: Generate API
run: make gen-client-go
- name: golangci-lint
uses: golangci/golangci-lint-action@v3
test-unittest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version: "^1.17"
- name: Generate API
run: make gen-client-go
- name: Go unittests
run: |
go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./...
ci-outpost-mark:
needs:
- lint-golint
- test-unittest
runs-on: ubuntu-latest
steps:
- run: echo mark
build:
timeout-minutes: 120
needs:
- ci-outpost-mark
strategy:
fail-fast: false
matrix:
type:
- proxy
- ldap
arch:
- 'linux/amd64'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
- name: Login to Container Registry
uses: docker/login-action@v2
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate API
run: make gen-client-go
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
file: ${{ matrix.type }}.Dockerfile
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: ${{ matrix.arch }}
build-outpost-binary:
timeout-minutes: 120
needs:
- ci-outpost-mark
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
type:
- proxy
- ldap
goos: [linux]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version: "^1.17"
- uses: actions/setup-node@v3.4.1
with:
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Generate API
run: make gen-client-go
- name: Build web
working-directory: web/
run: |
npm ci
npm run build-proxy
- name: Build outpost
run: |
set -x
export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }}
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- uses: actions/upload-artifact@v3
with:
name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
path: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}


@@ -1,92 +0,0 @@
name: authentik-ci-web
on:
push:
branches:
- main
- next
- version-*
pull_request:
branches:
- main
jobs:
lint-eslint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.4.1
with:
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: Eslint
working-directory: web/
run: npm run lint
lint-prettier:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.4.1
with:
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: prettier
working-directory: web/
run: npm run prettier-check
lint-lit-analyse:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.4.1
with:
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: |
npm ci
# lit-analyse doesn't understand path rewrites, so make it
# belive it's an actual module
cd node_modules/@goauthentik
ln -s ../../src/ web
- name: Generate API
run: make gen-client-ts
- name: lit-analyse
working-directory: web/
run: npm run lit-analyse
ci-web-mark:
needs:
- lint-eslint
- lint-prettier
- lint-lit-analyse
runs-on: ubuntu-latest
steps:
- run: echo mark
build:
needs:
- ci-web-mark
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.4.1
with:
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: build
working-directory: web/
run: npm run build


@@ -1,33 +0,0 @@
name: authentik-ci-website
on:
push:
branches:
- main
- next
- version-*
pull_request:
branches:
- main
jobs:
lint-prettier:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.4.1
with:
node-version: '16'
cache: 'npm'
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: prettier
working-directory: website/
run: npm run prettier-check
ci-website-mark:
needs:
- lint-prettier
runs-on: ubuntu-latest
steps:
- run: echo mark


@@ -1,60 +0,0 @@
name: "CodeQL"
on:
push:
branches: [ main, '*', next, version* ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ main ]
schedule:
- cron: '30 6 * * 5'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'go', 'javascript', 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more:
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
steps:
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2


@@ -1,22 +0,0 @@
name: ghcr-retention
on:
schedule:
- cron: '0 0 * * *' # every day at midnight
workflow_dispatch:
jobs:
clean-ghcr:
name: Delete old unused container images
runs-on: ubuntu-latest
steps:
- name: Delete 'dev' containers older than a week
uses: sondrelg/container-retention-policy@v1
with:
image-names: dev-server,dev-ldap,dev-proxy
cut-off: One week ago UTC
account-type: org
org-name: goauthentik
untagged-only: false
token: ${{ secrets.GHCR_CLEANUP_TOKEN }}
skip-tags: gh-next,gh-main


@@ -1,171 +0,0 @@
name: authentik-on-release
on:
release:
types: [published, created]
jobs:
build-server:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
- name: Docker Login Registry
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik:${{ steps.ev.outputs.version }},
beryju/authentik:${{ steps.ev.outputs.versionFamily }},
beryju/authentik:latest,
ghcr.io/goauthentik/server:${{ steps.ev.outputs.version }},
ghcr.io/goauthentik/server:${{ steps.ev.outputs.versionFamily }},
ghcr.io/goauthentik/server:latest
platforms: linux/amd64,linux/arm64
context: .
build-outpost:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
type:
- proxy
- ldap
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version: "^1.17"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
- name: Docker Login Registry
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik-${{ matrix.type }}:${{ steps.ev.outputs.version }},
beryju/authentik-${{ matrix.type }}:${{ steps.ev.outputs.versionFamily }},
beryju/authentik-${{ matrix.type }}:latest,
ghcr.io/goauthentik/${{ matrix.type }}:${{ steps.ev.outputs.version }},
ghcr.io/goauthentik/${{ matrix.type }}:${{ steps.ev.outputs.versionFamily }},
ghcr.io/goauthentik/${{ matrix.type }}:latest
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
build-outpost-binary:
timeout-minutes: 120
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
type:
- proxy
- ldap
goos: [linux, darwin]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version: "^1.17"
- uses: actions/setup-node@v3.4.1
with:
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Build web
working-directory: web/
run: |
npm ci
npm run build-proxy
- name: Build outpost
run: |
set -x
export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }}
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- name: Upload binaries to release
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
tag: ${{ github.ref }}
test-release:
needs:
- build-server
- build-outpost
- build-outpost-binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Run test suite in final docker images
run: |
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker-compose pull -q
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test-all
sentry-release:
needs:
- build-server
- build-outpost
- build-outpost-binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
- name: Get static files from docker image
run: |
docker pull ghcr.io/goauthentik/server:latest
container=$(docker container create ghcr.io/goauthentik/server:latest)
docker cp ${container}:web/ .
- name: Create a Sentry.io release
uses: getsentry/action-release@v1
continue-on-error: true
if: ${{ github.event_name == 'release' }}
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: beryjuorg
SENTRY_PROJECT: authentik
SENTRY_URL: https://sentry.beryju.org
with:
version: authentik@${{ steps.ev.outputs.version }}
environment: beryjuorg-prod
sourcemaps: './web/dist'
url_prefix: '~/static/dist'

.github/workflows/release.yml

@@ -0,0 +1,182 @@
name: authentik-on-release
on:
release:
types: [published, created]
push:
branches:
- version-*
jobs:
# Build
build-server:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Docker Login Registry
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Building Docker Image
uses: docker/build-push-action@v2
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik:2021.7.3,
beryju/authentik:latest,
ghcr.io/goauthentik/server:2021.7.3,
ghcr.io/goauthentik/server:latest
platforms: linux/amd64,linux/arm64
context: .
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.7.3', 'rc') }}
run: |
docker pull beryju/authentik:latest
docker tag beryju/authentik:latest beryju/authentik:stable
docker push beryju/authentik:stable
docker pull ghcr.io/goauthentik/server:latest
docker tag ghcr.io/goauthentik/server:latest ghcr.io/goauthentik/server:stable
docker push ghcr.io/goauthentik/server:stable
build-proxy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2
with:
go-version: "^1.15"
- name: Set up QEMU
uses: docker/setup-qemu-action@v1.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Docker Login Registry
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Building Docker Image
uses: docker/build-push-action@v2
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik-proxy:2021.7.3,
beryju/authentik-proxy:latest,
ghcr.io/goauthentik/proxy:2021.7.3,
ghcr.io/goauthentik/proxy:latest
file: proxy.Dockerfile
platforms: linux/amd64,linux/arm64
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.7.3', 'rc') }}
run: |
docker pull beryju/authentik-proxy:latest
docker tag beryju/authentik-proxy:latest beryju/authentik-proxy:stable
docker push beryju/authentik-proxy:stable
docker pull ghcr.io/goauthentik/proxy:latest
docker tag ghcr.io/goauthentik/proxy:latest ghcr.io/goauthentik/proxy:stable
docker push ghcr.io/goauthentik/proxy:stable
build-ldap:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-go@v2
with:
go-version: "^1.15"
- name: Set up QEMU
uses: docker/setup-qemu-action@v1.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Docker Login Registry
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Building Docker Image
uses: docker/build-push-action@v2
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik-ldap:2021.7.3,
beryju/authentik-ldap:latest,
ghcr.io/goauthentik/ldap:2021.7.3,
ghcr.io/goauthentik/ldap:latest
file: ldap.Dockerfile
platforms: linux/amd64,linux/arm64
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.7.3', 'rc') }}
run: |
docker pull beryju/authentik-ldap:latest
docker tag beryju/authentik-ldap:latest beryju/authentik-ldap:stable
docker push beryju/authentik-ldap:stable
docker pull ghcr.io/goauthentik/ldap:latest
docker tag ghcr.io/goauthentik/ldap:latest ghcr.io/goauthentik/ldap:stable
docker push ghcr.io/goauthentik/ldap:stable
test-release:
needs:
- build-server
- build-proxy
- build-ldap
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Run test suite in final docker images
run: |
sudo apt-get install -y pwgen
echo "PG_PASS=$(pwgen 40 1)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(pwgen 50 1)" >> .env
docker-compose pull -q
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test
sentry-release:
if: ${{ github.event_name == 'release' }}
needs:
- test-release
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2.3.0
with:
node-version: 12.x
- name: Build web api client and web ui
run: |
export NODE_ENV=production
make gen-web
cd web
npm i
npm run build
- name: Create a Sentry.io release
uses: getsentry/action-release@v1
if: ${{ github.event_name == 'release' }}
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: beryjuorg
SENTRY_PROJECT: authentik
SENTRY_URL: https://sentry.beryju.org
with:
version: authentik@2021.7.3
environment: beryjuorg-prod
sourcemaps: './web/dist'
url_prefix: '~/static/dist'
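A note on the three "Building Docker Image (stable)" steps in the workflow above: they all gate on the same condition, namely that the trigger is a release and that the hard-coded version string (substituted by bump2version) contains no `rc` marker. The check boils down to the following (a sketch for illustration, not code from the repository):

```python
def should_tag_stable(event_name: str, version: str) -> bool:
    # Mirrors: github.event_name == 'release' && !contains('<version>', 'rc')
    return event_name == "release" and "rc" not in version


assert should_tag_stable("release", "2021.7.3") is True
assert should_tag_stable("release", "2021.8.1-rc1") is False  # pre-release: no :stable re-tag
assert should_tag_stable("push", "2021.7.3") is False         # version-* branch push: no re-tag
```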


@@ -10,21 +10,24 @@ jobs:
name: Create Release from Tag
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- name: Pre-release test
run: |
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker buildx install
docker build -t testing:latest .
echo "AUTHENTIK_IMAGE=testing" >> .env
sudo apt-get install -y pwgen
echo "AUTHENTIK_TAG=latest" >> .env
echo "PG_PASS=$(pwgen 40 1)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(pwgen 50 1)" >> .env
docker-compose pull -q
docker build \
--no-cache \
-t ghcr.io/goauthentik/server:latest \
-f Dockerfile .
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test-all
docker-compose run -u root server test
- name: Extract version number
id: get_version
uses: actions/github-script@v6
uses: actions/github-script@v4.0.2
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |


@@ -1,36 +0,0 @@
name: authentik-backend-translate-compile
on:
push:
branches: [ main ]
paths:
- '/locale/'
pull_request:
paths:
- '/locale/'
workflow_dispatch:
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
compile:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run compile
run: poetry run ./manage.py compilemessages
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
id: cpr
with:
token: ${{ secrets.GITHUB_TOKEN }}
branch: compile-backend-translation
commit-message: "core: compile backend translations"
title: "core: compile backend translations"
body: "core: compile backend translations"
delete-branch: true
signoff: true


@@ -1,42 +0,0 @@
name: authentik-web-api-publish
on:
push:
branches: [ main ]
paths:
- 'schema.yml'
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@v3.4.1
with:
node-version: '16'
registry-url: 'https://registry.npmjs.org'
- name: Generate API Client
run: make gen-client-ts
- name: Publish package
working-directory: gen-ts-api/
run: |
npm ci
npm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}
- name: Upgrade /web
working-directory: web/
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
id: cpr
with:
token: ${{ secrets.GITHUB_TOKEN }}
branch: update-web-api-client
commit-message: "web: bump API Client version"
title: "web: bump API Client version"
body: "web: bump API Client version"
delete-branch: true
signoff: true

.gitignore

@@ -66,9 +66,7 @@ coverage.xml
unittest.xml
# Translations
# Have to include binary mo files as they are annoying to compile at build time
# since a full postgres and redis instance are required
# *.mo
*.mo
# Django stuff:
@@ -193,6 +191,7 @@ pip-selfcheck.json
# End of https://www.gitignore.io/api/python,django
/static/
local.env.yml
.vscode/
# Selenium Screenshots
selenium_screenshots/
@@ -201,4 +200,4 @@ media/
*mmdb
.idea/
/gen-*/
/api/

.vscode/settings.json

@@ -1,34 +0,0 @@
{
"cSpell.words": [
"akadmin",
"asgi",
"authentik",
"authn",
"goauthentik",
"jwks",
"oidc",
"openid",
"plex",
"saml",
"totp",
"webauthn",
"traefik",
"passwordless",
"kubernetes"
],
"python.linting.pylintEnabled": true,
"todo-tree.tree.showCountsInTree": true,
"todo-tree.tree.showBadges": true,
"python.formatting.provider": "black",
"yaml.customTags": [
"!Find sequence",
"!KeyOf scalar"
],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.importModuleSpecifierEnding": "index",
"typescript.tsdk": "./web/node_modules/typescript/lib",
"typescript.enablePromptUseWorkspaceTsdk": true,
"yaml.schemas": {
"./blueprints/schema.json": "blueprints/**/*.yaml"
}
}

.vscode/tasks.json

@@ -1,86 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "authentik[core]: format & test",
"command": "poetry",
"args": [
"run",
"make"
],
"group": "build",
},
{
"label": "authentik[core]: run",
"command": "poetry",
"args": [
"run",
"make",
"run",
],
"group": "build",
"presentation": {
"panel": "dedicated",
"group": "running"
},
},
{
"label": "authentik[web]: format",
"command": "make",
"args": ["web"],
"group": "build",
},
{
"label": "authentik[web]: watch",
"command": "make",
"args": ["web-watch"],
"group": "build",
"presentation": {
"panel": "dedicated",
"group": "running"
},
},
{
"label": "authentik: install",
"command": "make",
"args": ["install"],
"group": "build",
},
{
"label": "authentik: i18n-extract",
"command": "poetry",
"args": [
"run",
"make",
"i18n-extract"
],
"group": "build",
},
{
"label": "authentik[website]: format",
"command": "make",
"args": ["website"],
"group": "build",
},
{
"label": "authentik[website]: watch",
"command": "make",
"args": ["website-watch"],
"group": "build",
"presentation": {
"panel": "dedicated",
"group": "running"
},
},
{
"label": "authentik[api]: generate",
"command": "poetry",
"args": [
"run",
"make",
"gen"
],
"group": "build"
},
]
}


@@ -60,7 +60,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
hello@goauthentik.io.
hello@beryju.org.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the


@@ -11,8 +11,8 @@ The following is a set of guidelines for contributing to authentik and its compo
[I don't want to read this whole thing, I just have a question!!!](#i-dont-want-to-read-this-whole-thing-i-just-have-a-question)
[What should I know before I get started?](#what-should-i-know-before-i-get-started)
* [The components](#the-components)
* [authentik's structure](#authentiks-structure)
* [Atom and Packages](#atom-and-packages)
* [Atom Design Decisions](#design-decisions)
[How Can I Contribute?](#how-can-i-contribute)
* [Reporting Bugs](#reporting-bugs)
@@ -22,16 +22,21 @@ The following is a set of guidelines for contributing to authentik and its compo
[Styleguides](#styleguides)
* [Git Commit Messages](#git-commit-messages)
* [Python Styleguide](#python-styleguide)
* [JavaScript Styleguide](#javascript-styleguide)
* [CoffeeScript Styleguide](#coffeescript-styleguide)
* [Specs Styleguide](#specs-styleguide)
* [Documentation Styleguide](#documentation-styleguide)
[Additional Notes](#additional-notes)
* [Issue and Pull Request Labels](#issue-and-pull-request-labels)
## Code of Conduct
Basically, don't be a dickhead. This is an open-source non-profit project, that is made in the free time of Volunteers. If there's something you dislike or think can be done better, tell us! We'd love to hear any suggestions for improvement.
## I don't want to read this whole thing I just have a question!!!
Either [create a question on GitHub](https://github.com/goauthentik/authentik/issues/new?assignees=&labels=question&template=question.md&title=) or join [the Discord server](https://goauthentik.io/discord)
Either [create a question on GitHub](https://github.com/goauthentik/authentik/issues/new?assignees=&labels=question&template=question.md&title=) or join [the Discord server](https://discord.gg/jg33eMhnj6)
## What should I know before I get started?
@@ -117,7 +122,7 @@ This section guides you through submitting a bug report for authentik. Following
Whenever authentik encounters an error, it will be logged as an Event with the type `system_exception`. This event type has a button to directly open a pre-filled GitHub issue form.
This form will have the full stack trace of the error that occurred and shouldn't contain any sensitive data.
This form will have the full stack trace of the error that ocurred and shouldn't contain any sensitive data.
### Suggesting Enhancements
@@ -131,7 +136,7 @@ When you are creating an enhancement suggestion, please fill in [the template](h
authentik can be run locally, all though depending on which part you want to work on, different pre-requisites are required.
This is documented in the [developer docs](https://goauthentik.io/developer-docs/?utm_source=github)
This is documented in the [developer docs](https://goauthentik.io/developer-docs/)
### Pull Requests


@@ -1,100 +1,113 @@
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as website-builder
# Stage 1: Lock python dependencies
FROM python:3.9-slim-buster as locker
COPY ./website /work/website/
COPY ./blueprints /work/blueprints/
COPY ./Pipfile /app/
COPY ./Pipfile.lock /app/
WORKDIR /app/
RUN pip install pipenv && \
pipenv lock -r > requirements.txt && \
pipenv lock -r --dev-only > requirements-dev.txt
# Stage 2: Build website
FROM node as website-builder
COPY ./website /static/
ENV NODE_ENV=production
WORKDIR /work/website
RUN npm ci && npm run build-docs-only
RUN cd /static && npm i && npm run build-docs-only
# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as web-builder
# Stage 3: Build web API
FROM openapitools/openapi-generator-cli as web-api-builder
COPY ./web /work/web/
COPY ./website /work/website/
COPY ./schema.yml /local/schema.yml
RUN docker-entrypoint.sh generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/web/api \
--additional-properties=typescriptThreePlus=true,supportsES6=true,npmName=authentik-api,npmVersion=1.0.0
# Stage 3: Generate API Client
FROM openapitools/openapi-generator-cli as go-api-builder
COPY ./schema.yml /local/schema.yml
RUN docker-entrypoint.sh generate \
--git-host goauthentik.io \
--git-repo-id outpost \
--git-user-id api \
-i /local/schema.yml \
-g go \
-o /local/api \
--additional-properties=packageName=api,enumClassPrefix=true,useOneOfDiscriminatorLookup=true && \
rm -f /local/api/go.mod /local/api/go.sum
# Stage 4: Build webui
FROM node as web-builder
COPY ./web /static/
COPY --from=web-api-builder /local/web/api /static/api
ENV NODE_ENV=production
WORKDIR /work/web
RUN npm ci && npm run build
RUN cd /static && npm i && npm run build
# Stage 3: Poetry to requirements.txt export
FROM docker.io/python:3.10.6-slim-bullseye AS poetry-locker
WORKDIR /work
COPY ./pyproject.toml /work
COPY ./poetry.lock /work
RUN pip install --no-cache-dir poetry && \
poetry export -f requirements.txt --output requirements.txt && \
poetry export -f requirements.txt --dev --output requirements-dev.txt
# Stage 4: Build go proxy
FROM docker.io/golang:1.19.0-bullseye AS go-builder
# Stage 5: Build go proxy
FROM golang:1.16.6 AS builder
WORKDIR /work
COPY --from=web-builder /work/web/robots.txt /work/web/robots.txt
COPY --from=web-builder /work/web/security.txt /work/web/security.txt
COPY --from=web-builder /static/robots.txt /work/web/robots.txt
COPY --from=web-builder /static/security.txt /work/web/security.txt
COPY --from=web-builder /static/dist/ /work/web/dist/
COPY --from=web-builder /static/authentik/ /work/web/authentik/
COPY --from=website-builder /static/help/ /work/website/help/
COPY --from=go-api-builder /local/api api
COPY ./cmd /work/cmd
COPY ./web/static.go /work/web/static.go
COPY ./website/static.go /work/website/static.go
COPY ./internal /work/internal
COPY ./go.mod /work/go.mod
COPY ./go.sum /work/go.sum
RUN go build -o /work/authentik ./cmd/server/main.go
# Stage 5: Run
FROM docker.io/python:3.10.6-slim-bullseye AS final-image
LABEL org.opencontainers.image.url https://goauthentik.io
LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik
# Stage 6: Run
FROM python:3.9-slim-buster
WORKDIR /
COPY --from=locker /app/requirements.txt /
COPY --from=locker /app/requirements-dev.txt /
ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
COPY --from=poetry-locker /work/requirements.txt /
COPY --from=poetry-locker /work/requirements-dev.txt /
RUN apt-get update && \
# Required for installing pip packages
apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev && \
# Required for runtime
apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
# Required for bootstrap & healtcheck
apt-get install -y --no-install-recommends curl runit && \
pip install --no-cache-dir -r /requirements.txt && \
apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev && \
apt-get install -y --no-install-recommends curl ca-certificates gnupg git runit && \
curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
echo "deb http://apt.postgresql.org/pub/repos/apt buster-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
apt-get update && \
apt-get install -y --no-install-recommends libpq-dev postgresql-client build-essential libxmlsec1-dev pkg-config libmaxminddb0 && \
pip install -r /requirements.txt --no-cache-dir && \
apt-get remove --purge -y build-essential git && \
apt-get autoremove --purge -y && \
apt-get clean && \
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
mkdir -p /certs /media /blueprints && \
mkdir -p /authentik/.ssh && \
chown authentik:authentik /certs /media /authentik/.ssh
mkdir /backups && \
chown authentik:authentik /backups
COPY ./authentik/ /authentik
COPY ./pyproject.toml /
COPY ./xml /xml
COPY ./tests /tests
COPY ./manage.py /
COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle
COPY --from=go-builder /work/authentik /authentik-proxy
COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=website-builder /work/website/help/ /website/help/
USER 1000
COPY --from=builder /work/authentik /authentik-proxy
USER authentik
ENV TMPDIR /dev/shm/
ENV PYTHONUNBUFFERED 1
ENV PATH "/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/lifecycle"
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "/lifecycle/ak", "healthcheck" ]
ENTRYPOINT [ "/usr/local/bin/dumb-init", "--", "/lifecycle/ak" ]
ENV PYTHONUBUFFERED 1
ENTRYPOINT [ "/lifecycle/bootstrap.sh" ]

Makefile

@@ -2,197 +2,67 @@
PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.npm_version)
all: lint-fix lint test gen web
all: lint-fix lint test gen
test-integration:
coverage run manage.py test tests/integration
k3d cluster create || exit 0
k3d kubeconfig write -o ~/.kube/config --overwrite
coverage run manage.py test -v 3 tests/integration
test-e2e-provider:
coverage run manage.py test tests/e2e/test_provider*
test-e2e-rest:
coverage run manage.py test tests/e2e/test_flows* tests/e2e/test_source*
test-go:
go test -timeout 0 -v -race -cover ./...
test-docker:
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker-compose pull -q
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test
rm -f .env
test-e2e:
coverage run manage.py test --failfast -v 3 tests/e2e
test:
coverage run manage.py test authentik
coverage run manage.py test -v 3 authentik
coverage html
coverage report
lint-fix:
isort authentik tests scripts lifecycle
black authentik tests scripts lifecycle
codespell -I .github/codespell-words.txt -S 'web/src/locales/**' -w \
authentik \
internal \
cmd \
web/src \
website/src \
website/docs \
website/developer-docs
isort authentik tests lifecycle
black authentik tests lifecycle
lint:
pylint authentik tests lifecycle
pyright authentik tests lifecycle
bandit -r authentik tests lifecycle -x node_modules
golangci-lint run -v
pylint authentik tests lifecycle
gen-build:
./manage.py spectacular --file schema.yml
gen-clean:
rm -rf web/api/src/
rm -rf api/
gen-web:
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
openapitools/openapi-generator-cli generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/web/api \
--additional-properties=typescriptThreePlus=true,supportsES6=true,npmName=authentik-api,npmVersion=1.0.0
cd web/api && npx tsc
gen-outpost:
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
openapitools/openapi-generator-cli generate \
--git-host goauthentik.io \
--git-repo-id outpost \
--git-user-id api \
-i /local/schema.yml \
-g go \
-o /local/api \
--additional-properties=packageName=api,enumClassPrefix=true,useOneOfDiscriminatorLookup=true
rm -f api/go.mod api/go.sum
gen: gen-build gen-clean gen-web gen-outpost
migrate:
python -m lifecycle.migrate
run:
go run -v cmd/server/main.go
i18n-extract: i18n-extract-core web-extract
i18n-extract-core:
ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en
#########################
## API Schema
#########################
gen-build:
AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
AUTHENTIK_DEBUG=true ak spectacular --file schema.yml
gen-diff:
git show $(shell git tag -l | tail -n 1):schema.yml > old_schema.yml
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-diff:2.0.1 \
--markdown /local/diff.md \
/local/old_schema.yml /local/schema.yml
rm old_schema.yml
gen-clean:
rm -rf web/api/src/
rm -rf api/
gen-client-ts:
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/gen-ts-api \
--additional-properties=typescriptThreePlus=true,supportsES6=true,npmName=@goauthentik/api,npmVersion=${NPM_VERSION}
mkdir -p web/node_modules/@goauthentik/api
\cp -fv scripts/web_api_readme.md gen-ts-api/README.md
cd gen-ts-api && npm i
\cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api
gen-client-go:
wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O config.yaml
mkdir -p templates
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O templates/README.mustache
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O templates/go.mod.mustache
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
-i /local/schema.yml \
-g go \
-o /local/gen-go-api \
-c /local/config.yaml
go mod edit -replace goauthentik.io/api/v3=./gen-go-api
rm -rf config.yaml ./templates/
gen-dev-config:
python -m scripts.generate_config
gen: gen-build gen-clean gen-client-ts
#########################
## Web
#########################
web-build: web-install
cd web && npm run build
web: web-lint-fix web-lint
web-install:
cd web && npm ci
web-watch:
rm -rf web/dist/
mkdir web/dist/
touch web/dist/.gitkeep
cd web && npm run watch
web-lint-fix:
cd web && npm run prettier
web-lint:
cd web && npm run lint
cd web && npm run lit-analyse
web-extract:
cd web && npm run extract
#########################
## Website
#########################
website: website-lint-fix
website-install:
cd website && npm ci
website-lint-fix:
cd website && npm run prettier
website-watch:
cd website && npm run watch
# These targets are use by GitHub actions to allow usage of matrix
# which makes the YAML File a lot smaller
ci--meta-debug:
python -V
node --version
ci-pylint: ci--meta-debug
pylint authentik tests lifecycle
ci-black: ci--meta-debug
black --check authentik tests lifecycle
ci-isort: ci--meta-debug
isort --check authentik tests lifecycle
ci-bandit: ci--meta-debug
bandit -r authentik tests lifecycle
ci-pyright: ci--meta-debug
pyright e2e lifecycle
ci-pending-migrations: ci--meta-debug
ak makemigrations --check
install: web-install website-install
poetry install
dev-reset:
dropdb -U postgres -h localhost authentik
createdb -U postgres -h localhost authentik
redis-cli -n 0 flushall
redis-cli -n 1 flushall
redis-cli -n 2 flushall
redis-cli -n 3 flushall
make migrate

Pipfile

@@ -0,0 +1,66 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true
[packages]
boto3 = "*"
celery = "*"
channels = "*"
channels-redis = "*"
dacite = "*"
defusedxml = "*"
django = "*"
django-dbbackup = { git = 'https://github.com/django-dbbackup/django-dbbackup.git', ref = '9d1909c30a3271c8c9c8450add30d6e0b996e145' }
django-filter = "*"
django-guardian = "*"
django-model-utils = "*"
django-otp = "*"
django-prometheus = "*"
django-redis = "*"
django-storages = "*"
djangorestframework = "*"
djangorestframework-guardian = "*"
docker = "*"
drf-spectacular = "*"
facebook-sdk = "*"
geoip2 = "*"
gunicorn = "*"
kubernetes = "*"
ldap3 = "*"
lxml = ">=4.6.3"
packaging = "*"
psycopg2-binary = "*"
pycryptodome = "*"
pyjwt = "*"
pyyaml = "*"
requests-oauthlib = "*"
sentry-sdk = "*"
service_identity = "*"
structlog = "*"
swagger-spec-validator = "*"
twisted = "==20.3.0"
urllib3 = {extras = ["secure"],version = "*"}
uvicorn = {extras = ["standard"],version = "*"}
webauthn = "*"
xmlsec = "*"
duo-client = "*"
ua-parser = "*"
deepmerge = "*"
colorama = "*"
[requires]
python_version = "3.9"
[dev-packages]
bandit = "*"
black = "==21.5b1"
bump2version = "*"
colorama = "*"
coverage = "*"
pylint = "*"
pylint-django = "*"
pytest = "*"
pytest-django = "*"
selenium = "*"
requests-mock = "*"

Pipfile.lock (generated)

File diff suppressed because it is too large.


@@ -4,15 +4,14 @@
---
[![Join Discord](https://img.shields.io/discord/809154715984199690?label=Discord&style=for-the-badge)](https://goauthentik.io/discord)
[![GitHub Workflow Status](https://img.shields.io/github/workflow/status/goauthentik/authentik/authentik-ci-main?label=core%20build&style=for-the-badge)](https://github.com/goauthentik/authentik/actions/workflows/ci-main.yml)
[![GitHub Workflow Status](https://img.shields.io/github/workflow/status/goauthentik/authentik/authentik-ci-outpost?label=outpost%20build&style=for-the-badge)](https://github.com/goauthentik/authentik/actions/workflows/ci-outpost.yml)
[![GitHub Workflow Status](https://img.shields.io/github/workflow/status/goauthentik/authentik/authentik-ci-web?label=web%20build&style=for-the-badge)](https://github.com/goauthentik/authentik/actions/workflows/ci-web.yml)
[![Code Coverage](https://img.shields.io/codecov/c/gh/goauthentik/authentik?style=for-the-badge)](https://codecov.io/gh/goauthentik/authentik)
[![Testspace tests](https://img.shields.io/testspace/total/goauthentik/goauthentik:authentik/main?style=for-the-badge)](https://goauthentik.testspace.com/)
![Docker pulls](https://img.shields.io/docker/pulls/beryju/authentik.svg?style=for-the-badge)
![Latest version](https://img.shields.io/docker/v/beryju/authentik?sort=semver&style=for-the-badge)
[![](https://img.shields.io/badge/Help%20translate-transifex-blue?style=for-the-badge)](https://www.transifex.com/beryjuorg/authentik/)
[![](https://img.shields.io/discord/809154715984199690?label=Discord&style=flat-square)](https://discord.gg/jg33eMhnj6)
[![CI Build status](https://img.shields.io/azure-devops/build/beryjuorg/authentik/6?style=flat-square)](https://dev.azure.com/beryjuorg/authentik/_build?definitionId=6)
[![Tests](https://img.shields.io/azure-devops/tests/beryjuorg/authentik/6?compact_message&style=flat-square)](https://dev.azure.com/beryjuorg/authentik/_build?definitionId=6)
[![Code Coverage](https://img.shields.io/codecov/c/gh/goauthentik/authentik?style=flat-square)](https://codecov.io/gh/goauthentik/authentik)
![Docker pulls](https://img.shields.io/docker/pulls/beryju/authentik.svg?style=flat-square)
![Latest version](https://img.shields.io/docker/v/beryju/authentik?sort=semver&style=flat-square)
![LGTM Grade](https://img.shields.io/lgtm/grade/python/github/goauthentik/authentik?style=flat-square)
[Transifex](https://www.transifex.com/beryjuorg/authentik/)
## What is authentik?
@ -20,9 +19,9 @@ authentik is an open-source Identity Provider focused on flexibility and versati
## Installation
For small/test setups it is recommended to use docker-compose, see the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github)
For small/test setups it is recommended to use docker-compose, see the [documentation](https://goauthentik.io/docs/installation/docker-compose/)
For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github)
For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/)
## Screenshots
@ -33,28 +32,8 @@ Light | Dark
## Development
See [Development Documentation](https://goauthentik.io/developer-docs/?utm_source=github)
See [Development Documentation](https://goauthentik.io/developer-docs/)
## Security
See [SECURITY.md](SECURITY.md)
## Sponsors
This project is proudly sponsored by:
<p>
<a href="https://www.digitalocean.com/?utm_medium=opensource&utm_source=goauthentik.io">
<img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" width="201px">
</a>
</p>
DigitalOcean provides development and testing resources for authentik.
<p>
<a href="https://www.netlify.com">
<img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" />
</a>
</p>
Netlify hosts the [goauthentik.io](https://goauthentik.io) site.

View File

@ -6,10 +6,10 @@
| Version | Supported |
| ---------- | ------------------ |
| 2022.6.x | :white_check_mark: |
| 2022.7.x | :white_check_mark: |
| 2022.8.x | :white_check_mark: |
| 2021.5.x | :white_check_mark: |
| 2021.6.x | :white_check_mark: |
| 2021.7.x | :white_check_mark: |
## Reporting a Vulnerability
To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io)
To report a vulnerability, send an email to [security@beryju.org](mailto:security@beryju.org)

View File

@ -1,22 +1,3 @@
"""authentik"""
from os import environ
from typing import Optional
__version__ = "2022.8.2"
__version__ = "2021.7.3"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
def get_build_hash(fallback: Optional[str] = None) -> str:
"""Get build hash"""
build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "")
if build_hash == "" and fallback:
return fallback
return build_hash
def get_full_version() -> str:
"""Get full version, with build hash appended"""
version = __version__
if (build_hash := get_build_hash()) != "":
version += "." + build_hash
return version

View File

@ -18,13 +18,13 @@ class AppSerializer(PassiveSerializer):
class AppsViewSet(ViewSet):
"""Read-only view list all installed apps"""
"""Read-only view set list all installed apps"""
permission_classes = [IsAdminUser]
@extend_schema(responses={200: AppSerializer(many=True)})
def list(self, request: Request) -> Response:
"""Read-only view list all installed apps"""
"""List current messages and pass into Serializer"""
data = []
for app in sorted(get_apps(), key=lambda app: app.name):
data.append({"name": app.name, "label": app.verbose_name})

View File

@ -1,6 +1,13 @@
"""authentik administration metrics"""
import time
from collections import Counter
from datetime import timedelta
from django.db.models import Count, ExpressionWrapper, F
from django.db.models.fields import DurationField
from django.db.models.functions import ExtractHour
from django.utils.timezone import now
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import IntegerField, SerializerMethodField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
@ -8,7 +15,34 @@ from rest_framework.response import Response
from rest_framework.views import APIView
from authentik.core.api.utils import PassiveSerializer
from authentik.events.models import EventAction
from authentik.events.models import Event, EventAction
def get_events_per_1h(**filter_kwargs) -> list[dict[str, int]]:
"""Get event count by hour in the last day, fill with zeros"""
date_from = now() - timedelta(days=1)
result = (
Event.objects.filter(created__gte=date_from, **filter_kwargs)
.annotate(
age=ExpressionWrapper(now() - F("created"), output_field=DurationField())
)
.annotate(age_hours=ExtractHour("age"))
.values("age_hours")
.annotate(count=Count("pk"))
.order_by("age_hours")
)
data = Counter({int(d["age_hours"]): d["count"] for d in result})
results = []
_now = now()
for hour in range(0, -24, -1):
results.append(
{
"x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple())
* 1000,
"y_cord": data[hour * -1],
}
)
return results
class CoordinateSerializer(PassiveSerializer):
@ -27,22 +61,12 @@ class LoginMetricsSerializer(PassiveSerializer):
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins_per_1h(self, _):
"""Get successful logins per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.LOGIN)
.get_events_per_hour()
)
return get_events_per_1h(action=EventAction.LOGIN)
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins_failed_per_1h(self, _):
"""Get failed logins per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.LOGIN_FAILED)
.get_events_per_hour()
)
return get_events_per_1h(action=EventAction.LOGIN_FAILED)
class AdministrationMetricsViewSet(APIView):
@ -54,5 +78,4 @@ class AdministrationMetricsViewSet(APIView):
def get(self, request: Request) -> Response:
"""Login Metrics per 1h"""
serializer = LoginMetricsSerializer(True)
serializer.context["user"] = request.user
return Response(serializer.data)
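As a side note on the metrics rework above: get_events_per_1h fills hours that saw no events with zeroes by indexing into a Counter. A minimal sketch of that zero-filling step, with a hand-written raw_counts standing in for the ORM aggregation (all values here are made up):

import time
from collections import Counter
from datetime import datetime, timedelta

# Hypothetical aggregation result: {hours_ago: event_count}; missing hours mean no events.
raw_counts = Counter({0: 4, 3: 1, 7: 9})

_now = datetime.now()
series = []
for hour in range(0, -24, -1):
    series.append(
        {
            # millisecond timestamp for the chart's x axis, as in the helper above
            "x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000,
            # Counter returns 0 for absent keys, so the series has no gaps
            "y_cord": raw_counts[hour * -1],
        }
    )
assert len(series) == 24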

View File

@ -16,8 +16,6 @@ from rest_framework.response import Response
from rest_framework.views import APIView
from authentik.core.api.utils import PassiveSerializer
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost
class RuntimeDict(TypedDict):
@ -34,18 +32,12 @@ class RuntimeDict(TypedDict):
class SystemSerializer(PassiveSerializer):
"""Get system information."""
env = SerializerMethodField()
http_headers = SerializerMethodField()
http_host = SerializerMethodField()
http_is_secure = SerializerMethodField()
runtime = SerializerMethodField()
tenant = SerializerMethodField()
server_time = SerializerMethodField()
embedded_outpost_host = SerializerMethodField()
def get_env(self, request: Request) -> dict[str, str]:
"""Get Environment"""
return os.environ.copy()
def get_http_headers(self, request: Request) -> dict[str, str]:
"""Get HTTP Request headers"""
@ -69,7 +61,9 @@ class SystemSerializer(PassiveSerializer):
return {
"python_version": python_version,
"gunicorn_version": ".".join(str(x) for x in gunicorn_version),
"environment": "kubernetes" if SERVICE_HOST_ENV_NAME in os.environ else "compose",
"environment": "kubernetes"
if SERVICE_HOST_ENV_NAME in os.environ
else "compose",
"architecture": platform.machine(),
"platform": platform.platform(),
"uname": " ".join(platform.uname()),
@ -83,13 +77,6 @@ class SystemSerializer(PassiveSerializer):
"""Current server time"""
return now()
def get_embedded_outpost_host(self, request: Request) -> str:
"""Get the FQDN configured on the embedded outpost"""
outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
if not outposts.exists(): # pragma: no cover
return ""
return outposts.first().config.authentik_host
class SystemView(APIView):
"""Get system information."""

View File

@ -12,13 +12,10 @@ from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
from structlog.stdlib import get_logger
from authentik.core.api.utils import PassiveSerializer
from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
LOGGER = get_logger()
class TaskSerializer(PassiveSerializer):
"""Serialize TaskInfo and TaskResult"""
@ -39,7 +36,7 @@ class TaskSerializer(PassiveSerializer):
are pickled in cache. In that case, just delete the info"""
try:
return super().to_representation(instance)
except AttributeError: # pragma: no cover
except AttributeError:
if isinstance(self.instance, list):
for inst in self.instance:
inst.delete()
@ -92,15 +89,16 @@ class TaskViewSet(ViewSet):
try:
task_module = import_module(task.task_call_module)
task_func = getattr(task_module, task.task_call_func)
LOGGER.debug("Running task", task=task_func)
task_func.delay(*task.task_call_args, **task.task_call_kwargs)
messages.success(
self.request,
_("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}),
_(
"Successfully re-scheduled Task %(name)s!"
% {"name": task.task_name}
),
)
return Response(status=204)
except (ImportError, AttributeError): # pragma: no cover
LOGGER.warning("Failed to run task, remove state", task=task)
except ImportError: # pragma: no cover
# if we get an import error, the module path has probably changed
task.delete()
return Response(status=500)

View File

@ -1,4 +1,6 @@
"""authentik administration overview"""
from os import environ
from django.core.cache import cache
from drf_spectacular.utils import extend_schema
from packaging.version import parse
@ -8,7 +10,7 @@ from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik import __version__, get_build_hash
from authentik import ENV_GIT_HASH_KEY, __version__
from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version
from authentik.core.api.utils import PassiveSerializer
@ -23,7 +25,7 @@ class VersionSerializer(PassiveSerializer):
def get_build_hash(self, _) -> str:
"""Get build hash, if version is not latest or released"""
return get_build_hash()
return environ.get(ENV_GIT_HASH_KEY, "")
def get_version_current(self, _) -> str:
"""Get current version"""
@ -39,7 +41,9 @@ class VersionSerializer(PassiveSerializer):
def get_outdated(self, instance) -> bool:
"""Check if we're running the latest version"""
return parse(self.get_version_current(instance)) < parse(self.get_version_latest(instance))
return parse(self.get_version_current(instance)) < parse(
self.get_version_latest(instance)
)
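Both layouts of get_outdated above do the same thing: compare packaging version objects, which sort semantically rather than lexicographically. A quick illustration:

from packaging.version import parse

# 2021.10.1 is newer than 2021.7.3, even though it sorts lower as a plain string
assert parse("2021.7.3") < parse("2021.10.1")
assert "2021.7.3" > "2021.10.1"  # naive string comparison gets this wrong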
class VersionView(APIView):

View File

@ -1,6 +1,6 @@
"""authentik administration overview"""
from django.conf import settings
from drf_spectacular.utils import extend_schema, inline_serializer
from prometheus_client import Gauge
from rest_framework.fields import IntegerField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
@ -9,17 +9,18 @@ from rest_framework.views import APIView
from authentik.root.celery import CELERY_APP
GAUGE_WORKERS = Gauge("authentik_admin_workers", "Currently connected workers")
class WorkerView(APIView):
"""Get currently connected worker count."""
permission_classes = [IsAdminUser]
@extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()}))
@extend_schema(
responses=inline_serializer("Workers", fields={"count": IntegerField()})
)
def get(self, request: Request) -> Response:
"""Get currently connected worker count."""
count = len(CELERY_APP.control.ping(timeout=0.5))
# In debug we run with `CELERY_TASK_ALWAYS_EAGER`, so tasks are run on the main process
if settings.DEBUG: # pragma: no cover
count += 1
return Response({"count": count})

View File

@ -1,20 +1,10 @@
"""authentik admin app config"""
from prometheus_client import Gauge, Info
from authentik.blueprints.manager import ManagedAppConfig
PROM_INFO = Info("authentik_version", "Currently running authentik version")
GAUGE_WORKERS = Gauge("authentik_admin_workers", "Currently connected workers")
from django.apps import AppConfig
class AuthentikAdminConfig(ManagedAppConfig):
class AuthentikAdminConfig(AppConfig):
"""authentik admin app config"""
name = "authentik.admin"
label = "authentik_admin"
verbose_name = "authentik Admin"
default = True
def reconcile_load_admin_signals(self):
"""Load admin signals"""
self.import_module("authentik.admin.signals")

View File

@ -1,12 +1,10 @@
"""authentik admin settings"""
from celery.schedules import crontab
from authentik.lib.utils.time import fqdn_rand
CELERY_BEAT_SCHEDULE = {
"admin_latest_version": {
"task": "authentik.admin.tasks.update_latest_version",
"schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"),
"schedule": crontab(minute="*/60"), # Run every hour
"options": {"queue": "authentik_scheduled"},
}
}

View File

@ -1,23 +0,0 @@
"""admin signals"""
from django.dispatch import receiver
from authentik.admin.api.tasks import TaskInfo
from authentik.admin.apps import GAUGE_WORKERS
from authentik.root.celery import CELERY_APP
from authentik.root.monitoring import monitoring_set
@receiver(monitoring_set)
# pylint: disable=unused-argument
def monitoring_set_workers(sender, **kwargs):
"""Set worker gauge"""
count = len(CELERY_APP.control.ping(timeout=0.5))
GAUGE_WORKERS.set(count)
@receiver(monitoring_set)
# pylint: disable=unused-argument
def monitoring_set_tasks(sender, **kwargs):
"""Set task gauges"""
for task in TaskInfo.all().values():
task.set_prom_metrics()

View File

@ -1,24 +1,17 @@
"""authentik admin tasks"""
import re
from os import environ
from django.core.cache import cache
from django.core.validators import URLValidator
from django.db import DatabaseError, InternalError, ProgrammingError
from packaging.version import parse
from requests import RequestException
from prometheus_client import Info
from requests import RequestException, get
from structlog.stdlib import get_logger
from authentik import __version__, get_build_hash
from authentik.admin.apps import PROM_INFO
from authentik.events.models import Event, EventAction, Notification
from authentik.events.monitored_tasks import (
MonitoredTask,
TaskResult,
TaskResultStatus,
prefill_task,
)
from authentik.lib.config import CONFIG
from authentik.lib.utils.http import get_http_session
from authentik import ENV_GIT_HASH_KEY, __version__
from authentik.events.models import Event, EventAction
from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
from authentik.root.celery import CELERY_APP
LOGGER = get_logger()
@ -26,7 +19,7 @@ VERSION_CACHE_KEY = "authentik_latest_version"
VERSION_CACHE_TIMEOUT = 8 * 60 * 60 # 8 hours
# Chop off the first ^ because we want to search the entire string
URL_FINDER = URLValidator.regex.pattern[1:]
LOCAL_VERSION = parse(__version__)
PROM_INFO = Info("authentik_version", "Currently running authentik version")
def _set_prom_info():
@ -35,48 +28,33 @@ def _set_prom_info():
{
"version": __version__,
"latest": cache.get(VERSION_CACHE_KEY, ""),
"build_hash": get_build_hash(),
"build_hash": environ.get(ENV_GIT_HASH_KEY, ""),
}
)
@CELERY_APP.task(
throws=(DatabaseError, ProgrammingError, InternalError),
)
def clear_update_notifications():
"""Clear update notifications on startup if the notification was for the version
we're running now."""
for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE):
if "new_version" not in notification.event.context:
continue
notification_version = notification.event.context["new_version"]
if LOCAL_VERSION >= parse(notification_version):
notification.delete()
@CELERY_APP.task(bind=True, base=MonitoredTask)
@prefill_task
def update_latest_version(self: MonitoredTask):
"""Update latest version info"""
if CONFIG.y_bool("disable_update_check"):
cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
self.set_status(TaskResult(TaskResultStatus.WARNING, messages=["Version check disabled."]))
return
try:
response = get_http_session().get(
"https://version.goauthentik.io/version.json",
response = get(
"https://api.github.com/repos/goauthentik/authentik/releases/latest"
)
response.raise_for_status()
data = response.json()
upstream_version = data.get("stable", {}).get("version")
tag_name = data.get("tag_name")
upstream_version = tag_name.split("/")[1]
cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT)
self.set_status(
TaskResult(TaskResultStatus.SUCCESSFUL, ["Successfully updated latest Version"])
TaskResult(
TaskResultStatus.SUCCESSFUL, ["Successfully updated latest Version"]
)
)
_set_prom_info()
# Check if upstream version is newer than what we're running,
# and if no event exists yet, create one.
if LOCAL_VERSION < parse(upstream_version):
local_version = parse(__version__)
if local_version < parse(upstream_version):
# Event has already been created, don't create duplicate
if Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE,
@ -84,7 +62,7 @@ def update_latest_version(self: MonitoredTask):
).exists():
return
event_dict = {"new_version": upstream_version}
if match := re.search(URL_FINDER, data.get("stable", {}).get("changelog", "")):
if match := re.search(URL_FINDER, data.get("body", "")):
event_dict["message"] = f"Changelog: {match.group()}"
Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
except (RequestException, IndexError) as exc:
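Both revisions of update_latest_version end up with a bare version string, just from different sources: the newer task reads stable.version from version.goauthentik.io, while the older one takes the GitHub release's tag_name and strips the version/ prefix. A small sketch of that parsing step (the payload is an invented example, not a live API response):

# Invented payload in the shape the older task expects from
# https://api.github.com/repos/goauthentik/authentik/releases/latest
data = {
    "tag_name": "version/2021.7.3",
    "body": "Changelog: https://goauthentik.io/...",
}

upstream_version = data.get("tag_name").split("/")[1]
assert upstream_version == "2021.7.3"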

View File

@ -5,7 +5,6 @@ from django.test import TestCase
from django.urls import reverse
from authentik import __version__
from authentik.blueprints.tests import reconcile_app
from authentik.core.models import Group, User
from authentik.core.tasks import clean_expired_models
from authentik.events.monitored_tasks import TaskResultStatus
@ -28,7 +27,9 @@ class TestAdminAPI(TestCase):
response = self.client.get(reverse("authentik_api:admin_system_tasks-list"))
self.assertEqual(response.status_code, 200)
body = loads(response.content)
self.assertTrue(any(task["task_name"] == "clean_expired_models" for task in body))
self.assertTrue(
any(task["task_name"] == "clean_expired_models" for task in body)
)
def test_tasks_single(self):
"""Test Task API (read single)"""
@ -44,7 +45,9 @@ class TestAdminAPI(TestCase):
self.assertEqual(body["status"], TaskResultStatus.SUCCESSFUL.name)
self.assertEqual(body["task_name"], "clean_expired_models")
response = self.client.get(
reverse("authentik_api:admin_system_tasks-detail", kwargs={"pk": "qwerqwer"})
reverse(
"authentik_api:admin_system_tasks-detail", kwargs={"pk": "qwerqwer"}
)
)
self.assertEqual(response.status_code, 404)
@ -93,7 +96,6 @@ class TestAdminAPI(TestCase):
response = self.client.get(reverse("authentik_api:apps-list"))
self.assertEqual(response.status_code, 200)
@reconcile_app("authentik_outposts")
def test_system(self):
"""Test system API"""
response = self.client.get(reverse("authentik_api:admin_system"))

View File

@ -1,83 +1,81 @@
"""test admin tasks"""
import json
from dataclasses import dataclass
from unittest.mock import Mock, patch
from django.core.cache import cache
from django.test import TestCase
from requests_mock import Mocker
from requests.exceptions import RequestException
from authentik.admin.tasks import (
VERSION_CACHE_KEY,
clear_update_notifications,
update_latest_version,
)
from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version
from authentik.events.models import Event, EventAction
from authentik.lib.config import CONFIG
RESPONSE_VALID = {
"$schema": "https://version.goauthentik.io/schema.json",
"stable": {
"version": "99999999.9999999",
"changelog": "See https://goauthentik.io/test",
"reason": "bugfix",
},
}
@dataclass
class MockResponse:
"""Mock class to emulate the methods of requests's Response we need"""
status_code: int
response: str
def json(self) -> dict:
"""Get json parsed response"""
return json.loads(self.response)
def raise_for_status(self):
"""raise RequestException if status code is 400 or more"""
if self.status_code >= 400:
raise RequestException
REQUEST_MOCK_VALID = Mock(
return_value=MockResponse(
200,
"""{
"tag_name": "version/99999999.9999999",
"body": "https://goauthentik.io/test"
}""",
)
)
REQUEST_MOCK_INVALID = Mock(return_value=MockResponse(400, "{}"))
class TestAdminTasks(TestCase):
"""test admin tasks"""
@patch("authentik.admin.tasks.get", REQUEST_MOCK_VALID)
def test_version_valid_response(self):
"""Test Update checker with valid response"""
with Mocker() as mocker, CONFIG.patch("disable_update_check", False):
mocker.get("https://version.goauthentik.io/version.json", json=RESPONSE_VALID)
update_latest_version.delay().get()
self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999")
self.assertTrue(
update_latest_version.delay().get()
self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999")
self.assertTrue(
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999",
context__message="Changelog: https://goauthentik.io/test",
).exists()
)
# test that a consecutive check doesn't create a duplicate event
update_latest_version.delay().get()
self.assertEqual(
len(
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999",
context__message="Changelog: https://goauthentik.io/test",
).exists()
)
# test that a consecutive check doesn't create a duplicate event
update_latest_version.delay().get()
self.assertEqual(
len(
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999",
context__message="Changelog: https://goauthentik.io/test",
)
),
1,
)
)
),
1,
)
@patch("authentik.admin.tasks.get", REQUEST_MOCK_INVALID)
def test_version_error(self):
"""Test Update checker with invalid response"""
with Mocker() as mocker:
mocker.get("https://version.goauthentik.io/version.json", status_code=400)
update_latest_version.delay().get()
self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
self.assertFalse(
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE, context__new_version="0.0.0"
).exists()
)
def test_version_disabled(self):
"""Test Update checker while its disabled"""
with CONFIG.patch("disable_update_check", True):
update_latest_version.delay().get()
self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
def test_clear_update_notifications(self):
"""Test clear of previous notification"""
Event.objects.create(
action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"}
)
Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"})
Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={})
clear_update_notifications()
update_latest_version.delay().get()
self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
self.assertFalse(
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1"
action=EventAction.UPDATE_AVAILABLE, context__new_version="0.0.0"
).exists()
)

View File

@ -1,91 +1,57 @@
"""API Authentication"""
from typing import Any, Optional
from base64 import b64decode
from binascii import Error
from typing import Any, Optional, Union
from django.conf import settings
from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.exceptions import AuthenticationFailed
from rest_framework.request import Request
from structlog.stdlib import get_logger
from authentik.core.middleware import CTX_AUTH_VIA
from authentik.core.models import Token, TokenIntents, User
from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
from authentik.providers.oauth2.models import RefreshToken
LOGGER = get_logger()
def validate_auth(header: bytes) -> str:
"""Validate that the header is in a correct format,
returns type and credentials"""
auth_credentials = header.decode().strip()
# pylint: disable=too-many-return-statements
def token_from_header(raw_header: bytes) -> Optional[Token]:
"""raw_header in the Format of `Bearer dGVzdDp0ZXN0`"""
auth_credentials = raw_header.decode()
if auth_credentials == "" or " " not in auth_credentials:
return None
auth_type, _, auth_credentials = auth_credentials.partition(" ")
if auth_type.lower() != "bearer":
if auth_type.lower() not in ["basic", "bearer"]:
LOGGER.debug("Unsupported authentication type, denying", type=auth_type.lower())
raise AuthenticationFailed("Unsupported authentication type")
if auth_credentials == "": # nosec # noqa
password = auth_credentials
if auth_type.lower() == "basic":
try:
auth_credentials = b64decode(auth_credentials.encode()).decode()
except (UnicodeDecodeError, Error):
raise AuthenticationFailed("Malformed header")
# Accept credentials with username and without
if ":" in auth_credentials:
_, password = auth_credentials.split(":")
else:
password = auth_credentials
if password == "": # nosec
raise AuthenticationFailed("Malformed header")
return auth_credentials
def bearer_auth(raw_header: bytes) -> Optional[User]:
"""raw_header in the Format of `Bearer ....`"""
auth_credentials = validate_auth(raw_header)
if not auth_credentials:
return None
# first, check traditional tokens
key_token = Token.filter_not_expired(
key=auth_credentials, intent=TokenIntents.INTENT_API
).first()
if key_token:
CTX_AUTH_VIA.set("api_token")
return key_token.user
# then try to auth via JWT
jwt_token = RefreshToken.filter_not_expired(
refresh_token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
).first()
if jwt_token:
# Double-check scopes, since they are saved in a single string
# we want to check the parsed version too
if SCOPE_AUTHENTIK_API not in jwt_token.scope:
raise AuthenticationFailed("Token invalid/expired")
CTX_AUTH_VIA.set("jwt")
return jwt_token.user
# then try to auth via secret key (for embedded outpost/etc)
user = token_secret_key(auth_credentials)
if user:
CTX_AUTH_VIA.set("secret_key")
return user
raise AuthenticationFailed("Token invalid/expired")
def token_secret_key(value: str) -> Optional[User]:
"""Check if the token is the secret key
and return the service account for the managed outpost"""
from authentik.outposts.apps import MANAGED_OUTPOST
if value != settings.SECRET_KEY:
return None
outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
if not outposts:
return None
outpost = outposts.first()
return outpost.user
tokens = Token.filter_not_expired(key=password, intent=TokenIntents.INTENT_API)
if not tokens.exists():
raise AuthenticationFailed("Token invalid/expired")
return tokens.first()
class TokenAuthentication(BaseAuthentication):
"""Token-based authentication using HTTP Bearer authentication"""
def authenticate(self, request: Request) -> tuple[User, Any] | None:
def authenticate(self, request: Request) -> Union[tuple[User, Any], None]:
"""Token-based authentication using HTTP Bearer authentication"""
auth = get_authorization_header(request)
user = bearer_auth(auth)
token = token_from_header(auth)
# None is only returned when the header isn't set.
if not user:
if not token:
return None
return (user, None) # pragma: no cover
return (token.user, None) # pragma: no cover
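The two revisions accept different Authorization schemes: bearer_auth only understands Bearer (a token key, a JWT with the authentik API scope, or the secret key), while token_from_header additionally accepts HTTP Basic with the token as the password. A client-side sketch of building both header variants (the token value is a placeholder):

from base64 import b64encode

token = "dGVzdC10b2tlbg"  # placeholder API token key

# Bearer scheme, accepted by both revisions
bearer_headers = {"Authorization": f"Bearer {token}"}

# Basic scheme with an empty username and the token as the password,
# only accepted by the token_from_header() variant
basic_credentials = b64encode(f":{token}".encode()).decode()
basic_headers = {"Authorization": f"Basic {basic_credentials}"}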

View File

@ -12,8 +12,6 @@ class OwnerFilter(BaseFilterBackend):
owner_key = "user"
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
if request.user.is_superuser:
return queryset
return queryset.filter(**{self.owner_key: request.user})
@ -35,12 +33,3 @@ class OwnerPermissions(BasePermission):
if owner != request.user:
return False
return True
class OwnerSuperuserPermissions(OwnerPermissions):
"""Similar to OwnerPermissions, except always allow access for superusers"""
def has_object_permission(self, request: Request, view, obj: Model) -> bool:
if request.user.is_superuser:
return True
return super().has_object_permission(request, view, obj)

View File

@ -5,12 +5,11 @@ from typing import Callable, Optional
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
LOGGER = get_logger()
def permission_required(perm: Optional[str] = None, other_perms: Optional[list[str]] = None):
def permission_required(
perm: Optional[str] = None, other_perms: Optional[list[str]] = None
):
"""Check permissions for a single custom action"""
def wrapper_outter(func: Callable):
@ -21,12 +20,10 @@ def permission_required(perm: Optional[str] = None, other_perms: Optional[list[s
if perm:
obj = self.get_object()
if not request.user.has_perm(perm, obj):
LOGGER.debug("denying access for object", user=request.user, perm=perm, obj=obj)
return self.permission_denied(request)
if other_perms:
for other_perm in other_perms:
if not request.user.has_perm(other_perm):
LOGGER.debug("denying access for other", user=request.user, perm=perm)
return self.permission_denied(request)
return func(self, request, *args, **kwargs)
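The decorator is meant to sit on a viewset action: the object-level permission is checked against get_object(), the extra permissions against the requesting user. A hypothetical usage sketch (the viewset, action and permission strings are examples, not taken from this diff; the import path matches the import used elsewhere in this diff):

from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet

from authentik.api.decorators import permission_required


class ExampleViewSet(ModelViewSet):  # hypothetical viewset for illustration
    @permission_required("app_label.view_example", ["app_label.view_related"])
    @action(detail=True, pagination_class=None, filter_backends=[])
    def metrics(self, request, pk=None):
        """Only reached when the user may view this object and holds the extra permission"""
        return Response({})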

View File

@ -11,7 +11,7 @@ from drf_spectacular.types import OpenApiTypes
def build_standard_type(obj, **kwargs):
"""Build a basic type with optional add owns."""
"""Build a basic type with optional add ons."""
schema = build_basic_type(obj)
schema.update(kwargs)
return schema
@ -31,7 +31,7 @@ VALIDATION_ERROR = build_object_type(
"non_field_errors": build_array_type(build_standard_type(OpenApiTypes.STR)),
"code": build_standard_type(OpenApiTypes.STR),
},
required=[],
required=["detail"],
additionalProperties={},
)
@ -60,30 +60,12 @@ def postprocess_schema_responses(result, generator, **kwargs): # noqa: W0613
for path in result["paths"].values():
for method in path.values():
method["responses"].setdefault(
"400",
{
"content": {
"application/json": {
"schema": validation_error.ref,
}
},
"description": "",
},
)
method["responses"].setdefault(
"403",
{
"content": {
"application/json": {
"schema": generic_error.ref,
}
},
"description": "",
},
)
method["responses"].setdefault("400", validation_error.ref)
method["responses"].setdefault("403", generic_error.ref)
result["components"] = generator.registry.build(spectacular_settings.APPEND_COMPONENTS)
result["components"] = generator.registry.build(
spectacular_settings.APPEND_COMPONENTS
)
# This is a workaround for authentik/stages/prompt/stage.py
# since the serializer PromptChallengeResponse

View File

@ -8,6 +8,9 @@ API Browser - {{ tenant.branding_title }}
{% block head %}
<script type="module" src="{% static 'dist/rapidoc-min.js' %}"></script>
{% endblock %}
{% block body %}
<script>
function getCookie(name) {
let cookieValue = "";
@ -27,62 +30,20 @@ function getCookie(name) {
window.addEventListener('DOMContentLoaded', (event) => {
const rapidocEl = document.querySelector('rapi-doc');
rapidocEl.addEventListener('before-try', (e) => {
e.detail.request.headers.append('X-authentik-CSRF', getCookie("authentik_csrf"));
e.detail.request.headers.append('X-CSRFToken', getCookie("authentik_csrf"));
});
});
</script>
<style>
img.logo {
width: 100%;
padding: 1rem 0.5rem 1.5rem 0.5rem;
min-height: 48px;
}
</style>
{% endblock %}
{% block body %}
<rapi-doc
spec-url="{{ path }}"
heading-text=""
theme="light"
render-style="read"
default-schema-tab="schema"
heading-text="authentik"
theme="dark"
render-style="view"
primary-color="#fd4b2d"
nav-bg-color="#212427"
bg-color="#000000"
text-color="#000000"
nav-text-color="#ffffff"
nav-hover-bg-color="#3c3f42"
nav-accent-color="#4f5255"
nav-hover-text-color="#ffffff"
use-path-in-nav-bar="true"
nav-item-spacing="relaxed"
allow-server-selection="false"
show-header="false"
allow-spec-url-load="false"
allow-spec-file-load="false">
<div slot="nav-logo">
<img class="logo" src="{% static 'dist/assets/icons/icon_left_brand.png' %}" />
<div slot="logo">
<img src="{% static 'dist/assets/icons/icon.png' %}" style="width:50px; height:50px" />
</div>
</rapi-doc>
<script>
const rapidoc = document.querySelector("rapi-doc");
const matcher = window.matchMedia("(prefers-color-scheme: light)");
const changer = (ev) => {
const style = getComputedStyle(document.documentElement);
let bg, text = "";
if (matcher.matches) {
bg = style.getPropertyValue('--pf-global--BackgroundColor--light-300');
text = style.getPropertyValue('--pf-global--Color--300');
} else {
bg = style.getPropertyValue('--ak-dark-background');
text = style.getPropertyValue('--ak-dark-foreground');
}
rapidoc.attributes.getNamedItem("bg-color").value = bg.trim();
rapidoc.attributes.getNamedItem("text-color").value = text.trim();
rapidoc.requestUpdate();
};
matcher.addEventListener("change", changer);
window.addEventListener("load", changer);
</script>
{% endblock %}

View File

@ -1,78 +1,49 @@
"""Test API Authentication"""
from base64 import b64encode
from django.conf import settings
from django.test import TestCase
from guardian.shortcuts import get_anonymous_user
from rest_framework.exceptions import AuthenticationFailed
from authentik.api.authentication import bearer_auth
from authentik.blueprints.tests import reconcile_app
from authentik.core.models import USER_ATTRIBUTE_SA, Token, TokenIntents
from authentik.core.tests.utils import create_test_flow
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
from authentik.providers.oauth2.models import OAuth2Provider, RefreshToken
from authentik.api.authentication import token_from_header
from authentik.core.models import Token, TokenIntents
class TestAPIAuth(TestCase):
"""Test API Authentication"""
def test_valid_basic(self):
"""Test valid token"""
token = Token.objects.create(
intent=TokenIntents.INTENT_API, user=get_anonymous_user()
)
auth = b64encode(f":{token.key}".encode()).decode()
self.assertEqual(token_from_header(f"Basic {auth}".encode()), token)
def test_valid_bearer(self):
"""Test valid token"""
token = Token.objects.create(
intent=TokenIntents.INTENT_API, user=get_anonymous_user()
)
self.assertEqual(token_from_header(f"Bearer {token.key}".encode()), token)
def test_invalid_type(self):
"""Test invalid type"""
with self.assertRaises(AuthenticationFailed):
bearer_auth("foo bar".encode())
token_from_header("foo bar".encode())
def test_invalid_empty(self):
"""Test invalid type"""
self.assertIsNone(bearer_auth("Bearer ".encode()))
self.assertIsNone(bearer_auth("".encode()))
def test_invalid_decode(self):
"""Test invalid bas64"""
with self.assertRaises(AuthenticationFailed):
token_from_header("Basic bar".encode())
def test_invalid_empty_password(self):
"""Test invalid with empty password"""
with self.assertRaises(AuthenticationFailed):
token_from_header("Basic :".encode())
def test_invalid_no_token(self):
"""Test invalid with no token"""
with self.assertRaises(AuthenticationFailed):
auth = b64encode(":abc".encode()).decode()
self.assertIsNone(bearer_auth(f"Basic :{auth}".encode()))
def test_bearer_valid(self):
"""Test valid token"""
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=get_anonymous_user())
self.assertEqual(bearer_auth(f"Bearer {token.key}".encode()), token.user)
def test_managed_outpost(self):
"""Test managed outpost"""
with self.assertRaises(AuthenticationFailed):
bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
@reconcile_app("authentik_outposts")
def test_managed_outpost_success(self):
"""Test managed outpost"""
user = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
self.assertEqual(user.attributes[USER_ATTRIBUTE_SA], True)
def test_jwt_valid(self):
"""Test valid JWT"""
provider = OAuth2Provider.objects.create(
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
)
refresh = RefreshToken.objects.create(
user=get_anonymous_user(),
provider=provider,
refresh_token=generate_id(),
_scope=SCOPE_AUTHENTIK_API,
)
self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)
def test_jwt_missing_scope(self):
"""Test valid JWT"""
provider = OAuth2Provider.objects.create(
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
)
refresh = RefreshToken.objects.create(
user=get_anonymous_user(),
provider=provider,
refresh_token=generate_id(),
_scope="",
)
with self.assertRaises(AuthenticationFailed):
self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)
self.assertIsNone(token_from_header(f"Basic :{auth}".encode()))

View File

@ -1,29 +0,0 @@
"""authentik API Modelviewset tests"""
from typing import Callable
from django.test import TestCase
from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet
from authentik.api.v3.urls import router
class TestModelViewSets(TestCase):
"""Test Viewset"""
def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable:
"""Test Viewset"""
def tester(self: TestModelViewSets):
self.assertIsNotNone(getattr(test_viewset, "search_fields", None))
filterset_class = getattr(test_viewset, "filterset_class", None)
if not filterset_class:
self.assertIsNotNone(getattr(test_viewset, "filterset_fields", None))
return tester
for _, viewset, _ in router.registry:
if not issubclass(viewset, (ModelViewSet, ReadOnlyModelViewSet)):
continue
setattr(TestModelViewSets, f"test_viewset_{viewset.__name__}", viewset_tester_factory(viewset))

View File

@ -1,8 +1,8 @@
"""authentik api urls"""
from django.urls import include, path
from authentik.api.v3.urls import urlpatterns as v3_urls
from authentik.api.v2.urls import urlpatterns as v2_urls
urlpatterns = [
path("v3/", include(v3_urls)),
path("v2beta/", include(v2_urls)),
]

View File

@ -0,0 +1,70 @@
"""core Configs API"""
from os import environ, path
from django.conf import settings
from django.db import models
from drf_spectacular.utils import extend_schema
from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
from rest_framework.fields import BooleanField, CharField, ChoiceField, ListField
from rest_framework.permissions import AllowAny
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik.core.api.utils import PassiveSerializer
from authentik.events.geo import GEOIP_READER
from authentik.lib.config import CONFIG
class Capabilities(models.TextChoices):
"""Define capabilities which influence which APIs can/should be used"""
CAN_SAVE_MEDIA = "can_save_media"
CAN_GEO_IP = "can_geo_ip"
CAN_BACKUP = "can_backup"
class ConfigSerializer(PassiveSerializer):
"""Serialize authentik Config into DRF Object"""
error_reporting_enabled = BooleanField(read_only=True)
error_reporting_environment = CharField(read_only=True)
error_reporting_send_pii = BooleanField(read_only=True)
capabilities = ListField(child=ChoiceField(choices=Capabilities.choices))
class ConfigView(APIView):
"""Read-only view set that returns the current session's Configs"""
permission_classes = [AllowAny]
def get_capabilities(self) -> list[Capabilities]:
"""Get all capabilities this server instance supports"""
caps = []
deb_test = settings.DEBUG or settings.TEST
if path.ismount(settings.MEDIA_ROOT) or deb_test:
caps.append(Capabilities.CAN_SAVE_MEDIA)
if GEOIP_READER.enabled:
caps.append(Capabilities.CAN_GEO_IP)
if SERVICE_HOST_ENV_NAME in environ:
# Running in k8s, only s3 backup is supported
if CONFIG.y_bool("postgresql.s3_backup"):
caps.append(Capabilities.CAN_BACKUP)
else:
# Running in compose, backup is always supported
caps.append(Capabilities.CAN_BACKUP)
return caps
@extend_schema(responses={200: ConfigSerializer(many=False)})
def get(self, request: Request) -> Response:
"""Retrive public configuration options"""
config = ConfigSerializer(
{
"error_reporting_enabled": CONFIG.y("error_reporting.enabled"),
"error_reporting_environment": CONFIG.y("error_reporting.environment"),
"error_reporting_send_pii": CONFIG.y("error_reporting.send_pii"),
"capabilities": self.get_capabilities(),
}
)
return Response(config.data)
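get_capabilities above folds deployment facts into a flat list the frontend can key off; note that backups are only advertised on Kubernetes when S3 is configured, but always under docker-compose. A condensed restatement of that decision as a pure function (function and argument names are illustrative):

def capabilities(debug: bool, media_is_mount: bool, geoip_enabled: bool,
                 in_kubernetes: bool, s3_backup: bool) -> list[str]:
    """Illustrative restatement of ConfigView.get_capabilities"""
    caps = []
    if media_is_mount or debug:
        caps.append("can_save_media")
    if geoip_enabled:
        caps.append("can_geo_ip")
    # Kubernetes only supports S3-backed backups; compose always supports backups
    if (in_kubernetes and s3_backup) or not in_kubernetes:
        caps.append("can_backup")
    return caps


assert capabilities(False, True, False, True, False) == ["can_save_media"]
assert capabilities(False, False, True, False, False) == ["can_geo_ip", "can_backup"]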

View File

@ -0,0 +1,38 @@
"""Sentry tunnel"""
from json import loads
from django.conf import settings
from django.http.request import HttpRequest
from django.http.response import HttpResponse
from django.views.generic.base import View
from requests import post
from requests.exceptions import RequestException
from authentik.lib.config import CONFIG
class SentryTunnelView(View):
"""Sentry tunnel, to prevent ad blockers from blocking sentry"""
def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""Sentry tunnel, to prevent ad blockers from blocking sentry"""
# Only allow usage of this endpoint when error reporting is enabled
if not CONFIG.y_bool("error_reporting.enabled", False):
return HttpResponse(status=400)
# Body is 2 json objects separated by \n
full_body = request.body
header = loads(full_body.splitlines()[0])
# Check that the DSN is what we expect
dsn = header.get("dsn", "")
if dsn != settings.SENTRY_DSN:
return HttpResponse(status=400)
response = post(
"https://sentry.beryju.org/api/8/envelope/",
data=full_body,
headers={"Content-Type": "application/octet-stream"},
)
try:
response.raise_for_status()
except RequestException:
return HttpResponse(status=500)
return HttpResponse(status=response.status_code)
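SentryTunnelView only inspects the envelope's first line, a JSON header that carries the DSN, and forwards the rest untouched. A minimal sketch of that header check (the DSN and payload below are placeholders):

from json import loads

# A Sentry envelope is newline-separated: one JSON header line,
# then item headers and payloads.
envelope_body = (
    b'{"dsn": "https://publicKey@sentry.example.com/8", "sent_at": "2021-07-01T00:00:00Z"}\n'
    b'{"type": "event"}\n'
    b'{"message": "hello"}\n'
)

header = loads(envelope_body.splitlines()[0])
dsn = header.get("dsn", "")
# The view returns 400 unless this matches settings.SENTRY_DSN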

View File

@ -1,6 +1,6 @@
"""api v3 urls"""
"""api v2 urls"""
from django.urls import path
from django.views.decorators.cache import cache_page
from django.views.decorators.csrf import csrf_exempt
from drf_spectacular.views import SpectacularAPIView
from rest_framework import routers
@ -10,29 +10,26 @@ from authentik.admin.api.system import SystemView
from authentik.admin.api.tasks import TaskViewSet
from authentik.admin.api.version import VersionView
from authentik.admin.api.workers import WorkerView
from authentik.api.v3.config import ConfigView
from authentik.api.v2.config import ConfigView
from authentik.api.v2.sentry import SentryTunnelView
from authentik.api.views import APIBrowserView
from authentik.blueprints.api import BlueprintInstanceViewSet
from authentik.core.api.applications import ApplicationViewSet
from authentik.core.api.authenticated_sessions import AuthenticatedSessionViewSet
from authentik.core.api.devices import AdminDeviceViewSet, DeviceViewSet
from authentik.core.api.groups import GroupViewSet
from authentik.core.api.propertymappings import PropertyMappingViewSet
from authentik.core.api.providers import ProviderViewSet
from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
from authentik.core.api.sources import SourceViewSet
from authentik.core.api.tokens import TokenViewSet
from authentik.core.api.users import UserViewSet
from authentik.crypto.api import CertificateKeyPairViewSet
from authentik.events.api.events import EventViewSet
from authentik.events.api.notification_mappings import NotificationWebhookMappingViewSet
from authentik.events.api.notification_rules import NotificationRuleViewSet
from authentik.events.api.notification_transports import NotificationTransportViewSet
from authentik.events.api.notifications import NotificationViewSet
from authentik.events.api.event import EventViewSet
from authentik.events.api.notification import NotificationViewSet
from authentik.events.api.notification_rule import NotificationRuleViewSet
from authentik.events.api.notification_transport import NotificationTransportViewSet
from authentik.flows.api.bindings import FlowStageBindingViewSet
from authentik.flows.api.flows import FlowViewSet
from authentik.flows.api.stages import StageViewSet
from authentik.flows.views.executor import FlowExecutorView
from authentik.flows.views.inspector import FlowInspectorView
from authentik.flows.views import FlowExecutorView
from authentik.outposts.api.outposts import OutpostViewSet
from authentik.outposts.api.service_connections import (
DockerServiceConnectionViewSet,
@ -47,29 +44,35 @@ from authentik.policies.expiry.api import PasswordExpiryPolicyViewSet
from authentik.policies.expression.api import ExpressionPolicyViewSet
from authentik.policies.hibp.api import HaveIBeenPwendPolicyViewSet
from authentik.policies.password.api import PasswordPolicyViewSet
from authentik.policies.reputation.api import ReputationPolicyViewSet, ReputationViewSet
from authentik.policies.reputation.api import (
IPReputationViewSet,
ReputationPolicyViewSet,
UserReputationViewSet,
)
from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet
from authentik.providers.oauth2.api.provider import OAuth2ProviderViewSet
from authentik.providers.oauth2.api.scope import ScopeMappingViewSet
from authentik.providers.oauth2.api.tokens import AuthorizationCodeViewSet, RefreshTokenViewSet
from authentik.providers.proxy.api import ProxyOutpostConfigViewSet, ProxyProviderViewSet
from authentik.providers.oauth2.api.tokens import (
AuthorizationCodeViewSet,
RefreshTokenViewSet,
)
from authentik.providers.proxy.api import (
ProxyOutpostConfigViewSet,
ProxyProviderViewSet,
)
from authentik.providers.saml.api import SAMLPropertyMappingViewSet, SAMLProviderViewSet
from authentik.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
from authentik.sources.oauth.api.source import OAuthSourceViewSet
from authentik.sources.oauth.api.source_connection import UserOAuthSourceConnectionViewSet
from authentik.sources.plex.api.source import PlexSourceViewSet
from authentik.sources.plex.api.source_connection import PlexSourceConnectionViewSet
from authentik.sources.oauth.api.source_connection import (
UserOAuthSourceConnectionViewSet,
)
from authentik.sources.plex.api import PlexSourceViewSet
from authentik.sources.saml.api import SAMLSourceViewSet
from authentik.stages.authenticator_duo.api import (
AuthenticatorDuoStageViewSet,
DuoAdminDeviceViewSet,
DuoDeviceViewSet,
)
from authentik.stages.authenticator_sms.api import (
AuthenticatorSMSStageViewSet,
SMSAdminDeviceViewSet,
SMSDeviceViewSet,
)
from authentik.stages.authenticator_static.api import (
AuthenticatorStaticStageViewSet,
StaticAdminDeviceViewSet,
@ -80,7 +83,9 @@ from authentik.stages.authenticator_totp.api import (
TOTPAdminDeviceViewSet,
TOTPDeviceViewSet,
)
from authentik.stages.authenticator_validate.api import AuthenticatorValidateStageViewSet
from authentik.stages.authenticator_validate.api import (
AuthenticatorValidateStageViewSet,
)
from authentik.stages.authenticator_webauthn.api import (
AuthenticateWebAuthnStageViewSet,
WebAuthnAdminDeviceViewSet,
@ -102,7 +107,6 @@ from authentik.stages.user_write.api import UserWriteStageViewSet
from authentik.tenants.api import TenantViewSet
router = routers.DefaultRouter()
router.include_format_suffixes = False
router.register("admin/system_tasks", TaskViewSet, basename="admin_system_tasks")
router.register("admin/apps", AppsViewSet, basename="apps")
@ -118,7 +122,9 @@ router.register("core/tenants", TenantViewSet)
router.register("outposts/instances", OutpostViewSet)
router.register("outposts/service_connections/all", ServiceConnectionViewSet)
router.register("outposts/service_connections/docker", DockerServiceConnectionViewSet)
router.register("outposts/service_connections/kubernetes", KubernetesServiceConnectionViewSet)
router.register(
"outposts/service_connections/kubernetes", KubernetesServiceConnectionViewSet
)
router.register("outposts/proxy", ProxyOutpostConfigViewSet)
router.register("outposts/ldap", LDAPOutpostConfigViewSet)
@ -132,12 +138,8 @@ router.register("events/notifications", NotificationViewSet)
router.register("events/transports", NotificationTransportViewSet)
router.register("events/rules", NotificationRuleViewSet)
router.register("managed/blueprints", BlueprintInstanceViewSet)
router.register("sources/all", SourceViewSet)
router.register("sources/user_connections/all", UserSourceConnectionViewSet)
router.register("sources/user_connections/oauth", UserOAuthSourceConnectionViewSet)
router.register("sources/user_connections/plex", PlexSourceConnectionViewSet)
router.register("sources/oauth_user_connections", UserOAuthSourceConnectionViewSet)
router.register("sources/ldap", LDAPSourceViewSet)
router.register("sources/saml", SAMLSourceViewSet)
router.register("sources/oauth", OAuthSourceViewSet)
@ -150,7 +152,8 @@ router.register("policies/event_matcher", EventMatcherPolicyViewSet)
router.register("policies/haveibeenpwned", HaveIBeenPwendPolicyViewSet)
router.register("policies/password_expiry", PasswordExpiryPolicyViewSet)
router.register("policies/password", PasswordPolicyViewSet)
router.register("policies/reputation/scores", ReputationViewSet)
router.register("policies/reputation/users", UserReputationViewSet)
router.register("policies/reputation/ips", IPReputationViewSet)
router.register("policies/reputation", ReputationPolicyViewSet)
router.register("providers/all", ProviderViewSet)
@ -166,35 +169,24 @@ router.register("propertymappings/all", PropertyMappingViewSet)
router.register("propertymappings/ldap", LDAPPropertyMappingViewSet)
router.register("propertymappings/saml", SAMLPropertyMappingViewSet)
router.register("propertymappings/scope", ScopeMappingViewSet)
router.register("propertymappings/notification", NotificationWebhookMappingViewSet)
router.register("authenticators/all", DeviceViewSet, basename="device")
router.register("authenticators/duo", DuoDeviceViewSet)
router.register("authenticators/sms", SMSDeviceViewSet)
router.register("authenticators/static", StaticDeviceViewSet)
router.register("authenticators/totp", TOTPDeviceViewSet)
router.register("authenticators/webauthn", WebAuthnDeviceViewSet)
router.register(
"authenticators/admin/all",
AdminDeviceViewSet,
basename="admin-device",
)
router.register(
"authenticators/admin/duo",
DuoAdminDeviceViewSet,
basename="admin-duodevice",
)
router.register(
"authenticators/admin/sms",
SMSAdminDeviceViewSet,
basename="admin-smsdevice",
)
router.register(
"authenticators/admin/static",
StaticAdminDeviceViewSet,
basename="admin-staticdevice",
)
router.register("authenticators/admin/totp", TOTPAdminDeviceViewSet, basename="admin-totpdevice")
router.register(
"authenticators/admin/totp", TOTPAdminDeviceViewSet, basename="admin-totpdevice"
)
router.register(
"authenticators/admin/webauthn",
WebAuthnAdminDeviceViewSet,
@ -203,7 +195,6 @@ router.register(
router.register("stages/all", StageViewSet)
router.register("stages/authenticator/duo", AuthenticatorDuoStageViewSet)
router.register("stages/authenticator/sms", AuthenticatorSMSStageViewSet)
router.register("stages/authenticator/static", AuthenticatorStaticStageViewSet)
router.register("stages/authenticator/totp", AuthenticatorTOTPStageViewSet)
router.register("stages/authenticator/validate", AuthenticatorValidateStageViewSet)
@ -246,11 +237,7 @@ urlpatterns = (
FlowExecutorView.as_view(),
name="flow-executor",
),
path(
"flows/inspector/<slug:flow_slug>/",
FlowInspectorView.as_view(),
name="flow-inspector",
),
path("schema/", cache_page(86400)(SpectacularAPIView.as_view()), name="schema"),
path("sentry/", csrf_exempt(SentryTunnelView.as_view()), name="sentry"),
path("schema/", SpectacularAPIView.as_view(), name="schema"),
]
)

View File

@ -1,92 +0,0 @@
"""core Configs API"""
from os import path
from django.conf import settings
from django.db import models
from drf_spectacular.utils import extend_schema
from rest_framework.fields import (
BooleanField,
CharField,
ChoiceField,
FloatField,
IntegerField,
ListField,
)
from rest_framework.permissions import AllowAny
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik.core.api.utils import PassiveSerializer
from authentik.events.geo import GEOIP_READER
from authentik.lib.config import CONFIG
class Capabilities(models.TextChoices):
"""Define capabilities which influence which APIs can/should be used"""
CAN_SAVE_MEDIA = "can_save_media"
CAN_GEO_IP = "can_geo_ip"
CAN_IMPERSONATE = "can_impersonate"
class ErrorReportingConfigSerializer(PassiveSerializer):
"""Config for error reporting"""
enabled = BooleanField(read_only=True)
environment = CharField(read_only=True)
send_pii = BooleanField(read_only=True)
traces_sample_rate = FloatField(read_only=True)
class ConfigSerializer(PassiveSerializer):
"""Serialize authentik Config into DRF Object"""
error_reporting = ErrorReportingConfigSerializer(required=True)
capabilities = ListField(child=ChoiceField(choices=Capabilities.choices))
cache_timeout = IntegerField(required=True)
cache_timeout_flows = IntegerField(required=True)
cache_timeout_policies = IntegerField(required=True)
cache_timeout_reputation = IntegerField(required=True)
class ConfigView(APIView):
"""Read-only view set that returns the current session's Configs"""
permission_classes = [AllowAny]
def get_capabilities(self) -> list[Capabilities]:
"""Get all capabilities this server instance supports"""
caps = []
deb_test = settings.DEBUG or settings.TEST
if path.ismount(settings.MEDIA_ROOT) or deb_test:
caps.append(Capabilities.CAN_SAVE_MEDIA)
if GEOIP_READER.enabled:
caps.append(Capabilities.CAN_GEO_IP)
if CONFIG.y_bool("impersonation"):
caps.append(Capabilities.CAN_IMPERSONATE)
return caps
def get_config(self) -> ConfigSerializer:
"""Get Config"""
return ConfigSerializer(
{
"error_reporting": {
"enabled": CONFIG.y("error_reporting.enabled"),
"environment": CONFIG.y("error_reporting.environment"),
"send_pii": CONFIG.y("error_reporting.send_pii"),
"traces_sample_rate": float(CONFIG.y("error_reporting.sample_rate", 0.4)),
},
"capabilities": self.get_capabilities(),
"cache_timeout": int(CONFIG.y("redis.cache_timeout")),
"cache_timeout_flows": int(CONFIG.y("redis.cache_timeout_flows")),
"cache_timeout_policies": int(CONFIG.y("redis.cache_timeout_policies")),
"cache_timeout_reputation": int(CONFIG.y("redis.cache_timeout_reputation")),
}
)
@extend_schema(responses={200: ConfigSerializer(many=False)})
def get(self, request: Request) -> Response:
"""Retrieve public configuration options"""
return Response(self.get_config().data)

View File

@ -1,109 +0,0 @@
"""Serializer mixin for managed models"""
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, DateTimeField, JSONField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer, ModelSerializer
from rest_framework.viewsets import ModelViewSet
from authentik.api.decorators import permission_required
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer
class ManagedSerializer:
"""Managed Serializer"""
managed = CharField(read_only=True, allow_null=True)
class MetadataSerializer(PassiveSerializer):
"""Serializer for blueprint metadata"""
name = CharField()
labels = JSONField()
class BlueprintInstanceSerializer(ModelSerializer):
"""Info about a single blueprint instance file"""
def validate_path(self, path: str) -> str:
"""Ensure the path specified is retrievable"""
try:
BlueprintInstance(path=path).retrieve()
except BlueprintRetrievalFailed as exc:
raise ValidationError(exc) from exc
return path
class Meta:
model = BlueprintInstance
fields = [
"pk",
"name",
"path",
"context",
"last_applied",
"last_applied_hash",
"status",
"enabled",
"managed_models",
"metadata",
]
extra_kwargs = {
"status": {"read_only": True},
"last_applied": {"read_only": True},
"last_applied_hash": {"read_only": True},
"managed_models": {"read_only": True},
"metadata": {"read_only": True},
}
class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
"""Blueprint instances"""
permission_classes = [IsAdminUser]
serializer_class = BlueprintInstanceSerializer
queryset = BlueprintInstance.objects.all()
search_fields = ["name", "path"]
filterset_fields = ["name", "path"]
@extend_schema(
responses={
200: ListSerializer(
child=inline_serializer(
"BlueprintFile",
fields={
"path": CharField(),
"last_m": DateTimeField(),
"hash": CharField(),
"meta": MetadataSerializer(required=False, read_only=True),
},
)
)
}
)
@action(detail=False, pagination_class=None, filter_backends=[])
def available(self, request: Request) -> Response:
"""Get blueprints"""
files: list[dict] = blueprints_find_dict.delay().get()
return Response(files)
@permission_required("authentik_blueprints.view_blueprintinstance")
@extend_schema(
request=None,
responses={
200: BlueprintInstanceSerializer(),
},
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
def apply(self, request: Request, *args, **kwargs) -> Response:
"""Apply a blueprint"""
blueprint = self.get_object()
apply_blueprint.delay(str(blueprint.pk)).get()
return self.retrieve(request, *args, **kwargs)
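
For context, a hedged sketch of driving the two actions above from an API client; the /api/v3/managed/blueprints/ route prefix, the bearer-token auth and the placeholder UUID are assumptions, not taken from this changeset:

# Sketch: call the `available` and `apply` actions of BlueprintInstanceViewSet.
# Route prefix, auth handling and the UUID below are assumptions.
import os

import requests

base = os.environ.get("AUTHENTIK_URL", "https://authentik.company")
headers = {"Authorization": f"Bearer {os.environ['AUTHENTIK_TOKEN']}"}

# List blueprint files found on disk (the `available` action)
available = requests.get(
    f"{base}/api/v3/managed/blueprints/available/", headers=headers, timeout=5
)
print([entry["path"] for entry in available.json()])

# Re-apply a single instance (the `apply` action); blocks until the task finishes
instance_pk = "00000000-0000-0000-0000-000000000000"  # placeholder UUID
requests.post(
    f"{base}/api/v3/managed/blueprints/{instance_pk}/apply/", headers=headers, timeout=60
)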

View File

@ -1,22 +0,0 @@
"""authentik Blueprints app"""
from authentik.blueprints.manager import ManagedAppConfig
class AuthentikBlueprintsConfig(ManagedAppConfig):
"""authentik Blueprints app"""
name = "authentik.blueprints"
label = "authentik_blueprints"
verbose_name = "authentik Blueprints"
default = True
def reconcile_load_blueprints_v1_tasks(self):
"""Load v1 tasks"""
self.import_module("authentik.blueprints.v1.tasks")
def reconcile_blueprints_discover(self):
"""Run blueprint discovery"""
from authentik.blueprints.v1.tasks import blueprints_discover
blueprints_discover.delay()

View File

@ -1,28 +0,0 @@
"""Apply blueprint from commandline"""
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.importer import Importer
LOGGER = get_logger()
class Command(BaseCommand):
"""Apply blueprint from commandline"""
@no_translations
def handle(self, *args, **options):
"""Apply all blueprints in order, abort when one fails to import"""
for blueprint_path in options.get("blueprints", []):
content = BlueprintInstance(path=blueprint_path).retrieve()
importer = Importer(content)
valid, logs = importer.validate()
if not valid:
for log in logs:
LOGGER.debug(**log)
raise ValueError("blueprint invalid")
importer.apply()
def add_arguments(self, parser):
parser.add_argument("blueprints", nargs="+", type=str)

View File

@ -1,17 +0,0 @@
"""Export blueprint of current authentik install"""
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
from authentik.blueprints.v1.exporter import Exporter
LOGGER = get_logger()
class Command(BaseCommand):
"""Export blueprint of current authentik install"""
@no_translations
def handle(self, *args, **options):
"""Export blueprint of current authentik install"""
exporter = Exporter()
self.stdout.write(exporter.export_to_string())

View File

@ -1,38 +0,0 @@
"""Generate JSON Schema for blueprints"""
from json import dumps, loads
from pathlib import Path
from django.apps import apps
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.lib.models import SerializerModel
LOGGER = get_logger()
class Command(BaseCommand):
"""Generate JSON Schema for blueprints"""
schema: dict
@no_translations
def handle(self, *args, **options):
"""Generate JSON Schema for blueprints"""
path = Path(__file__).parent.joinpath("./schema_template.json")
with open(path, "r", encoding="utf-8") as _template_file:
self.schema = loads(_template_file.read())
self.set_model_allowed()
self.stdout.write(dumps(self.schema, indent=4))
def set_model_allowed(self):
"""Set model enum"""
model_names = []
for model in apps.get_models():
if not is_model_allowed(model):
continue
if SerializerModel not in model.__mro__:
continue
model_names.append(f"{model._meta.app_label}.{model._meta.model_name}")
self.schema["properties"]["entries"]["items"]["properties"]["model"]["enum"] = model_names

View File

@ -1,90 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema",
"$id": "http://example.com/example.json",
"type": "object",
"title": "authentik Blueprint schema",
"default": {},
"required": [
"version",
"entries"
],
"properties": {
"version": {
"$id": "#/properties/version",
"type": "integer",
"title": "Blueprint version",
"default": 1
},
"metadata": {
"$id": "#/properties/metadata",
"type": "object",
"required": [
"name"
],
"properties": {
"name": {
"type": "string"
},
"labels": {
"type": "object"
}
}
},
"context": {
"$id": "#/properties/context",
"type": "object",
"additionalProperties": true
},
"entries": {
"type": "array",
"items": {
"$id": "#entry",
"type": "object",
"required": [
"model",
"identifiers"
],
"properties": {
"model": {
"type": "string",
"enum": [
"placeholder"
]
},
"id": {
"type": "string"
},
"attrs": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "Commonly available field, may not exist on all models"
}
},
"default": {},
"additionalProperties": true
},
"identifiers": {
"type": "object",
"properties": {
"pk": {
"description": "Commonly available field, may not exist on all models",
"anyOf": [
{
"type": "number"
},
{
"type": "string",
"format": "uuid"
}
]
}
},
"additionalProperties": true
}
}
}
}
}
}
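
To illustrate what this schema accepts, a hedged sketch that validates a minimal blueprint with the third-party jsonschema package; the output file name is an assumption, and the model label is assumed to be part of the enum generated by the schema command above:

# Sketch: validate a minimal blueprint dict against the generated schema.
# Assumes the schema rendered by the command above was written to
# "blueprint.schema.json" (file name assumed), so the "model" enum holds real labels.
from json import loads

from jsonschema import validate  # third-party package

with open("blueprint.schema.json", "r", encoding="utf-8") as schema_file:
    schema = loads(schema_file.read())

blueprint = {
    "version": 1,
    "metadata": {"name": "example-blueprint", "labels": {}},
    "context": {},
    "entries": [
        {
            "model": "authentik_flows.flow",
            "identifiers": {"slug": "example-flow"},
            "attrs": {"name": "Example flow"},
        }
    ],
}

validate(instance=blueprint, schema=schema)  # raises ValidationError on mismatch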

View File

@ -1,44 +0,0 @@
"""Managed objects manager"""
from importlib import import_module
from inspect import ismethod
from django.apps import AppConfig
from django.db import DatabaseError, InternalError, ProgrammingError
from structlog.stdlib import BoundLogger, get_logger
LOGGER = get_logger()
class ManagedAppConfig(AppConfig):
"""Basic reconciliation logic for apps"""
_logger: BoundLogger
def __init__(self, app_name: str, *args, **kwargs) -> None:
super().__init__(app_name, *args, **kwargs)
self._logger = get_logger().bind(app_name=app_name)
def ready(self) -> None:
self.reconcile()
return super().ready()
def import_module(self, path: str):
"""Load module"""
import_module(path)
def reconcile(self) -> None:
"""reconcile ourselves"""
prefix = "reconcile_"
for meth_name in dir(self):
meth = getattr(self, meth_name)
if not ismethod(meth):
continue
if not meth_name.startswith(prefix):
continue
name = meth_name.replace(prefix, "")
try:
self._logger.debug("Starting reconciler", name=name)
meth()
self._logger.debug("Successfully reconciled", name=name)
except (DatabaseError, ProgrammingError, InternalError) as exc:
self._logger.debug("Failed to run reconcile", name=name, exc=exc)

View File

@ -1,135 +0,0 @@
# Generated by Django 4.0.6 on 2022-07-31 17:35
import uuid
from glob import glob
from pathlib import Path
import django.contrib.postgres.fields
from dacite import from_dict
from django.apps.registry import Apps
from django.conf import settings
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from yaml import load
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM
from authentik.lib.config import CONFIG
def check_blueprint_v1_file(BlueprintInstance: type["BlueprintInstance"], path: Path):
"""Check if blueprint should be imported"""
from authentik.blueprints.models import BlueprintInstanceStatus
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
with open(path, "r", encoding="utf-8") as blueprint_file:
raw_blueprint = load(blueprint_file.read(), BlueprintLoader)
if not raw_blueprint:
return
metadata = raw_blueprint.get("metadata", None)
version = raw_blueprint.get("version", 1)
if version != 1:
return
blueprint_file.seek(0)
instance: BlueprintInstance = BlueprintInstance.objects.filter(path=path).first()
rel_path = path.relative_to(Path(CONFIG.y("blueprints_dir")))
meta = None
if metadata:
meta = from_dict(BlueprintMetadata, metadata)
if meta.labels.get(LABEL_AUTHENTIK_INSTANTIATE, "").lower() == "false":
return
if not instance:
instance = BlueprintInstance(
name=meta.name if meta else str(rel_path),
path=str(rel_path),
context={},
status=BlueprintInstanceStatus.UNKNOWN,
enabled=True,
managed_models=[],
last_applied_hash="",
metadata=metadata,
)
instance.save()
def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
BlueprintInstance = apps.get_model("authentik_blueprints", "BlueprintInstance")
Flow = apps.get_model("authentik_flows", "Flow")
db_alias = schema_editor.connection.alias
for file in glob(f"{CONFIG.y('blueprints_dir')}/**/*.yaml", recursive=True):
check_blueprint_v1_file(BlueprintInstance, Path(file))
for blueprint in BlueprintInstance.objects.using(db_alias).all():
# If we already have flows (and we should always run before flow migrations)
# then this is an existing install and we want to disable all blueprints
if Flow.objects.using(db_alias).all().exists():
blueprint.enabled = False
# System blueprints are always enabled
if blueprint.metadata.get("labels", {}).get(LABEL_AUTHENTIK_SYSTEM, "").lower() == "true":
blueprint.enabled = True
blueprint.save()
class Migration(migrations.Migration):
initial = True
dependencies = [("authentik_flows", "0001_initial")]
operations = [
migrations.CreateModel(
name="BlueprintInstance",
fields=[
("created", models.DateTimeField(auto_now_add=True)),
("last_updated", models.DateTimeField(auto_now=True)),
(
"managed",
models.TextField(
default=None,
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
null=True,
unique=True,
verbose_name="Managed by authentik",
),
),
(
"instance_uuid",
models.UUIDField(
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
),
),
("name", models.TextField()),
("metadata", models.JSONField(default=dict)),
("path", models.TextField()),
("context", models.JSONField(default=dict)),
("last_applied", models.DateTimeField(auto_now=True)),
("last_applied_hash", models.TextField()),
(
"status",
models.TextField(
choices=[
("successful", "Successful"),
("warning", "Warning"),
("error", "Error"),
("orphaned", "Orphaned"),
("unknown", "Unknown"),
],
default="unknown",
),
),
("enabled", models.BooleanField(default=True)),
(
"managed_models",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(), default=list, size=None
),
),
],
options={
"verbose_name": "Blueprint Instance",
"verbose_name_plural": "Blueprint Instances",
"unique_together": {("name", "path")},
},
),
migrations.RunPython(migration_blueprint_import),
]

View File

@ -1,105 +0,0 @@
"""Managed Object models"""
from pathlib import Path
from urllib.parse import urlparse
from uuid import uuid4
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils.translation import gettext_lazy as _
from requests import RequestException
from rest_framework.serializers import Serializer
from authentik.lib.config import CONFIG
from authentik.lib.models import CreatedUpdatedModel, SerializerModel
from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.http import get_http_session
class BlueprintRetrievalFailed(SentryIgnoredException):
"""Error raised when we're unable to fetch the blueprint contents, whether it be HTTP files
not being accessible or local files not being readable"""
class ManagedModel(models.Model):
"""Model which can be managed by authentik exclusively"""
managed = models.TextField(
default=None,
null=True,
verbose_name=_("Managed by authentik"),
help_text=_(
(
"Objects which are managed by authentik. These objects are created and updated "
"automatically. This is flag only indicates that an object can be overwritten by "
"migrations. You can still modify the objects via the API, but expect changes "
"to be overwritten in a later update."
)
),
unique=True,
)
class Meta:
abstract = True
class BlueprintInstanceStatus(models.TextChoices):
"""Instance status"""
SUCCESSFUL = "successful"
WARNING = "warning"
ERROR = "error"
ORPHANED = "orphaned"
UNKNOWN = "unknown"
class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
"""Instance of a single blueprint. Can be parameterized via context attribute when
blueprint in `path` has inputs."""
instance_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
name = models.TextField()
metadata = models.JSONField(default=dict)
path = models.TextField()
context = models.JSONField(default=dict)
last_applied = models.DateTimeField(auto_now=True)
last_applied_hash = models.TextField()
status = models.TextField(
choices=BlueprintInstanceStatus.choices, default=BlueprintInstanceStatus.UNKNOWN
)
enabled = models.BooleanField(default=True)
managed_models = ArrayField(models.TextField(), default=list)
def retrieve(self) -> str:
"""Retrieve blueprint contents"""
if urlparse(self.path).scheme != "":
try:
res = get_http_session().get(self.path, timeout=3, allow_redirects=True)
res.raise_for_status()
return res.text
except RequestException as exc:
raise BlueprintRetrievalFailed(exc) from exc
path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
with path.open("r", encoding="utf-8") as _file:
return _file.read()
@property
def serializer(self) -> Serializer:
from authentik.blueprints.api import BlueprintInstanceSerializer
return BlueprintInstanceSerializer
def __str__(self) -> str:
return f"Blueprint Instance {self.name}"
class Meta:
verbose_name = _("Blueprint Instance")
verbose_name_plural = _("Blueprint Instances")
unique_together = (
(
"name",
"path",
),
)
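
A hedged usage sketch of the retrieval logic above: the same model transparently handles paths relative to blueprints_dir and HTTP(S) URLs (both values below are placeholders):

# Sketch only; the local path and URL are placeholders.
# Run inside a configured authentik environment.
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed

local = BlueprintInstance(path="default/90-default-tenant.yaml")
remote = BlueprintInstance(path="https://example.com/blueprints/example.yaml")

for instance in (local, remote):
    try:
        content = instance.retrieve()  # raw YAML as a string
    except (BlueprintRetrievalFailed, OSError) as exc:
        print(f"could not fetch {instance.path}: {exc}")
    else:
        print(f"{instance.path}: {len(content)} characters")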

View File

@ -1,12 +0,0 @@
"""blueprint Settings"""
from celery.schedules import crontab
from authentik.lib.utils.time import fqdn_rand
CELERY_BEAT_SCHEDULE = {
"blueprints_v1_discover": {
"task": "authentik.blueprints.v1.tasks.blueprints_discover",
"schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"),
"options": {"queue": "authentik_scheduled"},
},
}

View File

@ -1,48 +0,0 @@
"""Blueprint helpers"""
from functools import wraps
from pathlib import Path
from typing import Callable
from django.apps import apps
from authentik.blueprints.manager import ManagedAppConfig
from authentik.blueprints.models import BlueprintInstance
from authentik.lib.config import CONFIG
def apply_blueprint(*files: str):
"""Apply blueprint before test"""
from authentik.blueprints.v1.importer import Importer
def wrapper_outer(func: Callable):
"""Apply blueprint before test"""
@wraps(func)
def wrapper(*args, **kwargs):
for file in files:
content = BlueprintInstance(path=file).retrieve()
Importer(content).apply()
return func(*args, **kwargs)
return wrapper
return wrapper_outer
def reconcile_app(app_name: str):
"""Re-reconcile AppConfig methods"""
def wrapper_outer(func: Callable):
"""Re-reconcile AppConfig methods"""
@wraps(func)
def wrapper(*args, **kwargs):
config = apps.get_app_config(app_name)
if isinstance(config, ManagedAppConfig):
config.reconcile()
return func(*args, **kwargs)
return wrapper
return wrapper_outer
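
A short sketch of how these two helpers combine in a test; the test body mirrors the packaged-blueprint test further down in this changeset, while the reconcile_app usage is illustrative:

# Illustrative test using both helpers.
from django.test import TestCase

from authentik.blueprints.tests import apply_blueprint, reconcile_app
from authentik.tenants.models import Tenant


class ExampleBlueprintTest(TestCase):
    @apply_blueprint("default/90-default-tenant.yaml")
    @reconcile_app("authentik_blueprints")
    def test_default_tenant(self):
        """The blueprint is imported before the test body runs"""
        self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())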

View File

@ -1,34 +0,0 @@
"""authentik managed models tests"""
from typing import Callable, Type
from django.apps import apps
from django.test import TestCase
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.lib.models import SerializerModel
class TestModels(TestCase):
"""Test Models"""
def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
"""Test serializer"""
def tester(self: TestModels):
if test_model._meta.abstract:
return
model_class = test_model()
self.assertTrue(isinstance(model_class, SerializerModel))
self.assertIsNotNone(model_class.serializer)
return tester
for app in apps.get_app_configs():
if not app.label.startswith("authentik"):
continue
for model in app.get_models():
if not is_model_allowed(model):
continue
setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model))

View File

@ -1,36 +0,0 @@
"""test packaged blueprints"""
from pathlib import Path
from typing import Callable
from django.test import TransactionTestCase
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.tests import apply_blueprint
from authentik.blueprints.v1.importer import Importer
from authentik.tenants.models import Tenant
class TestPackaged(TransactionTestCase):
"""Empty class, test methods are added dynamically"""
@apply_blueprint("default/90-default-tenant.yaml")
def test_decorator_static(self):
"""Test @apply_blueprint decorator"""
self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())
def blueprint_tester(file_name: Path) -> Callable:
"""This is used instead of subTest for better visibility"""
def tester(self: TestPackaged):
base = Path("blueprints/")
rel_path = Path(file_name).relative_to(base)
importer = Importer(BlueprintInstance(path=str(rel_path)).retrieve())
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
return tester
for blueprint_file in Path("blueprints/").glob("**/*.yaml"):
setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file))

View File

@ -1,45 +0,0 @@
"""Test blueprints v1 api"""
from json import loads
from tempfile import NamedTemporaryFile, mkdtemp
from django.urls import reverse
from rest_framework.test import APITestCase
from yaml import dump
from authentik.core.tests.utils import create_test_admin_user
from authentik.lib.config import CONFIG
TMP = mkdtemp("authentik-blueprints")
class TestBlueprintsV1API(APITestCase):
"""Test Blueprints API"""
def setUp(self) -> None:
self.user = create_test_admin_user()
self.client.force_login(self.user)
@CONFIG.patch("blueprints_dir", TMP)
def test_api_available(self):
"""Test valid file"""
with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
file.write(
dump(
{
"version": 1,
"entries": [],
}
)
)
file.flush()
res = self.client.get(reverse("authentik_api:blueprintinstance-available"))
self.assertEqual(res.status_code, 200)
response = loads(res.content.decode())
self.assertEqual(len(response), 1)
self.assertEqual(
response[0]["hash"],
(
"e52bb445b03cd36057258dc9f0ce0fbed8278498ee1470e45315293e5f026d1bd1f9b352"
"6871c0003f5c07be5c3316d9d4a08444bd8fed1b3f03294e51e44522"
),
)

View File

@ -1,148 +0,0 @@
"""Test blueprints v1 tasks"""
from hashlib import sha512
from tempfile import NamedTemporaryFile, mkdtemp
from django.test import TransactionTestCase
from yaml import dump
from authentik.blueprints.models import BlueprintInstance, BlueprintInstanceStatus
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discover, blueprints_find
from authentik.lib.config import CONFIG
from authentik.lib.generators import generate_id
TMP = mkdtemp("authentik-blueprints")
class TestBlueprintsV1Tasks(TransactionTestCase):
"""Test Blueprints v1 Tasks"""
@CONFIG.patch("blueprints_dir", TMP)
def test_invalid_file_syntax(self):
"""Test syntactically invalid file"""
with NamedTemporaryFile(suffix=".yaml", dir=TMP) as file:
file.write(b"{")
file.flush()
blueprints = blueprints_find()
self.assertEqual(blueprints, [])
@CONFIG.patch("blueprints_dir", TMP)
def test_invalid_file_version(self):
"""Test invalid file"""
with NamedTemporaryFile(suffix=".yaml", dir=TMP) as file:
file.write(b"version: 2")
file.flush()
blueprints = blueprints_find()
self.assertEqual(blueprints, [])
@CONFIG.patch("blueprints_dir", TMP)
def test_valid(self):
"""Test valid file"""
blueprint_id = generate_id()
with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
file.write(
dump(
{
"version": 1,
"entries": [],
"metadata": {
"name": blueprint_id,
},
}
)
)
file.seek(0)
file_hash = sha512(file.read().encode()).hexdigest()
file.flush()
blueprints_discover() # pylint: disable=no-value-for-parameter
instance = BlueprintInstance.objects.filter(name=blueprint_id).first()
self.assertEqual(instance.last_applied_hash, file_hash)
self.assertEqual(
instance.metadata,
{
"name": blueprint_id,
"labels": {},
},
)
@CONFIG.patch("blueprints_dir", TMP)
def test_valid_updated(self):
"""Test valid file"""
with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
file.write(
dump(
{
"version": 1,
"entries": [],
}
)
)
file.flush()
blueprints_discover() # pylint: disable=no-value-for-parameter
self.assertEqual(
BlueprintInstance.objects.first().last_applied_hash,
(
"e52bb445b03cd36057258dc9f0ce0fbed8278498ee1470e45315293e5f026d1b"
"d1f9b3526871c0003f5c07be5c3316d9d4a08444bd8fed1b3f03294e51e44522"
),
)
self.assertEqual(BlueprintInstance.objects.first().metadata, {})
file.write(
dump(
{
"version": 1,
"entries": [],
"metadata": {
"name": "foo",
},
}
)
)
file.flush()
blueprints_discover() # pylint: disable=no-value-for-parameter
self.assertEqual(
BlueprintInstance.objects.first().last_applied_hash,
(
"fc62fea96067da8592bdf90927246d0ca150b045447df93b0652a0e20a8bc327"
"681510b5db37ea98759c61f9a98dd2381f46a3b5a2da69dfb45158897f14e824"
),
)
self.assertEqual(
BlueprintInstance.objects.first().metadata,
{
"name": "foo",
"labels": {},
},
)
@CONFIG.patch("blueprints_dir", TMP)
def test_valid_disabled(self):
"""Test valid file"""
with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
file.write(
dump(
{
"version": 1,
"entries": [],
}
)
)
file.flush()
instance: BlueprintInstance = BlueprintInstance.objects.create(
name=generate_id(),
path=file.name,
enabled=False,
status=BlueprintInstanceStatus.UNKNOWN,
)
instance.refresh_from_db()
self.assertEqual(instance.last_applied_hash, "")
self.assertEqual(
instance.status,
BlueprintInstanceStatus.UNKNOWN,
)
apply_blueprint(instance.pk) # pylint: disable=no-value-for-parameter
instance.refresh_from_db()
self.assertEqual(instance.last_applied_hash, "")
self.assertEqual(
instance.status,
BlueprintInstanceStatus.UNKNOWN,
)

View File

@ -1,255 +0,0 @@
"""transfer common classes"""
from collections import OrderedDict
from dataclasses import asdict, dataclass, field, is_dataclass
from enum import Enum
from typing import Any, Optional
from uuid import UUID
from django.apps import apps
from django.db.models import Model, Q
from rest_framework.fields import Field
from rest_framework.serializers import Serializer
from yaml import SafeDumper, SafeLoader, ScalarNode, SequenceNode
from authentik.lib.models import SerializerModel
from authentik.lib.sentry import SentryIgnoredException
from authentik.policies.models import PolicyBindingModel
def get_attrs(obj: SerializerModel) -> dict[str, Any]:
"""Get object's attributes via their serializer, and convert it to a normal dict"""
serializer: Serializer = obj.serializer(obj)
data = dict(serializer.data)
for field_name, _field in serializer.fields.items():
_field: Field
if field_name not in data:
continue
if _field.read_only:
data.pop(field_name, None)
if _field.get_initial() == data.get(field_name, None):
data.pop(field_name, None)
if field_name.endswith("_set"):
data.pop(field_name, None)
return data
@dataclass
class BlueprintEntryState:
"""State of a single instance"""
instance: Optional[Model] = None
@dataclass
class BlueprintEntry:
"""Single entry of a blueprint"""
identifiers: dict[str, Any]
model: str
attrs: Optional[dict[str, Any]] = field(default_factory=dict)
# pylint: disable=invalid-name
id: Optional[str] = None
_state: BlueprintEntryState = field(default_factory=BlueprintEntryState)
@staticmethod
def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry":
"""Convert a SerializerModel instance to a blueprint Entry"""
identifiers = {
"pk": model.pk,
}
all_attrs = get_attrs(model)
for extra_identifier_name in extra_identifier_names:
identifiers[extra_identifier_name] = all_attrs.pop(extra_identifier_name)
return BlueprintEntry(
identifiers=identifiers,
model=f"{model._meta.app_label}.{model._meta.model_name}",
attrs=all_attrs,
)
def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any:
"""Check if we have any special tags that need handling"""
if isinstance(value, YAMLTag):
return value.resolve(self, blueprint)
if isinstance(value, dict):
for key, inner_value in value.items():
value[key] = self.tag_resolver(inner_value, blueprint)
if isinstance(value, list):
for idx, inner_value in enumerate(value):
value[idx] = self.tag_resolver(inner_value, blueprint)
return value
def get_attrs(self, blueprint: "Blueprint") -> dict[str, Any]:
"""Get attributes of this entry, with all yaml tags resolved"""
return self.tag_resolver(self.attrs, blueprint)
def get_identifiers(self, blueprint: "Blueprint") -> dict[str, Any]:
"""Get attributes of this entry, with all yaml tags resolved"""
return self.tag_resolver(self.identifiers, blueprint)
@dataclass
class BlueprintMetadata:
"""Optional blueprint metadata"""
name: str
labels: dict[str, str] = field(default_factory=dict)
@dataclass
class Blueprint:
"""Dataclass used for a full export"""
version: int = field(default=1)
entries: list[BlueprintEntry] = field(default_factory=list)
metadata: Optional[BlueprintMetadata] = field(default=None)
context: Optional[dict] = field(default_factory=dict)
class YAMLTag:
"""Base class for all YAML Tags"""
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
"""Implement yaml tag logic"""
raise NotImplementedError
class KeyOf(YAMLTag):
"""Reference another object by their ID"""
id_from: str
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
super().__init__()
self.id_from = node.value
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
for _entry in blueprint.entries:
if _entry.id == self.id_from and _entry._state.instance:
# Special handling for PolicyBindingModels, as they'll have a different PK
# which is used when creating policy bindings
if (
isinstance(_entry._state.instance, PolicyBindingModel)
and entry.model.lower() == "authentik_policies.policybinding"
):
return _entry._state.instance.pbm_uuid
return _entry._state.instance.pk
raise ValueError(
f"KeyOf: failed to find entry with `id` of `{self.id_from}` and a model instance"
)
class Context(YAMLTag):
"""Lookup key from instance context"""
key: str
default: Optional[Any]
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
super().__init__()
self.default = None
if isinstance(node, ScalarNode):
self.key = node.value
if isinstance(node, SequenceNode):
self.key = node.value[0].value
self.default = node.value[1].value
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
value = self.default
if self.key in blueprint.context:
value = blueprint.context[self.key]
return value
class Format(YAMLTag):
"""Format a string"""
format_string: str
args: list[Any]
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.format_string = node.value[0].value
self.args = []
for raw_node in node.value[1:]:
self.args.append(raw_node.value)
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
try:
return self.format_string % tuple(self.args)
except TypeError as exc:
raise EntryInvalidError(exc)
class Find(YAMLTag):
"""Find any object"""
model_name: str
conditions: list[list]
model_class: type[Model]
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.model_name = node.value[0].value
self.model_class = apps.get_model(*self.model_name.split("."))
self.conditions = []
for raw_node in node.value[1:]:
values = []
for node_values in raw_node.value:
values.append(loader.construct_object(node_values))
self.conditions.append(values)
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
query = Q()
for cond in self.conditions:
query &= Q(**{cond[0]: cond[1]})
instance = self.model_class.objects.filter(query).first()
if instance:
return instance.pk
return None
class BlueprintDumper(SafeDumper):
"""Dump dataclasses to yaml"""
default_flow_style = False
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.add_representer(UUID, lambda self, data: self.represent_str(str(data)))
self.add_representer(OrderedDict, lambda self, data: self.represent_dict(dict(data)))
self.add_representer(Enum, lambda self, data: self.represent_str(data.value))
def represent(self, data) -> None:
if is_dataclass(data):
def factory(items):
final_dict = dict(items)
final_dict.pop("_state", None)
return final_dict
data = asdict(data, dict_factory=factory)
return super().represent(data)
class BlueprintLoader(SafeLoader):
"""Loader for blueprints with custom tag support"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.add_constructor("!KeyOf", KeyOf)
self.add_constructor("!Find", Find)
self.add_constructor("!Context", Context)
self.add_constructor("!Format", Format)
class EntryInvalidError(SentryIgnoredException):
"""Error raised when an entry is invalid"""

View File

@ -1,140 +0,0 @@
"""Blueprint exporter"""
from typing import Iterator
from uuid import UUID
from django.apps import apps
from django.db.models import Q
from django.utils.timezone import now
from django.utils.translation import gettext as _
from yaml import dump
from authentik.blueprints.v1.common import (
Blueprint,
BlueprintDumper,
BlueprintEntry,
BlueprintMetadata,
)
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_GENERATED
from authentik.flows.models import Flow, FlowStageBinding, Stage
from authentik.lib.models import SerializerModel
from authentik.policies.models import Policy, PolicyBinding
from authentik.stages.prompt.models import PromptStage
class Exporter:
"""Export flow with attached stages into yaml"""
excluded_models = []
def __init__(self):
self.excluded_models = []
def get_entries(self) -> Iterator[BlueprintEntry]:
"""Get blueprint entries"""
for model in apps.get_models():
if not is_model_allowed(model):
continue
if model in self.excluded_models:
continue
if SerializerModel not in model.__mro__:
continue
for obj in model.objects.all():
yield BlueprintEntry.from_model(obj)
def _pre_export(self, blueprint: Blueprint):
"""Hook to run anything pre-export"""
def export(self) -> Blueprint:
"""Create a list of all objects and create a blueprint"""
blueprint = Blueprint()
self._pre_export(blueprint)
blueprint.metadata = BlueprintMetadata(
name=_("authentik Export - %(date)s" % {"date": str(now())}),
labels={
LABEL_AUTHENTIK_GENERATED: "true",
},
)
blueprint.entries = list(self.get_entries())
return blueprint
def export_to_string(self) -> str:
"""Call export and convert it to yaml"""
blueprint = self.export()
return dump(blueprint, Dumper=BlueprintDumper)
class FlowExporter(Exporter):
"""Exporter customised to only return objects related to `flow`"""
flow: Flow
with_policies: bool
with_stage_prompts: bool
pbm_uuids: list[UUID]
def __init__(self, flow: Flow):
super().__init__()
self.flow = flow
self.with_policies = True
self.with_stage_prompts = True
def _pre_export(self, blueprint: Blueprint):
if not self.with_policies:
return
self.pbm_uuids = [self.flow.pbm_uuid]
self.pbm_uuids += FlowStageBinding.objects.filter(target=self.flow).values_list(
"pbm_uuid", flat=True
)
def walk_stages(self) -> Iterator[BlueprintEntry]:
"""Convert all stages attached to self.flow into BlueprintEntry objects"""
stages = Stage.objects.filter(flow=self.flow).select_related().select_subclasses()
for stage in stages:
if isinstance(stage, PromptStage):
pass
yield BlueprintEntry.from_model(stage, "name")
def walk_stage_bindings(self) -> Iterator[BlueprintEntry]:
"""Convert all bindings attached to self.flow into BlueprintEntry objects"""
bindings = FlowStageBinding.objects.filter(target=self.flow).select_related()
for binding in bindings:
yield BlueprintEntry.from_model(binding, "target", "stage", "order")
def walk_policies(self) -> Iterator[BlueprintEntry]:
"""Walk over all policies. This is done at the beginning of the export for stages that have
a direct foreign key to a policy."""
# Special case for PromptStage as that has a direct M2M to policy, we have to ensure
# all policies referenced in there we also include here
prompt_stages = PromptStage.objects.filter(flow=self.flow).values_list("pk", flat=True)
query = Q(bindings__in=self.pbm_uuids) | Q(promptstage__in=prompt_stages)
policies = Policy.objects.filter(query).select_related()
for policy in policies:
yield BlueprintEntry.from_model(policy)
def walk_policy_bindings(self) -> Iterator[BlueprintEntry]:
"""Walk over all policybindings relative to us. This is run at the end of the export, as
we are sure all objects exist now."""
bindings = PolicyBinding.objects.filter(target__in=self.pbm_uuids).select_related()
for binding in bindings:
yield BlueprintEntry.from_model(binding, "policy", "target", "order")
def walk_stage_prompts(self) -> Iterator[BlueprintEntry]:
"""Walk over all prompts associated with any PromptStages"""
prompt_stages = PromptStage.objects.filter(flow=self.flow)
for stage in prompt_stages:
for prompt in stage.fields.all():
yield BlueprintEntry.from_model(prompt)
def get_entries(self) -> Iterator[BlueprintEntry]:
entries = []
entries.append(BlueprintEntry.from_model(self.flow, "slug"))
if self.with_stage_prompts:
entries.extend(self.walk_stage_prompts())
if self.with_policies:
entries.extend(self.walk_policies())
entries.extend(self.walk_stages())
entries.extend(self.walk_stage_bindings())
if self.with_policies:
entries.extend(self.walk_policy_bindings())
return entries
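
A hedged sketch of using the two exporters from a shell attached to a running install; the flow slug is a placeholder:

# Sketch: export the whole install, or a single flow with its related objects.
from authentik.blueprints.v1.exporter import Exporter, FlowExporter
from authentik.flows.models import Flow

# Full export of every serializable, non-excluded model
print(Exporter().export_to_string())

# Export of one flow plus its stages, bindings, policies and prompts
flow = Flow.objects.get(slug="default-authentication-flow")  # placeholder slug
print(FlowExporter(flow).export_to_string())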

View File

@ -1,5 +0,0 @@
"""Blueprint labels"""
LABEL_AUTHENTIK_SYSTEM = "blueprints.goauthentik.io/system"
LABEL_AUTHENTIK_INSTANTIATE = "blueprints.goauthentik.io/instantiate"
LABEL_AUTHENTIK_GENERATED = "blueprints.goauthentik.io/generated"

View File

@ -1,174 +0,0 @@
"""v1 blueprints tasks"""
from dataclasses import asdict, dataclass, field
from hashlib import sha512
from pathlib import Path
from typing import Optional
from dacite import from_dict
from django.db import DatabaseError, InternalError, ProgrammingError
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from structlog.stdlib import get_logger
from yaml import load
from yaml.error import YAMLError
from authentik.blueprints.models import (
BlueprintInstance,
BlueprintInstanceStatus,
BlueprintRetrievalFailed,
)
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
from authentik.events.monitored_tasks import (
MonitoredTask,
TaskResult,
TaskResultStatus,
prefill_task,
)
from authentik.events.utils import sanitize_dict
from authentik.lib.config import CONFIG
from authentik.root.celery import CELERY_APP
LOGGER = get_logger()
@dataclass
class BlueprintFile:
"""Basic info about a blueprint file"""
path: str
version: int
hash: str
last_m: int
meta: Optional[BlueprintMetadata] = field(default=None)
@CELERY_APP.task(
throws=(DatabaseError, ProgrammingError, InternalError),
)
def blueprints_find_dict():
"""Find blueprints as `blueprints_find` does, but return a safe dict"""
blueprints = []
for blueprint in blueprints_find():
blueprints.append(sanitize_dict(asdict(blueprint)))
return blueprints
def blueprints_find():
"""Find blueprints and return valid ones"""
blueprints = []
root = Path(CONFIG.y("blueprints_dir"))
for file in root.glob("**/*.yaml"):
path = Path(file)
LOGGER.debug("found blueprint", path=str(path))
with open(path, "r", encoding="utf-8") as blueprint_file:
try:
raw_blueprint = load(blueprint_file.read(), BlueprintLoader)
except YAMLError as exc:
raw_blueprint = None
LOGGER.warning("failed to parse blueprint", exc=exc, path=str(path))
if not raw_blueprint:
continue
metadata = raw_blueprint.get("metadata", None)
version = raw_blueprint.get("version", 1)
if version != 1:
LOGGER.warning("invalid blueprint version", version=version, path=str(path))
continue
file_hash = sha512(path.read_bytes()).hexdigest()
blueprint = BlueprintFile(path.relative_to(root), version, file_hash, path.stat().st_mtime)
blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
blueprints.append(blueprint)
LOGGER.info(
"parsed & loaded blueprint",
hash=file_hash,
path=str(path),
)
return blueprints
@CELERY_APP.task(
throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
)
@prefill_task
def blueprints_discover(self: MonitoredTask):
"""Find blueprints and check if they need to be created in the database"""
count = 0
for blueprint in blueprints_find():
check_blueprint_v1_file(blueprint)
count += 1
self.set_status(
TaskResult(
TaskResultStatus.SUCCESSFUL,
messages=[_("Successfully imported %(count)d files." % {"count": count})],
)
)
def check_blueprint_v1_file(blueprint: BlueprintFile):
"""Check if blueprint should be imported"""
instance: BlueprintInstance = BlueprintInstance.objects.filter(path=blueprint.path).first()
if (
blueprint.meta
and blueprint.meta.labels.get(LABEL_AUTHENTIK_INSTANTIATE, "").lower() == "false"
):
return
if not instance:
instance = BlueprintInstance(
name=blueprint.meta.name if blueprint.meta else str(blueprint.path),
path=blueprint.path,
context={},
status=BlueprintInstanceStatus.UNKNOWN,
enabled=True,
managed_models=[],
metadata={},
)
instance.save()
if instance.last_applied_hash != blueprint.hash:
instance.metadata = asdict(blueprint.meta) if blueprint.meta else {}
instance.save()
apply_blueprint.delay(instance.pk.hex)
@CELERY_APP.task(
bind=True,
base=MonitoredTask,
)
def apply_blueprint(self: MonitoredTask, instance_pk: str):
"""Apply single blueprint"""
self.set_uid(instance_pk)
self.save_on_success = False
try:
instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first()
if not instance or not instance.enabled:
return
blueprint_content = instance.retrieve()
file_hash = sha512(blueprint_content.encode()).hexdigest()
importer = Importer(blueprint_content, instance.context)
valid, logs = importer.validate()
if not valid:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskResult(TaskResultStatus.ERROR, [x["event"] for x in logs]))
return
applied = importer.apply()
if not applied:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskResult(TaskResultStatus.ERROR, "Failed to apply"))
return
instance.status = BlueprintInstanceStatus.SUCCESSFUL
instance.last_applied_hash = file_hash
instance.last_applied = now()
instance.save()
self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL))
except (
DatabaseError,
ProgrammingError,
InternalError,
IOError,
BlueprintRetrievalFailed,
) as exc:
instance.status = BlueprintInstanceStatus.ERROR
instance.save()
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
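
A short sketch of triggering these tasks by hand, mirroring how the app config and the blueprints API enqueue them elsewhere in this changeset:

# Sketch: enqueue discovery, then re-apply a single instance and wait for it.
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discover

# Scan blueprints_dir and create/update BlueprintInstance rows as needed
blueprints_discover.delay()

# Re-apply one instance, blocking until the worker finishes
instance = BlueprintInstance.objects.first()
if instance:
    apply_blueprint.delay(str(instance.pk)).get()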

View File

@ -1,15 +1,17 @@
"""Application API Views"""
from typing import Optional
from django.core.cache import cache
from django.db.models import QuerySet
from django.http.response import HttpResponseBadRequest
from django.shortcuts import get_object_or_404
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
from guardian.shortcuts import get_objects_for_user
from drf_spectacular.utils import (
OpenApiParameter,
OpenApiResponse,
extend_schema,
inline_serializer,
)
from rest_framework.decorators import action
from rest_framework.fields import ReadOnlyField, SerializerMethodField
from rest_framework.fields import BooleanField, CharField, FileField, ReadOnlyField
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.response import Response
@ -17,16 +19,13 @@ from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter
from structlog.stdlib import get_logger
from structlog.testing import capture_logs
from authentik.admin.api.metrics import CoordinateSerializer
from authentik.admin.api.metrics import CoordinateSerializer, get_events_per_1h
from authentik.api.decorators import permission_required
from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import FilePathSerializer, FileUploadSerializer
from authentik.core.models import Application, User
from authentik.events.models import EventAction
from authentik.events.utils import sanitize_dict
from authentik.policies.api.exec import PolicyTestResultSerializer
from authentik.policies.engine import PolicyEngine
from authentik.policies.types import PolicyResult
@ -43,16 +42,11 @@ def user_app_cache_key(user_pk: str) -> str:
class ApplicationSerializer(ModelSerializer):
"""Application Serializer"""
launch_url = SerializerMethodField()
provider_obj = ProviderSerializer(source="get_provider", required=False, read_only=True)
launch_url = ReadOnlyField(source="get_launch_url")
provider_obj = ProviderSerializer(source="get_provider", required=False)
meta_icon = ReadOnlyField(source="get_meta_icon")
def get_launch_url(self, app: Application) -> Optional[str]:
"""Allow formatting of launch URL"""
user = self.context["request"].user
return app.get_launch_url(user)
class Meta:
model = Application
@ -63,13 +57,11 @@ class ApplicationSerializer(ModelSerializer):
"provider",
"provider_obj",
"launch_url",
"open_in_new_tab",
"meta_launch_url",
"meta_icon",
"meta_description",
"meta_publisher",
"policy_engine_mode",
"group",
]
extra_kwargs = {
"meta_icon": {"read_only": True},
@ -79,7 +71,7 @@ class ApplicationSerializer(ModelSerializer):
class ApplicationViewSet(UsedByMixin, ModelViewSet):
"""Application Viewset"""
queryset = Application.objects.all().prefetch_related("provider")
queryset = Application.objects.all()
serializer_class = ApplicationSerializer
search_fields = [
"name",
@ -87,15 +79,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
"meta_launch_url",
"meta_description",
"meta_publisher",
"group",
]
filterset_fields = [
"name",
"slug",
"meta_launch_url",
"meta_description",
"meta_publisher",
"group",
]
lookup_field = "slug"
ordering = ["name"]
@ -139,25 +122,15 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
# If the current user is superuser, they can set `for_user`
for_user = request.user
if request.user.is_superuser and "for_user" in request.query_params:
try:
for_user = get_object_or_404(User, pk=request.query_params.get("for_user"))
except ValueError:
return HttpResponseBadRequest("for_user must be numerical")
for_user = get_object_or_404(User, pk=request.query_params.get("for_user"))
engine = PolicyEngine(application, for_user, request)
engine.use_cache = False
with capture_logs() as logs:
engine.build()
result = engine.result
engine.build()
result = engine.result
response = PolicyTestResultSerializer(PolicyResult(False))
if result.passing:
response = PolicyTestResultSerializer(PolicyResult(True))
if request.user.is_superuser:
log_messages = []
for log in logs:
if log.get("process", "") == "PolicyProcess":
continue
log_messages.append(sanitize_dict(log))
result.log_messages = log_messages
response = PolicyTestResultSerializer(result)
return Response(response.data)
@ -174,7 +147,9 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
"""Custom list method that checks Policy based access instead of guardian"""
should_cache = request.GET.get("search", "") == ""
superuser_full_list = str(request.GET.get("superuser_full_list", "false")).lower() == "true"
superuser_full_list = (
str(request.GET.get("superuser_full_list", "false")).lower() == "true"
)
if superuser_full_list and request.user.is_superuser:
return super().list(request)
@ -204,7 +179,13 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
@permission_required("authentik_core.change_application")
@extend_schema(
request={
"multipart/form-data": FileUploadSerializer,
"multipart/form-data": inline_serializer(
"SetIcon",
fields={
"file": FileField(required=False),
"clear": BooleanField(default=False),
},
)
},
responses={
200: OpenApiResponse(description="Success"),
@ -236,7 +217,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
@permission_required("authentik_core.change_application")
@extend_schema(
request=FilePathSerializer,
request=inline_serializer("SetIconURL", fields={"url": CharField()}),
responses={
200: OpenApiResponse(description="Success"),
400: OpenApiResponse(description="Bad request"),
@ -259,7 +240,9 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
app.save()
return Response({})
@permission_required("authentik_core.view_application", ["authentik_events.view_event"])
@permission_required(
"authentik_core.view_application", ["authentik_events.view_event"]
)
@extend_schema(responses={200: CoordinateSerializer(many=True)})
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=unused-argument
@ -267,10 +250,8 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
"""Metrics for application logins"""
app = self.get_object()
return Response(
get_objects_for_user(request.user, "authentik_events.view_event")
.filter(
get_events_per_1h(
action=EventAction.AUTHORIZE_APPLICATION,
context__authorized_application__pk=app.pk.hex,
)
.get_events_per_hour()
)

View File

@ -11,7 +11,6 @@ from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import GenericViewSet
from ua_parser import user_agent_parser
from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.core.models import AuthenticatedSession
from authentik.events.geo import GEOIP_READER, GeoIPDict
@ -69,7 +68,9 @@ class AuthenticatedSessionSerializer(ModelSerializer):
"""Get parsed user agent"""
return user_agent_parser.Parse(instance.last_user_agent)
def get_geo_ip(self, instance: AuthenticatedSession) -> Optional[GeoIPDict]: # pragma: no cover
def get_geo_ip(
self, instance: AuthenticatedSession
) -> Optional[GeoIPDict]: # pragma: no cover
"""Get parsed user agent"""
return GEOIP_READER.city_dict(instance.last_ip)
@ -103,8 +104,11 @@ class AuthenticatedSessionViewSet(
search_fields = ["user__username", "last_ip", "last_user_agent"]
filterset_fields = ["user__username", "last_ip", "last_user_agent"]
ordering = ["user__username"]
permission_classes = [OwnerSuperuserPermissions]
filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
filter_backends = [
DjangoFilterBackend,
OrderingFilter,
SearchFilter,
]
def get_queryset(self):
user = self.request.user if self.request else get_anonymous_user()

View File

@ -1,68 +0,0 @@
"""Authenticator Devices API Views"""
from django_otp import device_classes, devices_for_user
from django_otp.models import Device
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema
from rest_framework.fields import BooleanField, CharField, IntegerField, SerializerMethodField
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
from authentik.core.api.utils import MetaNameSerializer
class DeviceSerializer(MetaNameSerializer):
"""Serializer for Duo authenticator devices"""
pk = IntegerField()
name = CharField()
type = SerializerMethodField()
confirmed = BooleanField()
def get_type(self, instance: Device) -> str:
"""Get type of device"""
return instance._meta.label
class DeviceViewSet(ViewSet):
"""Viewset for authenticator devices"""
serializer_class = DeviceSerializer
permission_classes = [IsAuthenticated]
@extend_schema(responses={200: DeviceSerializer(many=True)})
def list(self, request: Request) -> Response:
"""Get all devices for current user"""
devices = devices_for_user(request.user)
return Response(DeviceSerializer(devices, many=True).data)
class AdminDeviceViewSet(ViewSet):
"""Viewset for authenticator devices"""
serializer_class = DeviceSerializer
permission_classes = [IsAdminUser]
def get_devices(self, **kwargs):
"""Get all devices in all child classes"""
for model in device_classes():
device_set = model.objects.filter(**kwargs)
yield from device_set
@extend_schema(
parameters=[
OpenApiParameter(
name="user",
location=OpenApiParameter.QUERY,
type=OpenApiTypes.INT,
)
],
responses={200: DeviceSerializer(many=True)},
)
def list(self, request: Request) -> Response:
"""Get all devices for current user"""
kwargs = {}
if "user" in request.query_params:
kwargs = {"user": request.query_params["user"]}
return Response(DeviceSerializer(self.get_devices(**kwargs), many=True).data)

View File

@ -1,11 +1,9 @@
"""Groups API Viewset"""
from json import loads
from django.db.models.query import QuerySet
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
from django_filters.filters import ModelMultipleChoiceFilter
from django_filters.filterset import FilterSet
from rest_framework.fields import CharField, IntegerField, JSONField
from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
from rest_framework.fields import BooleanField, CharField, JSONField
from rest_framework.serializers import ListSerializer, ModelSerializer
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter
@ -17,6 +15,7 @@ from authentik.core.models import Group, User
class GroupMemberSerializer(ModelSerializer):
"""Stripped down user serializer to show relevant users for groups"""
is_superuser = BooleanField(read_only=True)
avatar = CharField(read_only=True)
attributes = JSONField(validators=[is_dict], required=False)
uid = CharField(read_only=True)
@ -30,6 +29,7 @@ class GroupMemberSerializer(ModelSerializer):
"name",
"is_active",
"last_login",
"is_superuser",
"email",
"avatar",
"attributes",
@ -44,41 +44,24 @@ class GroupSerializer(ModelSerializer):
users_obj = ListSerializer(
child=GroupMemberSerializer(), read_only=True, source="users", required=False
)
parent_name = CharField(source="parent.name", read_only=True)
num_pk = IntegerField(read_only=True)
class Meta:
model = Group
fields = [
"pk",
"num_pk",
"name",
"is_superuser",
"parent",
"parent_name",
"users",
"attributes",
"users_obj",
]
extra_kwargs = {
"users": {
"default": list,
}
}
class GroupFilter(FilterSet):
"""Filter for groups"""
attributes = CharFilter(
field_name="attributes",
lookup_expr="",
label="Attributes",
method="filter_attributes",
)
members_by_username = ModelMultipleChoiceFilter(
field_name="users__username",
to_field_name="username",
@ -89,34 +72,16 @@ class GroupFilter(FilterSet):
queryset=User.objects.all(),
)
# pylint: disable=unused-argument
def filter_attributes(self, queryset, name, value):
"""Filter attributes by query args"""
try:
value = loads(value)
except ValueError:
raise ValidationError(detail="filter: failed to parse JSON")
if not isinstance(value, dict):
raise ValidationError(detail="filter: value must be key:value mapping")
qs = {}
for key, _value in value.items():
qs[f"attributes__{key}"] = _value
try:
_ = len(queryset.filter(**qs))
return queryset.filter(**qs)
except ValueError:
return queryset
class Meta:
model = Group
fields = ["name", "is_superuser", "members_by_pk", "attributes", "members_by_username"]
fields = ["name", "is_superuser", "members_by_pk", "members_by_username"]
class GroupViewSet(UsedByMixin, ModelViewSet):
"""Group Viewset"""
queryset = Group.objects.all().select_related("parent").prefetch_related("users")
queryset = Group.objects.all()
serializer_class = GroupSerializer
search_fields = ["name", "is_superuser"]
filterset_class = GroupFilter

View File

@ -14,12 +14,16 @@ from rest_framework.serializers import ModelSerializer, SerializerMethodField
from rest_framework.viewsets import GenericViewSet
from authentik.api.decorators import permission_required
from authentik.blueprints.api import ManagedSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import MetaNameSerializer, PassiveSerializer, TypeCreateSerializer
from authentik.core.api.utils import (
MetaNameSerializer,
PassiveSerializer,
TypeCreateSerializer,
)
from authentik.core.expression import PropertyMappingEvaluator
from authentik.core.models import PropertyMapping
from authentik.lib.utils.reflection import all_subclasses
from authentik.managed.api import ManagedSerializer
from authentik.policies.api.exec import PolicyTestSerializer
@ -56,7 +60,6 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri
"component",
"verbose_name",
"verbose_name_plural",
"meta_model_name",
]
@ -138,7 +141,9 @@ class PropertyMappingViewSet(
self.request,
**test_params.validated_data.get("context", {}),
)
response_data["result"] = dumps(result, indent=(4 if format_result else None))
response_data["result"] = dumps(
result, indent=(4 if format_result else None)
)
except Exception as exc: # pylint: disable=broad-except
response_data["result"] = str(exc)
response_data["successful"] = False

View File

@ -43,7 +43,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
"assigned_application_name",
"verbose_name",
"verbose_name_plural",
"meta_model_name",
]

View File

@ -1,21 +1,18 @@
"""Source API Views"""
from typing import Iterable
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import extend_schema
from rest_framework import mixins
from rest_framework.decorators import action
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer, ReadOnlyField, SerializerMethodField
from rest_framework.serializers import ModelSerializer, SerializerMethodField
from rest_framework.viewsets import GenericViewSet
from structlog.stdlib import get_logger
from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
from authentik.core.models import Source, UserSourceConnection
from authentik.core.models import Source
from authentik.core.types import UserSettingSerializer
from authentik.lib.utils.reflection import all_subclasses
from authentik.policies.engine import PolicyEngine
@ -26,7 +23,6 @@ LOGGER = get_logger()
class SourceSerializer(ModelSerializer, MetaNameSerializer):
"""Source Serializer"""
managed = ReadOnlyField()
component = SerializerMethodField()
def get_component(self, obj: Source) -> str:
@ -49,11 +45,8 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
"component",
"verbose_name",
"verbose_name_plural",
"meta_model_name",
"policy_engine_mode",
"user_matching_mode",
"managed",
"user_path_template",
]
@ -69,8 +62,6 @@ class SourceViewSet(
queryset = Source.objects.none()
serializer_class = SourceSerializer
lookup_field = "slug"
search_fields = ["slug", "name"]
filterset_fields = ["slug", "name", "managed"]
def get_queryset(self): # pragma: no cover
return Source.objects.select_subclasses()
@ -83,8 +74,6 @@ class SourceViewSet(
for subclass in all_subclasses(self.queryset.model):
subclass: Source
component = ""
if len(subclass.__subclasses__()) > 0:
continue
if subclass._meta.abstract:
component = subclass.__bases__[0]().component
else:
@ -104,57 +93,21 @@ class SourceViewSet(
@action(detail=False, pagination_class=None, filter_backends=[])
def user_settings(self, request: Request) -> Response:
"""Get all sources the user can configure"""
_all_sources: Iterable[Source] = (
Source.objects.filter(enabled=True).select_subclasses().order_by("name")
)
_all_sources: Iterable[Source] = Source.objects.filter(
enabled=True
).select_subclasses()
matching_sources: list[UserSettingSerializer] = []
for source in _all_sources:
user_settings = source.ui_user_settings()
user_settings = source.ui_user_settings
if not user_settings:
continue
policy_engine = PolicyEngine(source, request.user, request)
policy_engine.build()
if not policy_engine.passing:
continue
source_settings = source.ui_user_settings()
source_settings = source.ui_user_settings
source_settings.initial_data["object_uid"] = source.slug
if not source_settings.is_valid():
LOGGER.warning(source_settings.errors)
matching_sources.append(source_settings.validated_data)
return Response(matching_sources)
class UserSourceConnectionSerializer(SourceSerializer):
"""OAuth Source Serializer"""
source = SourceSerializer(read_only=True)
class Meta:
model = UserSourceConnection
fields = [
"pk",
"user",
"source",
"created",
]
extra_kwargs = {
"user": {"read_only": True},
"created": {"read_only": True},
}
class UserSourceConnectionViewSet(
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
UsedByMixin,
mixins.ListModelMixin,
GenericViewSet,
):
"""User-source connection Viewset"""
queryset = UserSourceConnection.objects.all()
serializer_class = UserSourceConnectionSerializer
permission_classes = [OwnerSuperuserPermissions]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
ordering = ["pk"]

View File

@ -1,42 +1,26 @@
"""Tokens API Viewset"""
from typing import Any
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import OpenApiResponse, extend_schema, inline_serializer
from guardian.shortcuts import assign_perm, get_anonymous_user
from django.http.response import Http404
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet
from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.api.decorators import permission_required
from authentik.blueprints.api import ManagedSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.users import UserSerializer
from authentik.core.api.utils import PassiveSerializer
from authentik.core.models import USER_ATTRIBUTE_TOKEN_EXPIRING, Token, TokenIntents
from authentik.events.models import Event, EventAction
from authentik.events.utils import model_to_dict
from authentik.managed.api import ManagedSerializer
class TokenSerializer(ManagedSerializer, ModelSerializer):
"""Token Serializer"""
user_obj = UserSerializer(required=False, source="user", read_only=True)
def validate(self, attrs: dict[Any, str]) -> dict[Any, str]:
"""Ensure only API or App password tokens are created."""
request: Request = self.context["request"]
attrs.setdefault("user", request.user)
attrs.setdefault("intent", TokenIntents.INTENT_API)
if attrs.get("intent") not in [TokenIntents.INTENT_API, TokenIntents.INTENT_APP_PASSWORD]:
raise ValidationError(f"Invalid intent {attrs.get('intent')}")
return attrs
user = UserSerializer(required=False)
class Meta:
@ -47,14 +31,11 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):
"identifier",
"intent",
"user",
"user_obj",
"description",
"expires",
"expiring",
]
extra_kwargs = {
"user": {"required": False},
}
depth = 2
class TokenViewSerializer(PassiveSerializer):
@ -82,27 +63,17 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
"description",
"expires",
"expiring",
"managed",
]
ordering = ["identifier", "expires"]
permission_classes = [OwnerSuperuserPermissions]
filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
def get_queryset(self):
user = self.request.user if self.request else get_anonymous_user()
if user.is_superuser:
return super().get_queryset()
return super().get_queryset().filter(user=user.pk)
ordering = ["expires"]
def perform_create(self, serializer: TokenSerializer):
if not self.request.user.is_superuser:
instance = serializer.save(
user=self.request.user,
expiring=self.request.user.attributes.get(USER_ATTRIBUTE_TOKEN_EXPIRING, True),
)
assign_perm("authentik_core.view_token_key", self.request.user, instance)
return instance
return super().perform_create(serializer)
serializer.save(
user=self.request.user,
intent=TokenIntents.INTENT_API,
expiring=self.request.user.attributes.get(
USER_ATTRIBUTE_TOKEN_EXPIRING, True
),
)
@permission_required("authentik_core.view_token_key")
@extend_schema(
@ -111,39 +82,14 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
404: OpenApiResponse(description="Token not found or expired"),
}
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["GET"])
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=unused-argument
def view_key(self, request: Request, identifier: str) -> Response:
"""Return token key and log access"""
token: Token = self.get_object()
Event.new(EventAction.SECRET_VIEW, secret=token).from_http(request) # noqa # nosec
return Response(TokenViewSerializer({"key": token.key}).data)
@permission_required("authentik_core.set_token_key")
@extend_schema(
request=inline_serializer(
"TokenSetKey",
{
"key": CharField(),
},
),
responses={
204: OpenApiResponse(description="Successfully changed key"),
400: OpenApiResponse(description="Missing key"),
404: OpenApiResponse(description="Token not found or expired"),
},
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
# pylint: disable=unused-argument
def set_key(self, request: Request, identifier: str) -> Response:
"""Return token key and log access"""
token: Token = self.get_object()
key = request.POST.get("key")
if not key:
return Response(status=400)
token.key = key
token.save()
Event.new(EventAction.MODEL_UPDATED, model=model_to_dict(token)).from_http(
if token.is_expired:
raise Http404
Event.new(EventAction.SECRET_VIEW, secret=token).from_http( # noqa # nosec
request
) # noqa # nosec
return Response(status=204)
)
return Response(TokenViewSerializer({"key": token.key}).data)
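One side of the hunk above guards view_key with a token-expiry check before returning the key. A minimal sketch of that guard, using only names that appear elsewhere in this diff (Token.is_expired, Http404); the helper name is illustrative:

    # Sketch of the expiry guard applied in view_key above.
    from django.http.response import Http404
    from authentik.core.models import Token

    def get_token_key_or_404(identifier: str) -> str:
        token = Token.objects.filter(identifier=identifier).first()
        if not token or token.is_expired:
            # missing and expired tokens are both answered with 404
            raise Http404
        return token.key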


@ -79,7 +79,9 @@ class UsedByMixin:
).all():
# Only merge shadows on first object
if first_object:
shadows += getattr(manager.model._meta, "authentik_used_by_shadows", [])
shadows += getattr(
manager.model._meta, "authentik_used_by_shadows", []
)
first_object = False
serializer = UsedBySerializer(
data={


@ -1,78 +1,40 @@
"""User API Views"""
from datetime import timedelta
from json import loads
from typing import Any, Optional
from django.contrib.auth import update_session_auth_hash
from django.db.models.query import QuerySet
from django.db.transaction import atomic
from django.db.utils import IntegrityError
from django.urls import reverse_lazy
from django.utils.http import urlencode
from django.utils.text import slugify
from django.utils.timezone import now
from django.utils.translation import gettext as _
from django_filters.filters import BooleanFilter, CharFilter, ModelMultipleChoiceFilter
from django_filters.filters import BooleanFilter, CharFilter
from django_filters.filterset import FilterSet
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import (
OpenApiParameter,
OpenApiResponse,
extend_schema,
extend_schema_field,
inline_serializer,
)
from guardian.shortcuts import get_anonymous_user, get_objects_for_user
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.utils import get_anonymous_user
from rest_framework.decorators import action
from rest_framework.fields import (
CharField,
IntegerField,
JSONField,
ListField,
SerializerMethodField,
)
from rest_framework.fields import CharField, JSONField, SerializerMethodField
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import (
BooleanField,
ListSerializer,
ModelSerializer,
PrimaryKeyRelatedField,
ValidationError,
)
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter
from structlog.stdlib import get_logger
from authentik.admin.api.metrics import CoordinateSerializer
from authentik.admin.api.metrics import CoordinateSerializer, get_events_per_1h
from authentik.api.decorators import permission_required
from authentik.core.api.groups import GroupSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import LinkSerializer, PassiveSerializer, is_dict
from authentik.core.middleware import (
SESSION_KEY_IMPERSONATE_ORIGINAL_USER,
SESSION_KEY_IMPERSONATE_USER,
)
from authentik.core.models import (
USER_ATTRIBUTE_SA,
USER_ATTRIBUTE_TOKEN_EXPIRING,
USER_PATH_SERVICE_ACCOUNT,
Group,
Token,
TokenIntents,
User,
SESSION_IMPERSONATE_ORIGINAL_USER,
SESSION_IMPERSONATE_USER,
)
from authentik.core.models import Token, TokenIntents, User
from authentik.events.models import EventAction
from authentik.flows.models import FlowToken
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner
from authentik.flows.views.executor import QS_KEY_TOKEN
from authentik.stages.email.models import EmailStage
from authentik.stages.email.tasks import send_mails
from authentik.stages.email.utils import TemplateEmailMessage
from authentik.tenants.models import Tenant
LOGGER = get_logger()
class UserSerializer(ModelSerializer):
"""User Serializer"""
@ -80,21 +42,8 @@ class UserSerializer(ModelSerializer):
is_superuser = BooleanField(read_only=True)
avatar = CharField(read_only=True)
attributes = JSONField(validators=[is_dict], required=False)
groups = PrimaryKeyRelatedField(
allow_empty=True, many=True, source="ak_groups", queryset=Group.objects.all()
)
groups_obj = ListSerializer(child=GroupSerializer(), read_only=True, source="ak_groups")
groups = ListSerializer(child=GroupSerializer(), read_only=True, source="ak_groups")
uid = CharField(read_only=True)
username = CharField(max_length=150)
def validate_path(self, path: str) -> str:
"""Validate path"""
if path[:1] == "/" or path[-1] == "/":
raise ValidationError(_("No leading or trailing slashes allowed."))
for segment in path.split("/"):
if segment == "":
raise ValidationError(_("No empty segments in user path allowed."))
return path
class Meta:
@ -107,46 +56,21 @@ class UserSerializer(ModelSerializer):
"last_login",
"is_superuser",
"groups",
"groups_obj",
"email",
"avatar",
"attributes",
"uid",
"path",
]
extra_kwargs = {
"name": {"allow_blank": True},
}
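A few illustrative inputs for the validate_path method above:

    # Illustrative values for validate_path (not taken from the codebase):
    #   "users"                  -> accepted
    #   "users/service-accounts" -> accepted
    #   "/users"                 -> rejected: leading slash
    #   "users/"                 -> rejected: trailing slash
    #   "users//external"        -> rejected: empty segment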
class UserSelfSerializer(ModelSerializer):
"""User Serializer for information a user can retrieve about themselves"""
"""User Serializer for information a user can retrieve about themselves and
update about themselves"""
is_superuser = BooleanField(read_only=True)
avatar = CharField(read_only=True)
groups = SerializerMethodField()
groups = ListSerializer(child=GroupSerializer(), read_only=True, source="ak_groups")
uid = CharField(read_only=True)
settings = SerializerMethodField()
@extend_schema_field(
ListSerializer(
child=inline_serializer(
"UserSelfGroups",
{"name": CharField(read_only=True), "pk": CharField(read_only=True)},
)
)
)
def get_groups(self, _: User):
"""Return only the group names a user is member of"""
for group in self.instance.ak_groups.all():
yield {
"name": group.name,
"pk": group.pk,
}
def get_settings(self, user: User) -> dict[str, Any]:
"""Get user settings with tenant and group settings applied"""
return user.group_attributes(self._context["request"]).get("settings", {})
class Meta:
@ -161,11 +85,9 @@ class UserSelfSerializer(ModelSerializer):
"email",
"avatar",
"uid",
"settings",
]
extra_kwargs = {
"is_active": {"read_only": True},
"name": {"allow_blank": True},
}
@ -188,30 +110,22 @@ class UserMetricsSerializer(PassiveSerializer):
def get_logins_per_1h(self, _):
"""Get successful logins per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.LOGIN, user__pk=user.pk)
.get_events_per_hour()
)
return get_events_per_1h(action=EventAction.LOGIN, user__pk=user.pk)
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins_failed_per_1h(self, _):
"""Get failed logins per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.LOGIN_FAILED, context__username=user.username)
.get_events_per_hour()
return get_events_per_1h(
action=EventAction.LOGIN_FAILED, context__username=user.username
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_authorizations_per_1h(self, _):
"""Get failed logins per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk)
.get_events_per_hour()
return get_events_per_1h(
action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk
)
@ -226,22 +140,6 @@ class UsersFilter(FilterSet):
)
is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser")
uuid = CharFilter(field_name="uuid")
path = CharFilter(
field_name="path",
)
path_startswith = CharFilter(field_name="path", lookup_expr="startswith")
groups_by_name = ModelMultipleChoiceFilter(
field_name="ak_groups__name",
to_field_name="name",
queryset=Group.objects.all(),
)
groups_by_pk = ModelMultipleChoiceFilter(
field_name="ak_groups",
queryset=Group.objects.all(),
)
# pylint: disable=unused-argument
def filter_attributes(self, queryset, name, value):
@ -255,11 +153,7 @@ class UsersFilter(FilterSet):
qs = {}
for key, _value in value.items():
qs[f"attributes__{key}"] = _value
try:
_ = len(queryset.filter(**qs))
return queryset.filter(**qs)
except ValueError:
return queryset
return queryset.filter(**qs)
class Meta:
model = User
@ -270,8 +164,6 @@ class UsersFilter(FilterSet):
"is_active",
"is_superuser",
"attributes",
"groups_by_name",
"groups_by_pk",
]
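The attributes filter above turns a JSON query parameter into JSONField lookups. A rough sketch of the resulting query (the attribute key and value are illustrative):

    # Roughly what filter_attributes builds for ?attributes={"department": "it"}:
    from authentik.core.models import User

    lookups = {"attributes__department": "it"}  # one entry per key in the JSON blob
    users = User.objects.filter(**lookups)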
@ -279,149 +171,51 @@ class UserViewSet(UsedByMixin, ModelViewSet):
"""User Viewset"""
queryset = User.objects.none()
ordering = ["username"]
serializer_class = UserSerializer
search_fields = ["username", "name", "is_active", "email", "uuid"]
search_fields = ["username", "name", "is_active", "email"]
filterset_class = UsersFilter
def get_queryset(self): # pragma: no cover
return User.objects.all().exclude(pk=get_anonymous_user().pk)
def _create_recovery_link(self) -> tuple[Optional[str], Optional[Token]]:
"""Create a recovery link (when the current tenant has a recovery flow set),
that can either be shown to an admin or sent to the user directly"""
tenant: Tenant = self.request._request.tenant
# Check that there is a recovery flow, if not return an error
flow = tenant.flow_recovery
if not flow:
LOGGER.debug("No recovery flow set")
return None, None
user: User = self.get_object()
planner = FlowPlanner(flow)
planner.allow_empty_flows = True
plan = planner.plan(
self.request._request,
{
PLAN_CONTEXT_PENDING_USER: user,
},
)
token, __ = FlowToken.objects.update_or_create(
identifier=f"{user.uid}-password-reset",
defaults={
"user": user,
"flow": flow,
"_plan": FlowToken.pickle(plan),
},
)
querystring = urlencode({QS_KEY_TOKEN: token.key})
link = self.request.build_absolute_uri(
reverse_lazy("authentik_core:if-flow", kwargs={"flow_slug": flow.slug})
+ f"?{querystring}"
)
return link, token
@permission_required(None, ["authentik_core.add_user", "authentik_core.add_token"])
@extend_schema(
request=inline_serializer(
"UserServiceAccountSerializer",
{
"name": CharField(required=True),
"create_group": BooleanField(default=False),
},
),
responses={
200: inline_serializer(
"UserServiceAccountResponse",
{
"username": CharField(required=True),
"token": CharField(required=True),
"user_uid": CharField(required=True),
"user_pk": IntegerField(required=True),
"group_pk": CharField(required=False),
},
)
},
)
@action(detail=False, methods=["POST"], pagination_class=None, filter_backends=[])
def service_account(self, request: Request) -> Response:
"""Create a new user account that is marked as a service account"""
username = request.data.get("name")
create_group = request.data.get("create_group", False)
with atomic():
try:
user = User.objects.create(
username=username,
name=username,
attributes={USER_ATTRIBUTE_SA: True, USER_ATTRIBUTE_TOKEN_EXPIRING: False},
path=USER_PATH_SERVICE_ACCOUNT,
)
response = {
"username": user.username,
"user_uid": user.uid,
"user_pk": user.pk,
}
if create_group and self.request.user.has_perm("authentik_core.add_group"):
group = Group.objects.create(
name=username,
)
group.users.add(user)
response["group_pk"] = str(group.pk)
token = Token.objects.create(
identifier=slugify(f"service-account-{username}-password"),
intent=TokenIntents.INTENT_APP_PASSWORD,
user=user,
expires=now() + timedelta(days=360),
)
response["token"] = token.key
return Response(response)
except (IntegrityError) as exc:
return Response(data={"non_field_errors": [str(exc)]}, status=400)
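For reference, the service_account action above answers with a flat JSON object; an illustrative (not verbatim) response body, where group_pk is only present when create_group was requested:

    # Illustrative response of the service_account action (all values made up):
    example_response = {
        "username": "my-service-account",
        "user_uid": "<uid>",
        "user_pk": 42,
        "group_pk": "<group uuid>",
        "token": "<app-password token key>",
    }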
@extend_schema(responses={200: SessionUserSerializer(many=False)})
@action(detail=False, pagination_class=None, filter_backends=[])
# pylint: disable=invalid-name
def me(self, request: Request) -> Response:
"""Get information about current user"""
context = {"request": request}
serializer = SessionUserSerializer(
data={"user": UserSelfSerializer(instance=request.user, context=context).data}
data={"user": UserSerializer(request.user).data}
)
if SESSION_KEY_IMPERSONATE_USER in request._request.session:
if SESSION_IMPERSONATE_USER in request._request.session:
serializer.initial_data["original"] = UserSelfSerializer(
instance=request._request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER],
context=context,
request._request.session[SESSION_IMPERSONATE_ORIGINAL_USER]
).data
self.request.session.modified = True
return Response(serializer.initial_data)
serializer.is_valid()
return Response(serializer.data)
@permission_required("authentik_core.reset_user_password")
@extend_schema(
request=inline_serializer(
"UserPasswordSetSerializer",
{
"password": CharField(required=True),
},
),
responses={
204: OpenApiResponse(description="Successfully changed password"),
400: OpenApiResponse(description="Bad request"),
},
request=UserSelfSerializer, responses={200: SessionUserSerializer(many=False)}
)
@action(detail=True, methods=["POST"])
# pylint: disable=invalid-name, unused-argument
def set_password(self, request: Request, pk: int) -> Response:
"""Set password for user"""
user: User = self.get_object()
try:
user.set_password(request.data.get("password"))
user.save()
except (ValidationError, IntegrityError) as exc:
LOGGER.debug("Failed to set password", exc=exc)
return Response(status=400)
if user.pk == request.user.pk and SESSION_KEY_IMPERSONATE_USER not in self.request.session:
LOGGER.debug("Updating session hash after password change")
update_session_auth_hash(self.request, user)
return Response(status=204)
@action(
methods=["PUT"],
detail=False,
pagination_class=None,
filter_backends=[],
permission_classes=[IsAuthenticated],
)
def update_self(self, request: Request) -> Response:
"""Allow users to change information on their own profile"""
data = UserSelfSerializer(
instance=User.objects.get(pk=request.user.pk), data=request.data
)
if not data.is_valid():
return Response(data.errors)
new_user = data.save()
# If we're impersonating, we need to update that user object
# since it caches the full object
if SESSION_IMPERSONATE_USER in request.session:
request.session[SESSION_IMPERSONATE_USER] = new_user
return self.me(request)
@permission_required("authentik_core.view_user", ["authentik_events.view_event"])
@extend_schema(responses={200: UserMetricsSerializer(many=False)})
@ -445,59 +239,23 @@ class UserViewSet(UsedByMixin, ModelViewSet):
# pylint: disable=invalid-name, unused-argument
def recovery(self, request: Request, pk: int) -> Response:
"""Create a temporary link that a user can use to recover their accounts"""
link, _ = self._create_recovery_link()
if not link:
LOGGER.debug("Couldn't create token")
tenant: Tenant = request._request.tenant
# Check that there is a recovery flow, if not return an error
flow = tenant.flow_recovery
if not flow:
return Response({"link": ""}, status=404)
return Response({"link": link})
@permission_required("authentik_core.reset_user_password")
@extend_schema(
parameters=[
OpenApiParameter(
name="email_stage",
location=OpenApiParameter.QUERY,
type=OpenApiTypes.STR,
required=True,
)
],
responses={
"204": OpenApiResponse(description="Successfully sent recover email"),
"404": OpenApiResponse(description="Bad request"),
},
)
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=invalid-name, unused-argument
def recovery_email(self, request: Request, pk: int) -> Response:
"""Create a temporary link that a user can use to recover their accounts"""
for_user = self.get_object()
if for_user.email == "":
LOGGER.debug("User doesn't have an email address")
return Response(status=404)
link, token = self._create_recovery_link()
if not link:
LOGGER.debug("Couldn't create token")
return Response(status=404)
# Lookup the email stage to assure the current user can access it
stages = get_objects_for_user(
request.user, "authentik_stages_email.view_emailstage"
).filter(pk=request.query_params.get("email_stage"))
if not stages.exists():
LOGGER.debug("Email stage does not exist/user has no permissions")
return Response(status=404)
email_stage: EmailStage = stages.first()
message = TemplateEmailMessage(
subject=_(email_stage.subject),
template_name=email_stage.template,
to=[for_user.email],
template_context={
"url": link,
"user": for_user,
"expires": token.expires,
},
user: User = self.get_object()
token, __ = Token.objects.get_or_create(
identifier=f"{user.uid}-password-reset",
user=user,
intent=TokenIntents.INTENT_RECOVERY,
)
send_mails(email_stage, message)
return Response(status=204)
querystring = urlencode({"token": token.key})
link = request.build_absolute_uri(
reverse_lazy("authentik_core:if-flow", kwargs={"flow_slug": flow.slug})
+ f"?{querystring}"
)
return Response({"link": link})
def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
"""Custom filter_queryset method which ignores guardian, but still supports sorting"""
@ -511,32 +269,3 @@ class UserViewSet(UsedByMixin, ModelViewSet):
if self.request.user.has_perm("authentik_core.view_user"):
return self._filter_queryset_for_list(queryset)
return super().filter_queryset(queryset)
@extend_schema(
responses={
200: inline_serializer(
"UserPathSerializer", {"paths": ListField(child=CharField(), read_only=True)}
)
},
parameters=[
OpenApiParameter(
name="search",
location=OpenApiParameter.QUERY,
type=OpenApiTypes.STR,
)
],
)
@action(detail=False, pagination_class=None)
def paths(self, request: Request) -> Response:
"""Get all user paths"""
return Response(
data={
"paths": list(
self.filter_queryset(self.get_queryset())
.values("path")
.distinct()
.order_by("path")
.values_list("path", flat=True)
)
}
)


@ -2,15 +2,21 @@
from typing import Any
from django.db.models import Model
from rest_framework.fields import BooleanField, CharField, FileField, IntegerField
from rest_framework.serializers import Serializer, SerializerMethodField, ValidationError
from rest_framework.fields import CharField, IntegerField
from rest_framework.serializers import (
Serializer,
SerializerMethodField,
ValidationError,
)
def is_dict(value: Any):
"""Ensure a value is a dictionary, useful for JSONFields"""
if isinstance(value, dict):
return
raise ValidationError("Value must be a dictionary, and not have any duplicate keys.")
raise ValidationError(
"Value must be a dictionary, and not have any duplicate keys."
)
class PassiveSerializer(Serializer):
@ -19,21 +25,13 @@ class PassiveSerializer(Serializer):
def create(self, validated_data: dict) -> Model: # pragma: no cover
return Model()
def update(self, instance: Model, validated_data: dict) -> Model: # pragma: no cover
def update(
self, instance: Model, validated_data: dict
) -> Model: # pragma: no cover
return Model()
class FileUploadSerializer(PassiveSerializer):
"""Serializer to upload file"""
file = FileField(required=False)
clear = BooleanField(default=False)
class FilePathSerializer(PassiveSerializer):
"""Serializer to upload file"""
url = CharField()
class Meta:
model = Model
class MetaNameSerializer(PassiveSerializer):
@ -41,7 +39,6 @@ class MetaNameSerializer(PassiveSerializer):
verbose_name = SerializerMethodField()
verbose_name_plural = SerializerMethodField()
meta_model_name = SerializerMethodField()
def get_verbose_name(self, obj: Model) -> str:
"""Return object's verbose_name"""
@ -51,10 +48,6 @@ class MetaNameSerializer(PassiveSerializer):
"""Return object's plural verbose_name"""
return obj._meta.verbose_name_plural
def get_meta_model_name(self, obj: Model) -> str:
"""Return internal model name"""
return f"{obj._meta.app_label}.{obj._meta.model_name}"
class TypeCreateSerializer(PassiveSerializer):
"""Types of an object that can be created"""


@ -1,37 +1,30 @@
"""authentik core app config"""
from django.conf import settings
from importlib import import_module
from authentik.blueprints.manager import ManagedAppConfig
from django.apps import AppConfig
from django.db import ProgrammingError
from authentik.core.signals import GAUGE_MODELS
from authentik.lib.utils.reflection import get_apps
class AuthentikCoreConfig(ManagedAppConfig):
class AuthentikCoreConfig(AppConfig):
"""authentik core app config"""
name = "authentik.core"
label = "authentik_core"
verbose_name = "authentik Core"
mountpoint = ""
default = True
def reconcile_load_core_signals(self):
"""Load core signals"""
self.import_module("authentik.core.signals")
def reconcile_debug_worker_hook(self):
"""Dispatch startup tasks inline when debugging"""
if settings.DEBUG:
from authentik.root.celery import worker_ready_hook
worker_ready_hook()
def reconcile_source_inbuilt(self):
"""Reconcile inbuilt source"""
from authentik.core.models import Source
Source.objects.update_or_create(
defaults={
"name": "authentik Built-in",
"slug": "authentik-built-in",
},
managed="goauthentik.io/sources/inbuilt",
)
def ready(self):
import_module("authentik.core.signals")
import_module("authentik.core.managed")
try:
for app in get_apps():
for model in app.get_models():
GAUGE_MODELS.labels(
model_name=model._meta.model_name,
app=model._meta.app_label,
).set(model.objects.count())
except ProgrammingError:
pass
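ready() above publishes a per-model object count to a Prometheus gauge and swallows ProgrammingError so the app can still start before migrations have created the tables. A minimal sketch of such a gauge (metric name and help text are assumptions; the label names match the code):

    # Sketch only - the real gauge is defined in authentik.core.signals:
    from prometheus_client import Gauge

    GAUGE_MODELS = Gauge(
        "authentik_models_total",      # assumed metric name
        "Object count per model",      # assumed help text
        ["model_name", "app"],
    )
    GAUGE_MODELS.labels(model_name="user", app="authentik_core").set(1)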


@ -1,60 +0,0 @@
"""Authenticate with tokens"""
from typing import Any, Optional
from django.contrib.auth.backends import ModelBackend
from django.http.request import HttpRequest
from authentik.core.models import Token, TokenIntents, User
from authentik.events.utils import cleanse_dict, sanitize_dict
from authentik.flows.planner import FlowPlan
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS
class InbuiltBackend(ModelBackend):
"""Inbuilt backend"""
def authenticate(
self, request: HttpRequest, username: Optional[str], password: Optional[str], **kwargs: Any
) -> Optional[User]:
user = super().authenticate(request, username=username, password=password, **kwargs)
if not user:
return None
self.set_method("password", request)
return user
def set_method(self, method: str, request: Optional[HttpRequest], **kwargs):
"""Set method data on current flow, if possbiel"""
if not request:
return
# Since we can't directly pass other variables to signals, and we want to log the method
# and the token used, we assume we're running in a flow and set a variable in the context
flow_plan: FlowPlan = request.session.get(SESSION_KEY_PLAN, FlowPlan(""))
flow_plan.context[PLAN_CONTEXT_METHOD] = method
flow_plan.context[PLAN_CONTEXT_METHOD_ARGS] = cleanse_dict(sanitize_dict(kwargs))
request.session[SESSION_KEY_PLAN] = flow_plan
class TokenBackend(InbuiltBackend):
"""Authenticate with token"""
def authenticate(
self, request: HttpRequest, username: Optional[str], password: Optional[str], **kwargs: Any
) -> Optional[User]:
try:
user = User._default_manager.get_by_natural_key(username)
except User.DoesNotExist:
# Run the default password hasher once to reduce the timing
# difference between an existing and a nonexistent user (#20760).
User().set_password(password)
return None
# pylint: disable=no-member
tokens = Token.filter_not_expired(
user=user, key=password, intent=TokenIntents.INTENT_APP_PASSWORD
)
if not tokens.exists():
return None
token = tokens.first()
self.set_method("token", request, token=token)
return token.user
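TokenBackend above plugs into Django's standard authentication machinery, so an app-password token key can be supplied wherever a password is expected. A minimal sketch, assuming the backend is listed in AUTHENTICATION_BACKENDS:

    # Sketch: authenticating with an app-password token in place of the password.
    from django.contrib.auth import authenticate

    # request is the current HttpRequest (may be None); credentials are placeholders
    user = authenticate(request, username="some-user", password="<app-password token key>")
    if user is None:
        # unknown user, expired token, or a token with the wrong intent
        ...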


@ -4,7 +4,7 @@ from channels.generic.websocket import JsonWebsocketConsumer
from rest_framework.exceptions import AuthenticationFailed
from structlog.stdlib import get_logger
from authentik.api.authentication import bearer_auth
from authentik.api.authentication import token_from_header
from authentik.core.models import User
LOGGER = get_logger()
@ -24,12 +24,12 @@ class AuthJsonConsumer(JsonWebsocketConsumer):
raw_header = headers[b"authorization"]
try:
user = bearer_auth(raw_header)
# user is only None when no header was given, in which case we deny too
if not user:
token = token_from_header(raw_header)
# token is only None when no header was given, in which case we deny too
if not token:
raise DenyConnection()
except AuthenticationFailed as exc:
LOGGER.warning("Failed to authenticate", exc=exc)
raise DenyConnection()
self.user = user
self.user = token.user
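The consumer above authenticates WebSocket connections from the same Authorization header the REST API uses. An illustrative client-side sketch (the bearer-style header value is an assumption):

    # Illustrative only: a client opens the socket with an Authorization header;
    # channels then exposes it via the scope headers read in connect() above.
    headers = [(b"authorization", b"Bearer <token-key>")]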

authentik/core/managed.py (new file, +16 lines)

@ -0,0 +1,16 @@
"""Core managed objects"""
from authentik.core.models import Source
from authentik.managed.manager import EnsureExists, ObjectManager
class CoreManager(ObjectManager):
"""Core managed objects"""
def reconcile(self):
return [
EnsureExists(
Source,
"goauthentik.io/sources/inbuilt",
name="authentik Built-in",
),
]
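EnsureExists above takes over what the removed AppConfig reconciliation did. A rough equivalent, assuming it keys on the managed field exactly like the update_or_create call earlier in this diff:

    # Rough equivalent of EnsureExists(Source, "goauthentik.io/sources/inbuilt", ...):
    from authentik.core.models import Source

    Source.objects.update_or_create(
        managed="goauthentik.io/sources/inbuilt",
        defaults={"name": "authentik Built-in"},
    )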


@ -1,13 +0,0 @@
"""Run bootstrap tasks"""
from django.core.management.base import BaseCommand
from authentik.root.celery import _get_startup_tasks
class Command(BaseCommand):
"""Run bootstrap tasks to ensure certain objects are created"""
def handle(self, **options):
tasks = _get_startup_tasks()
for task in tasks:
task()


@ -1,17 +0,0 @@
"""Repair missing permissions"""
from django.apps import apps
from django.contrib.auth.management import create_permissions
from django.core.management.base import BaseCommand, no_translations
from guardian.management import create_anonymous_user
class Command(BaseCommand):
"""Repair missing permissions"""
@no_translations
def handle(self, *args, **options):
"""Check permissions for all apps"""
for app in apps.get_app_configs():
self.stdout.write(f"Checking app {app.name} ({app.label})\n")
create_permissions(app, verbosity=0)
create_anonymous_user(None, using="default")


@ -1,103 +0,0 @@
"""authentik shell command"""
import code
import platform
from django.apps import apps
from django.core.management.base import BaseCommand
from django.db.models import Model
from django.db.models.signals import post_save, pre_delete
from authentik import __version__
from authentik.core.models import User
from authentik.events.middleware import IGNORED_MODELS
from authentik.events.models import Event, EventAction
from authentik.events.utils import model_to_dict
BANNER_TEXT = """### authentik shell ({authentik})
### Node {node} | Arch {arch} | Python {python} """.format(
node=platform.node(),
python=platform.python_version(),
arch=platform.machine(),
authentik=__version__,
)
class Command(BaseCommand):
"""Start the Django shell with all authentik models already imported"""
django_models = {}
def add_arguments(self, parser):
parser.add_argument(
"-c",
"--command",
help="Python code to execute (instead of starting an interactive shell)",
)
def get_namespace(self):
"""Prepare namespace with all models"""
namespace = {}
# Gather Django models and constants from each app
for app in apps.get_app_configs():
# Load models from each app
for model in app.get_models():
namespace[model.__name__] = model
return namespace
@staticmethod
# pylint: disable=unused-argument
def post_save_handler(sender, instance: Model, created: bool, **_):
"""Signal handler for all object's post_save"""
if isinstance(instance, IGNORED_MODELS):
return
action = EventAction.MODEL_CREATED if created else EventAction.MODEL_UPDATED
Event.new(action, model=model_to_dict(instance)).set_user(
User(
username="authentik-shell",
pk=0,
email="",
)
).save()
@staticmethod
# pylint: disable=unused-argument
def pre_delete_handler(sender, instance: Model, **_):
"""Signal handler for all object's pre_delete"""
if isinstance(instance, IGNORED_MODELS): # pragma: no cover
return
Event.new(EventAction.MODEL_DELETED, model=model_to_dict(instance)).set_user(
User(
username="authentik-shell",
pk=0,
email="",
)
).save()
def handle(self, **options):
namespace = self.get_namespace()
post_save.connect(Command.post_save_handler)
pre_delete.connect(Command.pre_delete_handler)
# If Python code has been passed, execute it and exit.
if options["command"]:
# pylint: disable=exec-used
exec(options["command"], namespace) # nosec # noqa
return
# Try to enable tab-complete
try:
import readline
import rlcompleter
except ModuleNotFoundError:
pass
else:
readline.set_completer(rlcompleter.Completer(namespace).complete)
readline.parse_and_bind("tab: complete")
# Run interactive shell
code.interact(banner=BANNER_TEXT, local=namespace)
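The --command flag above allows one-off snippets without entering the interactive session; typical invocations (the manage.py entry point may differ per deployment):

    # Interactive shell with all models pre-imported:
    #   python manage.py shell
    # One-off snippet via -c/--command:
    #   python manage.py shell -c "print(User.objects.count())"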

Some files were not shown because too many files have changed in this diff.