Compare commits
5 Commits
version-20
...
version-20
Author | SHA1 | Date | |
---|---|---|---|
fe5d22ce6c | |||
0e30b6ee55 | |||
6cbba45291 | |||
ba023a3bba | |||
6c805bcf32 |
@ -1,20 +1,36 @@
|
||||
[bumpversion]
|
||||
current_version = 2023.1.3
|
||||
current_version = 2021.8.5
|
||||
tag = True
|
||||
commit = True
|
||||
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
|
||||
serialize = {major}.{minor}.{patch}
|
||||
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)
|
||||
serialize =
|
||||
{major}.{minor}.{patch}-{release}
|
||||
{major}.{minor}.{patch}
|
||||
message = release: {new_version}
|
||||
tag_name = version/{new_version}
|
||||
|
||||
[bumpversion:file:pyproject.toml]
|
||||
[bumpversion:part:release]
|
||||
optional_value = stable
|
||||
first_value = beta
|
||||
values =
|
||||
alpha
|
||||
beta
|
||||
stable
|
||||
|
||||
[bumpversion:file:website/docs/installation/docker-compose.md]
|
||||
|
||||
[bumpversion:file:docker-compose.yml]
|
||||
|
||||
[bumpversion:file:schema.yml]
|
||||
|
||||
[bumpversion:file:.github/workflows/release-publish.yml]
|
||||
|
||||
[bumpversion:file:authentik/__init__.py]
|
||||
|
||||
[bumpversion:file:internal/constants/constants.go]
|
||||
|
||||
[bumpversion:file:web/src/common/constants.ts]
|
||||
[bumpversion:file:web/src/constants.ts]
|
||||
|
||||
[bumpversion:file:website/docs/outposts/manual-deploy-docker-compose.md]
|
||||
|
||||
[bumpversion:file:website/docs/outposts/manual-deploy-kubernetes.md]
|
||||
|
@ -1,8 +1,8 @@
|
||||
env
|
||||
static
|
||||
htmlcov
|
||||
*.env.yml
|
||||
**/node_modules
|
||||
dist/**
|
||||
build/**
|
||||
build_docs/**
|
||||
Dockerfile
|
||||
|
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@ -27,7 +27,7 @@ If applicable, add screenshots to help explain your problem.
|
||||
Output of docker-compose logs or kubectl logs respectively
|
||||
|
||||
**Version and Deployment (please complete the following information):**
|
||||
- authentik version: [e.g. 2021.8.5]
|
||||
- authentik version: [e.g. 0.10.0-stable]
|
||||
- Deployment: [e.g. docker-compose, helm]
|
||||
|
||||
**Additional context**
|
||||
|
2
.github/ISSUE_TEMPLATE/question.md
vendored
2
.github/ISSUE_TEMPLATE/question.md
vendored
@ -20,7 +20,7 @@ If applicable, add screenshots to help explain your problem.
|
||||
Output of docker-compose logs or kubectl logs respectively
|
||||
|
||||
**Version and Deployment (please complete the following information):**
|
||||
- authentik version: [e.g. 2021.8.5]
|
||||
- authentik version: [e.g. 0.10.0-stable]
|
||||
- Deployment: [e.g. docker-compose, helm]
|
||||
|
||||
**Additional context**
|
||||
|
@ -1,78 +0,0 @@
|
||||
name: 'Comment usage instructions on PRs'
|
||||
description: 'Comment usage instructions on PRs'
|
||||
|
||||
inputs:
|
||||
tag:
|
||||
description: "Image tag to pull"
|
||||
required: true
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Generate config
|
||||
id: ev
|
||||
uses: ./.github/actions/docker-push-variables
|
||||
- name: Find Comment
|
||||
uses: peter-evans/find-comment@v2
|
||||
id: fc
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: 'github-actions[bot]'
|
||||
body-includes: authentik PR Installation instructions
|
||||
- name: Create or update comment
|
||||
uses: peter-evans/create-or-update-comment@v2
|
||||
with:
|
||||
comment-id: ${{ steps.fc.outputs.comment-id }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
body: |
|
||||
authentik PR Installation instructions
|
||||
|
||||
<details>
|
||||
<summary>Instructions for docker-compose</summary>
|
||||
|
||||
Add the following block to your `.env` file:
|
||||
|
||||
```shell
|
||||
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
|
||||
AUTHENTIK_TAG=${{ inputs.tag }}
|
||||
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
|
||||
```
|
||||
|
||||
For arm64, use these values:
|
||||
|
||||
```shell
|
||||
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
|
||||
AUTHENTIK_TAG=${{ inputs.tag }}-arm64
|
||||
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
|
||||
```
|
||||
|
||||
Afterwards, run the upgrade commands from the latest release notes.
|
||||
</details>
|
||||
<details>
|
||||
<summary>Instructions for Kubernetes</summary>
|
||||
|
||||
Add the following block to your `values.yml` file:
|
||||
|
||||
```yaml
|
||||
authentik:
|
||||
outposts:
|
||||
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
|
||||
image:
|
||||
repository: ghcr.io/goauthentik/dev-server
|
||||
tag: ${{ inputs.tag }}
|
||||
```
|
||||
|
||||
For arm64, use these values:
|
||||
|
||||
```yaml
|
||||
authentik:
|
||||
outposts:
|
||||
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
|
||||
image:
|
||||
repository: ghcr.io/goauthentik/dev-server
|
||||
tag: ${{ inputs.tag }}-arm64
|
||||
```
|
||||
|
||||
Afterwards, run the upgrade commands from the latest release notes.
|
||||
</details>
|
||||
edit-mode: replace
|
62
.github/actions/docker-push-variables/action.yml
vendored
62
.github/actions/docker-push-variables/action.yml
vendored
@ -1,62 +0,0 @@
|
||||
name: 'Prepare docker environment variables'
|
||||
description: 'Prepare docker environment variables'
|
||||
|
||||
outputs:
|
||||
shouldBuild:
|
||||
description: "Whether to build image or not"
|
||||
value: ${{ steps.ev.outputs.shouldBuild }}
|
||||
branchName:
|
||||
description: "Branch name"
|
||||
value: ${{ steps.ev.outputs.branchName }}
|
||||
branchNameContainer:
|
||||
description: "Branch name (for containers)"
|
||||
value: ${{ steps.ev.outputs.branchNameContainer }}
|
||||
timestamp:
|
||||
description: "Timestamp"
|
||||
value: ${{ steps.ev.outputs.timestamp }}
|
||||
sha:
|
||||
description: "sha"
|
||||
value: ${{ steps.ev.outputs.sha }}
|
||||
shortHash:
|
||||
description: "shortHash"
|
||||
value: ${{ steps.ev.outputs.shortHash }}
|
||||
version:
|
||||
description: "version"
|
||||
value: ${{ steps.ev.outputs.version }}
|
||||
versionFamily:
|
||||
description: "versionFamily"
|
||||
value: ${{ steps.ev.outputs.versionFamily }}
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Generate config
|
||||
id: ev
|
||||
shell: python
|
||||
run: |
|
||||
"""Helper script to get the actual branch name, docker safe"""
|
||||
import configparser
|
||||
import os
|
||||
from time import time
|
||||
|
||||
parser = configparser.ConfigParser()
|
||||
parser.read(".bumpversion.cfg")
|
||||
|
||||
branch_name = os.environ["GITHUB_REF"]
|
||||
if os.environ.get("GITHUB_HEAD_REF", "") != "":
|
||||
branch_name = os.environ["GITHUB_HEAD_REF"]
|
||||
|
||||
should_build = str(os.environ.get("DOCKER_USERNAME", "") != "").lower()
|
||||
version = parser.get("bumpversion", "current_version")
|
||||
version_family = ".".join(version.split(".")[:-1])
|
||||
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")
|
||||
|
||||
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
|
||||
print("branchName=%s" % branch_name, file=_output)
|
||||
print("branchNameContainer=%s" % safe_branch_name, file=_output)
|
||||
print("timestamp=%s" % int(time()), file=_output)
|
||||
print("sha=%s" % os.environ["GITHUB_SHA"], file=_output)
|
||||
print("shortHash=%s" % os.environ["GITHUB_SHA"][:7], file=_output)
|
||||
print("shouldBuild=%s" % should_build, file=_output)
|
||||
print("version=%s" % version, file=_output)
|
||||
print("versionFamily=%s" % version_family, file=_output)
|
45
.github/actions/setup/action.yml
vendored
45
.github/actions/setup/action.yml
vendored
@ -1,45 +0,0 @@
|
||||
name: 'Setup authentik testing environment'
|
||||
description: 'Setup authentik testing environment'
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Install poetry
|
||||
shell: bash
|
||||
run: |
|
||||
pipx install poetry || true
|
||||
sudo apt update
|
||||
sudo apt install -y libxmlsec1-dev pkg-config gettext
|
||||
- name: Setup python and restore poetry
|
||||
uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'poetry'
|
||||
- name: Setup node
|
||||
uses: actions/setup-node@v3.1.0
|
||||
with:
|
||||
node-version: '16'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- name: Setup dependencies
|
||||
shell: bash
|
||||
run: |
|
||||
docker-compose -f .github/actions/setup/docker-compose.yml up -d
|
||||
poetry env use python3.11
|
||||
poetry install
|
||||
cd web && npm ci
|
||||
- name: Generate config
|
||||
shell: poetry run python {0}
|
||||
run: |
|
||||
from authentik.lib.generators import generate_id
|
||||
from yaml import safe_dump
|
||||
|
||||
with open("local.env.yml", "w") as _config:
|
||||
safe_dump(
|
||||
{
|
||||
"log_level": "debug",
|
||||
"secret_key": generate_id(),
|
||||
},
|
||||
_config,
|
||||
default_flow_style=False,
|
||||
)
|
4
.github/codespell-words.txt
vendored
4
.github/codespell-words.txt
vendored
@ -1,4 +0,0 @@
|
||||
keypair
|
||||
keypairs
|
||||
hass
|
||||
warmup
|
108
.github/dependabot.yml
vendored
108
.github/dependabot.yml
vendored
@ -1,62 +1,50 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
time: "04:00"
|
||||
open-pull-requests-limit: 10
|
||||
reviewers:
|
||||
- "@goauthentik/core"
|
||||
commit-message:
|
||||
prefix: "ci:"
|
||||
- package-ecosystem: gomod
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
time: "04:00"
|
||||
open-pull-requests-limit: 10
|
||||
reviewers:
|
||||
- "@goauthentik/core"
|
||||
commit-message:
|
||||
prefix: "core:"
|
||||
- package-ecosystem: npm
|
||||
directory: "/web"
|
||||
schedule:
|
||||
interval: daily
|
||||
time: "04:00"
|
||||
open-pull-requests-limit: 10
|
||||
reviewers:
|
||||
- "@goauthentik/core"
|
||||
commit-message:
|
||||
prefix: "web:"
|
||||
- package-ecosystem: npm
|
||||
directory: "/website"
|
||||
schedule:
|
||||
interval: daily
|
||||
time: "04:00"
|
||||
open-pull-requests-limit: 10
|
||||
reviewers:
|
||||
- "@goauthentik/core"
|
||||
commit-message:
|
||||
prefix: "website:"
|
||||
- package-ecosystem: pip
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
time: "04:00"
|
||||
open-pull-requests-limit: 10
|
||||
reviewers:
|
||||
- "@goauthentik/core"
|
||||
commit-message:
|
||||
prefix: "core:"
|
||||
- package-ecosystem: docker
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
time: "04:00"
|
||||
open-pull-requests-limit: 10
|
||||
reviewers:
|
||||
- "@goauthentik/core"
|
||||
commit-message:
|
||||
prefix: "core:"
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
time: "04:00"
|
||||
open-pull-requests-limit: 10
|
||||
assignees:
|
||||
- BeryJu
|
||||
- package-ecosystem: gomod
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
time: "04:00"
|
||||
open-pull-requests-limit: 10
|
||||
assignees:
|
||||
- BeryJu
|
||||
- package-ecosystem: npm
|
||||
directory: "/web"
|
||||
schedule:
|
||||
interval: daily
|
||||
time: "04:00"
|
||||
open-pull-requests-limit: 10
|
||||
assignees:
|
||||
- BeryJu
|
||||
- package-ecosystem: npm
|
||||
directory: "/website"
|
||||
schedule:
|
||||
interval: daily
|
||||
time: "04:00"
|
||||
open-pull-requests-limit: 10
|
||||
assignees:
|
||||
- BeryJu
|
||||
- package-ecosystem: pip
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
time: "04:00"
|
||||
open-pull-requests-limit: 10
|
||||
assignees:
|
||||
- BeryJu
|
||||
- package-ecosystem: docker
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
time: "04:00"
|
||||
open-pull-requests-limit: 10
|
||||
assignees:
|
||||
- BeryJu
|
||||
|
2
.github/pull_request_template.md
vendored
2
.github/pull_request_template.md
vendored
@ -1,7 +1,7 @@
|
||||
<!--
|
||||
👋 Hello there! Welcome.
|
||||
|
||||
Please check the [Contributing guidelines](https://github.com/goauthentik/authentik/blob/main/CONTRIBUTING.md#how-can-i-contribute).
|
||||
Please check the [Contributing guidelines](https://github.com/goauthentik/authentik/blob/master/CONTRIBUTING.md#how-can-i-contribute).
|
||||
-->
|
||||
|
||||
# Details
|
||||
|
4
.github/stale.yml
vendored
4
.github/stale.yml
vendored
@ -7,10 +7,6 @@ exemptLabels:
|
||||
- pinned
|
||||
- security
|
||||
- pr_wanted
|
||||
- enhancement
|
||||
- bug/confirmed
|
||||
- enhancement/confirmed
|
||||
- question
|
||||
# Comment to post when marking an issue as stale. Set to `false` to disable
|
||||
markComment: >
|
||||
This issue has been automatically marked as stale because it has not had
|
||||
|
16
.github/transifex.yml
vendored
16
.github/transifex.yml
vendored
@ -1,16 +0,0 @@
|
||||
git:
|
||||
filters:
|
||||
- filter_type: file
|
||||
# all supported i18n types: https://docs.transifex.com/formats
|
||||
file_format: PO
|
||||
source_language: en
|
||||
source_file: web/src/locales/en.po
|
||||
# path expression to translation files, must contain <lang> placeholder
|
||||
translation_files_expression: 'web/src/locales/<lang>.po'
|
||||
- filter_type: file
|
||||
# all supported i18n types: https://docs.transifex.com/formats
|
||||
file_format: PO
|
||||
source_language: en
|
||||
source_file: locale/en/LC_MESSAGES/django.po
|
||||
# path expression to translation files, must contain <lang> placeholder
|
||||
translation_files_expression: 'locale/<lang>/LC_MESSAGES/django.po'
|
384
.github/workflows/ci-main.yml
vendored
384
.github/workflows/ci-main.yml
vendored
@ -3,14 +3,14 @@ name: authentik-ci-main
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
- next
|
||||
- version-*
|
||||
paths-ignore:
|
||||
- website
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
|
||||
env:
|
||||
POSTGRES_DB: authentik
|
||||
@ -18,145 +18,256 @@ env:
|
||||
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
job:
|
||||
- pylint
|
||||
- black
|
||||
- isort
|
||||
- bandit
|
||||
- pyright
|
||||
- pending-migrations
|
||||
lint-pylint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Setup authentik env
|
||||
uses: ./.github/actions/setup
|
||||
- name: run job
|
||||
run: poetry run make ci-${{ matrix.job }}
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- name: run pylint
|
||||
run: pipenv run pylint authentik tests lifecycle
|
||||
lint-black:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- name: run black
|
||||
run: pipenv run black --check authentik tests lifecycle
|
||||
lint-isort:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- name: run isort
|
||||
run: pipenv run isort --check authentik tests lifecycle
|
||||
lint-bandit:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- name: run bandit
|
||||
run: pipenv run bandit -r authentik tests lifecycle
|
||||
lint-pyright:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16'
|
||||
- name: prepare
|
||||
run: |
|
||||
scripts/ci_prepare.sh
|
||||
npm install -g pyright@1.1.136
|
||||
- name: run bandit
|
||||
run: pipenv run pyright e2e lifecycle
|
||||
test-migrations:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Setup authentik env
|
||||
uses: ./.github/actions/setup
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- name: run migrations
|
||||
run: poetry run python -m lifecycle.migrate
|
||||
run: pipenv run python -m lifecycle.migrate
|
||||
test-migrations-from-stable:
|
||||
runs-on: ubuntu-latest
|
||||
continue-on-error: true
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup authentik env
|
||||
uses: ./.github/actions/setup
|
||||
python-version: '3.9'
|
||||
- name: checkout stable
|
||||
run: |
|
||||
# Copy current, latest config to local
|
||||
cp authentik/lib/default.yml local.env.yml
|
||||
cp -R .github ..
|
||||
cp -R scripts ..
|
||||
git checkout $(git describe --abbrev=0 --match 'version/*')
|
||||
rm -rf .github/ scripts/
|
||||
mv ../.github ../scripts .
|
||||
- name: Setup authentik env (ensure stable deps are installed)
|
||||
uses: ./.github/actions/setup
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- name: run migrations to stable
|
||||
run: poetry run python -m lifecycle.migrate
|
||||
run: pipenv run python -m lifecycle.migrate
|
||||
- name: checkout current code
|
||||
run: |
|
||||
set -x
|
||||
git fetch
|
||||
git reset --hard HEAD
|
||||
git clean -d -fx .
|
||||
git checkout $GITHUB_SHA
|
||||
poetry install
|
||||
- name: Setup authentik env (ensure latest deps are installed)
|
||||
uses: ./.github/actions/setup
|
||||
git checkout $GITHUB_REF
|
||||
pipenv sync --dev
|
||||
- name: migrate to latest
|
||||
run: poetry run python -m lifecycle.migrate
|
||||
run: pipenv run python -m lifecycle.migrate
|
||||
test-unittest:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Setup authentik env
|
||||
uses: ./.github/actions/setup
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- uses: testspace-com/setup-testspace@v1
|
||||
with:
|
||||
domain: ${{github.repository_owner}}
|
||||
- name: run unittest
|
||||
run: |
|
||||
poetry run make test
|
||||
poetry run coverage xml
|
||||
pipenv run make test
|
||||
pipenv run coverage xml
|
||||
- name: run testspace
|
||||
if: ${{ always() }}
|
||||
run: |
|
||||
testspace [unittest]unittest.xml --link=codecov
|
||||
- if: ${{ always() }}
|
||||
uses: codecov/codecov-action@v3
|
||||
with:
|
||||
flags: unit
|
||||
uses: codecov/codecov-action@v2
|
||||
test-integration:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Setup authentik env
|
||||
uses: ./.github/actions/setup
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- uses: testspace-com/setup-testspace@v1
|
||||
with:
|
||||
domain: ${{github.repository_owner}}
|
||||
- name: Create k8s Kind Cluster
|
||||
uses: helm/kind-action@v1.5.0
|
||||
uses: helm/kind-action@v1.2.0
|
||||
- name: run integration
|
||||
run: |
|
||||
poetry run coverage run manage.py test tests/integration
|
||||
poetry run coverage xml
|
||||
- if: ${{ always() }}
|
||||
uses: codecov/codecov-action@v3
|
||||
with:
|
||||
flags: integration
|
||||
test-e2e:
|
||||
name: test-e2e (${{ matrix.job.name }})
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
job:
|
||||
- name: proxy
|
||||
glob: tests/e2e/test_provider_proxy*
|
||||
- name: oauth
|
||||
glob: tests/e2e/test_provider_oauth2* tests/e2e/test_source_oauth*
|
||||
- name: oauth-oidc
|
||||
glob: tests/e2e/test_provider_oidc*
|
||||
- name: saml
|
||||
glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml*
|
||||
- name: ldap
|
||||
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
|
||||
- name: flows
|
||||
glob: tests/e2e/test_flows*
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Setup authentik env
|
||||
uses: ./.github/actions/setup
|
||||
- name: Setup e2e env (chrome, etc)
|
||||
pipenv run make test-integration
|
||||
pipenv run coverage xml
|
||||
- name: run testspace
|
||||
if: ${{ always() }}
|
||||
run: |
|
||||
docker-compose -f tests/e2e/docker-compose.yml up -d
|
||||
testspace [integration]unittest.xml --link=codecov
|
||||
- if: ${{ always() }}
|
||||
uses: codecov/codecov-action@v2
|
||||
test-e2e:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- uses: testspace-com/setup-testspace@v1
|
||||
with:
|
||||
domain: ${{github.repository_owner}}
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: |
|
||||
scripts/ci_prepare.sh
|
||||
docker-compose -f tests/e2e/ci.docker-compose.yml up -d
|
||||
- id: cache-web
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: web/dist
|
||||
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
|
||||
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/**') }}
|
||||
- name: prepare web ui
|
||||
if: steps.cache-web.outputs.cache-hit != 'true'
|
||||
working-directory: web
|
||||
run: |
|
||||
npm ci
|
||||
make -C .. gen-client-ts
|
||||
cd web
|
||||
npm i
|
||||
npm run build
|
||||
- name: run e2e
|
||||
run: |
|
||||
poetry run coverage run manage.py test ${{ matrix.job.glob }}
|
||||
poetry run coverage xml
|
||||
pipenv run make test-e2e
|
||||
pipenv run coverage xml
|
||||
- name: run testspace
|
||||
if: ${{ always() }}
|
||||
run: |
|
||||
testspace [e2e]unittest.xml --link=codecov
|
||||
- if: ${{ always() }}
|
||||
uses: codecov/codecov-action@v3
|
||||
with:
|
||||
flags: e2e
|
||||
ci-core-mark:
|
||||
uses: codecov/codecov-action@v2
|
||||
build:
|
||||
needs:
|
||||
- lint
|
||||
- lint-pylint
|
||||
- lint-black
|
||||
- lint-isort
|
||||
- lint-bandit
|
||||
- lint-pyright
|
||||
- test-migrations
|
||||
- test-migrations-from-stable
|
||||
- test-unittest
|
||||
@ -164,81 +275,28 @@ jobs:
|
||||
- test-e2e
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: echo mark
|
||||
build:
|
||||
needs: ci-core-mark
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 120
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2.1.0
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: prepare variables
|
||||
uses: ./.github/actions/docker-push-variables
|
||||
id: ev
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_USERNAME: ${{ secrets.HARBOR_USERNAME }}
|
||||
run: |
|
||||
python ./scripts/gh_do_set_branch.py
|
||||
- name: Login to Container Registry
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v1
|
||||
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Build Docker Image
|
||||
uses: docker/build-push-action@v3
|
||||
registry: beryju.org
|
||||
username: ${{ secrets.HARBOR_USERNAME }}
|
||||
password: ${{ secrets.HARBOR_PASSWORD }}
|
||||
- name: Building Docker Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
secrets: |
|
||||
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
|
||||
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
|
||||
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
||||
tags: |
|
||||
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}
|
||||
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
|
||||
beryju.org/authentik/server:gh-${{ steps.ev.outputs.branchName }}
|
||||
beryju.org/authentik/server:gh-${{ steps.ev.outputs.branchName }}-${{ steps.ev.outputs.timestamp }}
|
||||
build-args: |
|
||||
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
|
||||
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
|
||||
- name: Comment on PR
|
||||
if: github.event_name == 'pull_request'
|
||||
continue-on-error: true
|
||||
uses: ./.github/actions/comment-pr-instructions
|
||||
with:
|
||||
tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
|
||||
build-arm64:
|
||||
needs: ci-core-mark
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 120
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2.1.0
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: prepare variables
|
||||
uses: ./.github/actions/docker-push-variables
|
||||
id: ev
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
- name: Login to Container Registry
|
||||
uses: docker/login-action@v2
|
||||
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Build Docker Image
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
secrets: |
|
||||
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
|
||||
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
|
||||
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
||||
tags: |
|
||||
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-arm64
|
||||
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64
|
||||
build-args: |
|
||||
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
|
||||
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
|
||||
platforms: linux/arm64
|
||||
|
125
.github/workflows/ci-outpost.yml
vendored
125
.github/workflows/ci-outpost.yml
vendored
@ -3,134 +3,73 @@ name: authentik-ci-outpost
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
- next
|
||||
- version-*
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
|
||||
jobs:
|
||||
lint-golint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-go@v3
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-go@v2
|
||||
with:
|
||||
go-version: "^1.17"
|
||||
- name: Prepare and generate API
|
||||
go-version: '^1.16.3'
|
||||
- name: Generate API
|
||||
run: |
|
||||
make gen-outpost
|
||||
- name: Run linter
|
||||
run: |
|
||||
# Create folder structure for go embeds
|
||||
mkdir -p web/dist
|
||||
mkdir -p website/help
|
||||
touch web/dist/test website/help/test
|
||||
- name: Generate API
|
||||
run: make gen-client-go
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@v3
|
||||
with:
|
||||
args: --timeout 5000s
|
||||
test-unittest:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: "^1.17"
|
||||
- name: Generate API
|
||||
run: make gen-client-go
|
||||
- name: Go unittests
|
||||
run: |
|
||||
go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./...
|
||||
ci-outpost-mark:
|
||||
docker run \
|
||||
--rm \
|
||||
-v $(pwd):/app \
|
||||
-w /app \
|
||||
golangci/golangci-lint:v1.39.0 \
|
||||
golangci-lint run -v --timeout 200s
|
||||
build:
|
||||
needs:
|
||||
- lint-golint
|
||||
- test-unittest
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: echo mark
|
||||
build:
|
||||
timeout-minutes: 120
|
||||
needs:
|
||||
- ci-outpost-mark
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
type:
|
||||
- proxy
|
||||
- ldap
|
||||
arch:
|
||||
- 'linux/amd64'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2.1.0
|
||||
uses: docker/setup-qemu-action@v1.2.0
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: prepare variables
|
||||
uses: ./.github/actions/docker-push-variables
|
||||
id: ev
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_USERNAME: ${{ secrets.HARBOR_USERNAME }}
|
||||
run: |
|
||||
python ./scripts/gh_do_set_branch.py
|
||||
- name: Login to Container Registry
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v1
|
||||
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Generate API
|
||||
run: make gen-client-go
|
||||
- name: Build Docker Image
|
||||
uses: docker/build-push-action@v3
|
||||
registry: beryju.org
|
||||
username: ${{ secrets.HARBOR_USERNAME }}
|
||||
password: ${{ secrets.HARBOR_PASSWORD }}
|
||||
- name: Building Docker Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
|
||||
tags: |
|
||||
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}
|
||||
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
|
||||
beryju.org/authentik/outpost-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchName }}
|
||||
beryju.org/authentik/outpost-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchName }}-${{ steps.ev.outputs.timestamp }}
|
||||
beryju.org/authentik/outpost-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
|
||||
file: ${{ matrix.type }}.Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
|
||||
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
|
||||
platforms: ${{ matrix.arch }}
|
||||
build-outpost-binary:
|
||||
timeout-minutes: 120
|
||||
needs:
|
||||
- ci-outpost-mark
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
type:
|
||||
- proxy
|
||||
- ldap
|
||||
goos: [linux]
|
||||
goarch: [amd64, arm64]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: "^1.17"
|
||||
- uses: actions/setup-node@v3.6.0
|
||||
with:
|
||||
node-version: '16'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- name: Generate API
|
||||
run: make gen-client-go
|
||||
- name: Build web
|
||||
working-directory: web/
|
||||
run: |
|
||||
npm ci
|
||||
npm run build-proxy
|
||||
- name: Build outpost
|
||||
run: |
|
||||
set -x
|
||||
export GOOS=${{ matrix.goos }}
|
||||
export GOARCH=${{ matrix.goarch }}
|
||||
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
|
||||
path: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
|
||||
|
100
.github/workflows/ci-web.yml
vendored
100
.github/workflows/ci-web.yml
vendored
@ -3,107 +3,87 @@ name: authentik-ci-web
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
- next
|
||||
- version-*
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- master
|
||||
|
||||
jobs:
|
||||
lint-eslint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3.6.0
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- working-directory: web/
|
||||
run: npm ci
|
||||
- run: |
|
||||
cd web
|
||||
npm install
|
||||
- name: Generate API
|
||||
run: make gen-client-ts
|
||||
run: make gen-web
|
||||
- name: Eslint
|
||||
working-directory: web/
|
||||
run: npm run lint
|
||||
lint-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3.6.0
|
||||
with:
|
||||
node-version: '16'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- working-directory: web/
|
||||
run: npm ci
|
||||
- name: Generate API
|
||||
run: make gen-client-ts
|
||||
- name: TSC
|
||||
working-directory: web/
|
||||
run: npm run tsc
|
||||
run: |
|
||||
cd web
|
||||
npm run lint
|
||||
lint-prettier:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3.6.0
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- working-directory: web/
|
||||
run: npm ci
|
||||
- run: |
|
||||
cd web
|
||||
npm install
|
||||
- name: Generate API
|
||||
run: make gen-client-ts
|
||||
run: make gen-web
|
||||
- name: prettier
|
||||
working-directory: web/
|
||||
run: npm run prettier-check
|
||||
run: |
|
||||
cd web
|
||||
npm run prettier-check
|
||||
lint-lit-analyse:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3.6.0
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- working-directory: web/
|
||||
run: |
|
||||
npm ci
|
||||
# lit-analyse doesn't understand path rewrites, so make it
|
||||
# belive it's an actual module
|
||||
cd node_modules/@goauthentik
|
||||
ln -s ../../src/ web
|
||||
- run: |
|
||||
cd web
|
||||
npm install
|
||||
- name: Generate API
|
||||
run: make gen-client-ts
|
||||
- name: lit-analyse
|
||||
working-directory: web/
|
||||
run: npm run lit-analyse
|
||||
ci-web-mark:
|
||||
run: make gen-web
|
||||
- name: prettier
|
||||
run: |
|
||||
cd web
|
||||
npm run lit-analyse
|
||||
build:
|
||||
needs:
|
||||
- lint-eslint
|
||||
- lint-prettier
|
||||
- lint-lit-analyse
|
||||
- lint-build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: echo mark
|
||||
build:
|
||||
needs:
|
||||
- ci-web-mark
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3.6.0
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- working-directory: web/
|
||||
run: npm ci
|
||||
- run: |
|
||||
cd web
|
||||
npm install
|
||||
- name: Generate API
|
||||
run: make gen-client-ts
|
||||
run: make gen-web
|
||||
- name: build
|
||||
working-directory: web/
|
||||
run: npm run build
|
||||
run: |
|
||||
cd web
|
||||
npm run build
|
||||
|
33
.github/workflows/ci-website.yml
vendored
33
.github/workflows/ci-website.yml
vendored
@ -1,33 +0,0 @@
|
||||
name: authentik-ci-website
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- next
|
||||
- version-*
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
lint-prettier:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3.6.0
|
||||
with:
|
||||
node-version: '16'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: website/package-lock.json
|
||||
- working-directory: website/
|
||||
run: npm ci
|
||||
- name: prettier
|
||||
working-directory: website/
|
||||
run: npm run prettier-check
|
||||
ci-website-mark:
|
||||
needs:
|
||||
- lint-prettier
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: echo mark
|
12
.github/workflows/codeql-analysis.yml
vendored
12
.github/workflows/codeql-analysis.yml
vendored
@ -2,10 +2,10 @@ name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ main, '*', next, version* ]
|
||||
branches: [ master, '*', next, version* ]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ main ]
|
||||
branches: [ master ]
|
||||
schedule:
|
||||
- cron: '30 6 * * 5'
|
||||
|
||||
@ -28,11 +28,11 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v2
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
uses: github/codeql-action/init@v1
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
@ -43,7 +43,7 @@ jobs:
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
uses: github/codeql-action/autobuild@v1
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
@ -57,4 +57,4 @@ jobs:
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
||||
uses: github/codeql-action/analyze@v1
|
||||
|
22
.github/workflows/ghcr-retention.yml
vendored
22
.github/workflows/ghcr-retention.yml
vendored
@ -1,22 +0,0 @@
|
||||
name: ghcr-retention
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 0 * * *' # every day at midnight
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
clean-ghcr:
|
||||
name: Delete old unused container images
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Delete 'dev' containers older than a week
|
||||
uses: snok/container-retention-policy@v1
|
||||
with:
|
||||
image-names: dev-server,dev-ldap,dev-proxy
|
||||
cut-off: One week ago UTC
|
||||
account-type: org
|
||||
org-name: goauthentik
|
||||
untagged-only: false
|
||||
token: ${{ secrets.BOT_GITHUB_TOKEN }}
|
||||
skip-tags: gh-next,gh-main
|
208
.github/workflows/release-publish.yml
vendored
208
.github/workflows/release-publish.yml
vendored
@ -3,173 +3,179 @@ name: authentik-on-release
|
||||
on:
|
||||
release:
|
||||
types: [published, created]
|
||||
push:
|
||||
branches:
|
||||
- version-*
|
||||
|
||||
jobs:
|
||||
# Build
|
||||
build-server:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2.1.0
|
||||
uses: docker/setup-qemu-action@v1.2.0
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: prepare variables
|
||||
uses: ./.github/actions/docker-push-variables
|
||||
id: ev
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: Docker Login Registry
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Build Docker Image
|
||||
uses: docker/build-push-action@v3
|
||||
- name: Building Docker Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
push: ${{ github.event_name == 'release' }}
|
||||
secrets: |
|
||||
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
|
||||
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
|
||||
tags: |
|
||||
beryju/authentik:${{ steps.ev.outputs.version }},
|
||||
beryju/authentik:${{ steps.ev.outputs.versionFamily }},
|
||||
beryju/authentik:2021.8.5,
|
||||
beryju/authentik:latest,
|
||||
ghcr.io/goauthentik/server:${{ steps.ev.outputs.version }},
|
||||
ghcr.io/goauthentik/server:${{ steps.ev.outputs.versionFamily }},
|
||||
ghcr.io/goauthentik/server:2021.8.5,
|
||||
ghcr.io/goauthentik/server:latest
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
|
||||
build-outpost:
|
||||
context: .
|
||||
- name: Building Docker Image (stable)
|
||||
if: ${{ github.event_name == 'release' && !contains('2021.8.5', 'rc') }}
|
||||
run: |
|
||||
docker pull beryju/authentik:latest
|
||||
docker tag beryju/authentik:latest beryju/authentik:stable
|
||||
docker push beryju/authentik:stable
|
||||
docker pull ghcr.io/goauthentik/server:latest
|
||||
docker tag ghcr.io/goauthentik/server:latest ghcr.io/goauthentik/server:stable
|
||||
docker push ghcr.io/goauthentik/server:stable
|
||||
build-proxy:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
type:
|
||||
- proxy
|
||||
- ldap
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-go@v3
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-go@v2
|
||||
with:
|
||||
go-version: "^1.17"
|
||||
go-version: "^1.15"
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2.1.0
|
||||
uses: docker/setup-qemu-action@v1.2.0
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: prepare variables
|
||||
uses: ./.github/actions/docker-push-variables
|
||||
id: ev
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: Docker Login Registry
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Build Docker Image
|
||||
uses: docker/build-push-action@v3
|
||||
- name: Building Docker Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
push: ${{ github.event_name == 'release' }}
|
||||
tags: |
|
||||
beryju/authentik-${{ matrix.type }}:${{ steps.ev.outputs.version }},
|
||||
beryju/authentik-${{ matrix.type }}:${{ steps.ev.outputs.versionFamily }},
|
||||
beryju/authentik-${{ matrix.type }}:latest,
|
||||
ghcr.io/goauthentik/${{ matrix.type }}:${{ steps.ev.outputs.version }},
|
||||
ghcr.io/goauthentik/${{ matrix.type }}:${{ steps.ev.outputs.versionFamily }},
|
||||
ghcr.io/goauthentik/${{ matrix.type }}:latest
|
||||
file: ${{ matrix.type }}.Dockerfile
|
||||
beryju/authentik-proxy:2021.8.5,
|
||||
beryju/authentik-proxy:latest,
|
||||
ghcr.io/goauthentik/proxy:2021.8.5,
|
||||
ghcr.io/goauthentik/proxy:latest
|
||||
file: proxy.Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
|
||||
build-outpost-binary:
|
||||
timeout-minutes: 120
|
||||
- name: Building Docker Image (stable)
|
||||
if: ${{ github.event_name == 'release' && !contains('2021.8.5', 'rc') }}
|
||||
run: |
|
||||
docker pull beryju/authentik-proxy:latest
|
||||
docker tag beryju/authentik-proxy:latest beryju/authentik-proxy:stable
|
||||
docker push beryju/authentik-proxy:stable
|
||||
docker pull ghcr.io/goauthentik/proxy:latest
|
||||
docker tag ghcr.io/goauthentik/proxy:latest ghcr.io/goauthentik/proxy:stable
|
||||
docker push ghcr.io/goauthentik/proxy:stable
|
||||
build-ldap:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
type:
|
||||
- proxy
|
||||
- ldap
|
||||
goos: [linux, darwin]
|
||||
goarch: [amd64, arm64]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-go@v3
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-go@v2
|
||||
with:
|
||||
go-version: "^1.17"
|
||||
- uses: actions/setup-node@v3.6.0
|
||||
go-version: "^1.15"
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1.2.0
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: Docker Login Registry
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
node-version: '16'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- name: Build web
|
||||
working-directory: web/
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Building Docker Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
push: ${{ github.event_name == 'release' }}
|
||||
tags: |
|
||||
beryju/authentik-ldap:2021.8.5,
|
||||
beryju/authentik-ldap:latest,
|
||||
ghcr.io/goauthentik/ldap:2021.8.5,
|
||||
ghcr.io/goauthentik/ldap:latest
|
||||
file: ldap.Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- name: Building Docker Image (stable)
|
||||
if: ${{ github.event_name == 'release' && !contains('2021.8.5', 'rc') }}
|
||||
run: |
|
||||
npm ci
|
||||
npm run build-proxy
|
||||
- name: Build outpost
|
||||
run: |
|
||||
set -x
|
||||
export GOOS=${{ matrix.goos }}
|
||||
export GOARCH=${{ matrix.goarch }}
|
||||
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
|
||||
- name: Upload binaries to release
|
||||
uses: svenstaro/upload-release-action@v2
|
||||
with:
|
||||
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
|
||||
asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
|
||||
tag: ${{ github.ref }}
|
||||
docker pull beryju/authentik-ldap:latest
|
||||
docker tag beryju/authentik-ldap:latest beryju/authentik-ldap:stable
|
||||
docker push beryju/authentik-ldap:stable
|
||||
docker pull ghcr.io/goauthentik/ldap:latest
|
||||
docker tag ghcr.io/goauthentik/ldap:latest ghcr.io/goauthentik/ldap:stable
|
||||
docker push ghcr.io/goauthentik/ldap:stable
|
||||
test-release:
|
||||
needs:
|
||||
- build-server
|
||||
- build-outpost
|
||||
- build-outpost-binary
|
||||
- build-proxy
|
||||
- build-ldap
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v2
|
||||
- name: Run test suite in final docker images
|
||||
run: |
|
||||
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
|
||||
sudo apt-get install -y pwgen
|
||||
echo "PG_PASS=$(pwgen 40 1)" >> .env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(pwgen 50 1)" >> .env
|
||||
docker-compose pull -q
|
||||
docker-compose up --no-start
|
||||
docker-compose start postgresql redis
|
||||
docker-compose run -u root server test-all
|
||||
docker-compose run -u root server test
|
||||
sentry-release:
|
||||
if: ${{ github.event_name == 'release' }}
|
||||
needs:
|
||||
- build-server
|
||||
- build-outpost
|
||||
- build-outpost-binary
|
||||
- test-release
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: prepare variables
|
||||
uses: ./.github/actions/docker-push-variables
|
||||
id: ev
|
||||
- name: Get static files from docker image
|
||||
- uses: actions/checkout@v2
|
||||
- name: Setup Node.js environment
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16'
|
||||
- name: Build web api client and web ui
|
||||
run: |
|
||||
docker pull ghcr.io/goauthentik/server:latest
|
||||
container=$(docker container create ghcr.io/goauthentik/server:latest)
|
||||
docker cp ${container}:web/ .
|
||||
export NODE_ENV=production
|
||||
cd web
|
||||
npm i
|
||||
npm run build
|
||||
- name: Create a Sentry.io release
|
||||
uses: getsentry/action-release@v1
|
||||
continue-on-error: true
|
||||
if: ${{ github.event_name == 'release' }}
|
||||
env:
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_ORG: authentik-security-inc
|
||||
SENTRY_ORG: beryjuorg
|
||||
SENTRY_PROJECT: authentik
|
||||
SENTRY_URL: https://sentry.beryju.org
|
||||
with:
|
||||
version: authentik@${{ steps.ev.outputs.version }}
|
||||
version: authentik@2021.8.5
|
||||
environment: beryjuorg-prod
|
||||
sourcemaps: './web/dist'
|
||||
url_prefix: '~/static/dist'
|
||||
|
23
.github/workflows/release-tag.yml
vendored
23
.github/workflows/release-tag.yml
vendored
@ -10,30 +10,33 @@ jobs:
|
||||
name: Create Release from Tag
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v2
|
||||
- name: Pre-release test
|
||||
run: |
|
||||
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
|
||||
docker buildx install
|
||||
docker build -t testing:latest .
|
||||
echo "AUTHENTIK_IMAGE=testing" >> .env
|
||||
sudo apt-get install -y pwgen
|
||||
echo "AUTHENTIK_TAG=latest" >> .env
|
||||
echo "PG_PASS=$(pwgen 40 1)" >> .env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(pwgen 50 1)" >> .env
|
||||
docker-compose pull -q
|
||||
docker build \
|
||||
--no-cache \
|
||||
-t ghcr.io/goauthentik/server:latest \
|
||||
-f Dockerfile .
|
||||
docker-compose up --no-start
|
||||
docker-compose start postgresql redis
|
||||
docker-compose run -u root server test-all
|
||||
docker-compose run -u root server test
|
||||
- name: Extract version number
|
||||
id: get_version
|
||||
uses: actions/github-script@v6
|
||||
uses: actions/github-script@v4.1
|
||||
with:
|
||||
github-token: ${{ secrets.BOT_GITHUB_TOKEN }}
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
return context.payload.ref.replace(/\/refs\/tags\/version\//, '');
|
||||
- name: Create Release
|
||||
id: create_release
|
||||
uses: actions/create-release@v1.1.4
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
tag_name: ${{ github.ref }}
|
||||
release_name: Release ${{ steps.get_version.outputs.result }}
|
||||
|
38
.github/workflows/translation-compile.yml
vendored
38
.github/workflows/translation-compile.yml
vendored
@ -1,38 +0,0 @@
|
||||
name: authentik-backend-translate-compile
|
||||
on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
paths:
|
||||
- '/locale/'
|
||||
pull_request:
|
||||
paths:
|
||||
- '/locale/'
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
POSTGRES_DB: authentik
|
||||
POSTGRES_USER: authentik
|
||||
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
|
||||
|
||||
jobs:
|
||||
compile:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
token: ${{ secrets.BOT_GITHUB_TOKEN }}
|
||||
- name: Setup authentik env
|
||||
uses: ./.github/actions/setup
|
||||
- name: run compile
|
||||
run: poetry run ./manage.py compilemessages
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
id: cpr
|
||||
with:
|
||||
token: ${{ secrets.BOT_GITHUB_TOKEN }}
|
||||
branch: compile-backend-translation
|
||||
commit-message: "core: compile backend translations"
|
||||
title: "core: compile backend translations"
|
||||
body: "core: compile backend translations"
|
||||
delete-branch: true
|
||||
signoff: true
|
38  .github/workflows/web-api-publish.yml  vendored
@@ -1,49 +1,39 @@
name: authentik-web-api-publish
on:
  push:
    branches: [ main ]
    branches: [ master ]
    paths:
      - 'schema.yml'
  workflow_dispatch:
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          token: ${{ secrets.BOT_GITHUB_TOKEN }}
      - uses: actions/setup-node@v3.6.0
      - uses: actions/checkout@v2
      # Setup .npmrc file to publish to npm
      - uses: actions/setup-node@v2
        with:
          node-version: '16'
          registry-url: 'https://registry.npmjs.org'
      - name: Generate API Client
        run: make gen-client-ts
        run: make gen-web
      - name: Publish package
        working-directory: gen-ts-api/
        run: |
          npm ci
          cd web-api/
          npm i
          npm publish
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}
      - name: Upgrade /web
        working-directory: web/
        run: |
          export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
          cd web/
          export VERSION=`node -e 'console.log(require("../web-api/package.json").version)'`
          npm i @goauthentik/api@$VERSION
      - uses: peter-evans/create-pull-request@v4
        id: cpr
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v3
        with:
          token: ${{ secrets.BOT_GITHUB_TOKEN }}
          token: ${{ secrets.GITHUB_TOKEN }}
          branch: update-web-api-client
          commit-message: "web: bump API Client version"
          title: "web: bump API Client version"
          body: "web: bump API Client version"
          commit-message: "web: Update Web API Client version"
          title: "web: Update Web API Client version"
          delete-branch: true
          signoff: true
          team-reviewers: "@goauthentik/core"
          author: authentik bot <github-bot@goauthentik.io>
      - uses: peter-evans/enable-pull-request-automerge@v2
        with:
          token: ${{ secrets.BOT_GITHUB_TOKEN }}
          pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
          merge-method: squash
12  .gitignore  vendored
@@ -66,9 +66,7 @@ coverage.xml
unittest.xml

# Translations
# Have to include binary mo files as they are annoying to compile at build time
# since a full postgres and redis instance are required
# *.mo
*.mo

# Django stuff:
@@ -193,10 +191,14 @@ pip-selfcheck.json
# End of https://www.gitignore.io/api/python,django
/static/
local.env.yml
.vscode/

# Selenium Screenshots
selenium_screenshots/
backups/
media/
*mmdb

.idea/
/gen-*/
data/
/api/
/web-api/
36  .vscode/settings.json  vendored
@@ -1,6 +1,5 @@
{
    "cSpell.words": [
        "akadmin",
        "asgi",
        "authentik",
        "authn",
@@ -11,40 +10,13 @@
        "plex",
        "saml",
        "totp",
        "webauthn",
        "traefik",
        "passwordless",
        "kubernetes",
        "sso",
        "slo"
        "webauthn"
    ],
    "python.linting.pylintEnabled": true,
    "todo-tree.tree.showCountsInTree": true,
    "todo-tree.tree.showBadges": true,
    "python.formatting.provider": "black",
    "yaml.customTags": [
        "!Find sequence",
        "!KeyOf scalar",
        "!Context scalar",
        "!Context sequence",
        "!Format sequence",
        "!Condition sequence",
        "!Env sequence",
        "!Env scalar"
    ],
    "typescript.preferences.importModuleSpecifier": "non-relative",
    "typescript.preferences.importModuleSpecifierEnding": "index",
    "typescript.tsdk": "./web/node_modules/typescript/lib",
    "typescript.enablePromptUseWorkspaceTsdk": true,
    "yaml.schemas": {
        "./blueprints/schema.json": "blueprints/**/*.yaml"
    },
    "gitlens.autolinks": [
        {
            "alphanumeric": true,
            "prefix": "#<num>",
            "url": "https://github.com/goauthentik/authentik/issues/<num>",
            "ignoreCase": false
        }
    ]
    "files.associations": {
        "*.akflow": "json"
    }
}
86  .vscode/tasks.json  vendored
@@ -1,86 +0,0 @@
{
    "version": "2.0.0",
    "tasks": [
        {
            "label": "authentik[core]: format & test",
            "command": "poetry",
            "args": ["run", "make"],
            "group": "build",
        },
        {
            "label": "authentik[core]: run",
            "command": "poetry",
            "args": ["run", "make", "run"],
            "group": "build",
            "presentation": {
                "panel": "dedicated",
                "group": "running"
            },
        },
        {
            "label": "authentik[web]: format",
            "command": "make",
            "args": ["web"],
            "group": "build",
        },
        {
            "label": "authentik[web]: watch",
            "command": "make",
            "args": ["web-watch"],
            "group": "build",
            "presentation": {
                "panel": "dedicated",
                "group": "running"
            },
        },
        {
            "label": "authentik: install",
            "command": "make",
            "args": ["install"],
            "group": "build",
        },
        {
            "label": "authentik: i18n-extract",
            "command": "poetry",
            "args": ["run", "make", "i18n-extract"],
            "group": "build",
        },
        {
            "label": "authentik[website]: format",
            "command": "make",
            "args": ["website"],
            "group": "build",
        },
        {
            "label": "authentik[website]: watch",
            "command": "make",
            "args": ["website-watch"],
            "group": "build",
            "presentation": {
                "panel": "dedicated",
                "group": "running"
            },
        },
        {
            "label": "authentik[api]: generate",
            "command": "poetry",
            "args": ["run", "make", "gen"],
            "group": "build"
        },
    ]
}
CODE_OF_CONDUCT.md
@@ -17,24 +17,24 @@ diverse, inclusive, and healthy community.
Examples of behavior that contributes to a positive environment for our
community include:

- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
- Focusing on what is best not just for us as individuals, but for the
  overall community
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
  overall community

Examples of unacceptable behavior include:

- The use of sexualized language or imagery, and sexual attention or
  advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email
  address, without their explicit permission
- Other conduct which could reasonably be considered inappropriate in a
  professional setting
* The use of sexualized language or imagery, and sexual attention or
  advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
  address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

@@ -60,7 +60,7 @@ representative at an online or offline event.

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
hello@goauthentik.io.
hello@beryju.org.
All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the
@@ -106,7 +106,7 @@ Violating these terms may lead to a permanent ban.
### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within

CONTRIBUTING.md
@@ -11,22 +11,19 @@ The following is a set of guidelines for contributing to authentik and its compo
[I don't want to read this whole thing, I just have a question!!!](#i-dont-want-to-read-this-whole-thing-i-just-have-a-question)

[What should I know before I get started?](#what-should-i-know-before-i-get-started)

- [The components](#the-components)
- [authentik's structure](#authentiks-structure)
* [The components](#the-components)
* [authentik's structure](#authentiks-structure)

[How Can I Contribute?](#how-can-i-contribute)

- [Reporting Bugs](#reporting-bugs)
- [Suggesting Enhancements](#suggesting-enhancements)
- [Your First Code Contribution](#your-first-code-contribution)
- [Pull Requests](#pull-requests)
* [Reporting Bugs](#reporting-bugs)
* [Suggesting Enhancements](#suggesting-enhancements)
* [Your First Code Contribution](#your-first-code-contribution)
* [Pull Requests](#pull-requests)

[Styleguides](#styleguides)

- [Git Commit Messages](#git-commit-messages)
- [Python Styleguide](#python-styleguide)
- [Documentation Styleguide](#documentation-styleguide)
* [Git Commit Messages](#git-commit-messages)
* [Python Styleguide](#python-styleguide)
* [Documentation Styleguide](#documentation-styleguide)

## Code of Conduct

@@ -34,7 +31,7 @@ Basically, don't be a dickhead. This is an open-source non-profit project, that

## I don't want to read this whole thing I just have a question!!!

Either [create a question on GitHub](https://github.com/goauthentik/authentik/issues/new?assignees=&labels=question&template=question.md&title=) or join [the Discord server](https://goauthentik.io/discord)
Either [create a question on GitHub](https://github.com/goauthentik/authentik/issues/new?assignees=&labels=question&template=question.md&title=) or join [the Discord server](https://discord.gg/jg33eMhnj6)

## What should I know before I get started?

@@ -42,11 +39,11 @@ Either [create a question on GitHub](https://github.com/goauthentik/authentik/is

authentik consists of a few larger components:

- _authentik_ the actual application server, is described below.
- _outpost-proxy_ is a Go application based on a forked version of oauth2_proxy, which does identity-aware reverse proxying.
- _outpost-ldap_ is a Go LDAP server that uses the _authentik_ application server as its backend
- _web_ is the web frontend, both for administrating and using authentik. It is written in TypeScript using lit-html and the PatternFly CSS Library.
- _website_ is the Website/documentation, which uses docusaurus.
- *authentik* the actual application server, is described below.
- *outpost-proxy* is a Go application based on a forked version of oauth2_proxy, which does identity-aware reverse proxying.
- *outpost-ldap* is a Go LDAP server that uses the *authentik* application server as its backend
- *web* is the web frontend, both for administrating and using authentik. It is written in TypeScript using lit-html and the PatternFly CSS Library.
- *website* is the Website/documentation, which uses docusaurus.

### authentik's structure

@@ -59,18 +56,19 @@ These are the current packages:
authentik
├── admin - Administrative tasks and APIs, no models (Version updates, Metrics, system tasks)
├── api - General API Configuration (Routes, Schema and general API utilities)
├── blueprints - Handle managed models and their state.
├── core - Core authentik functionality, central routes, core Models
├── crypto - Cryptography, currently used to generate and hold Certificates and Private Keys
├── events - Event Log, middleware and signals to generate signals
├── flows - Flows, the FlowPlanner and the FlowExecutor, used for all flows for authentication, authorization, etc
├── lib - Generic library of functions, few dependencies on other packages.
├── managed - Handle managed models and their state.
├── outposts - Configure and deploy outposts on kubernetes and docker.
├── policies - General PolicyEngine
│   ├── dummy - A Dummy policy used for testing
│   ├── event_matcher - Match events based on different criteria
│   ├── expiry - Check when a user's password was last set
│   ├── expression - Execute any arbitrary python code
│   ├── hibp - Check a password against HaveIBeenPwned
│   ├── password - Check a password against several rules
│   └── reputation - Check the user's/client's reputation
├── providers
@@ -119,7 +117,7 @@ This section guides you through submitting a bug report for authentik. Following

Whenever authentik encounters an error, it will be logged as an Event with the type `system_exception`. This event type has a button to directly open a pre-filled GitHub issue form.

This form will have the full stack trace of the error that occurred and shouldn't contain any sensitive data.
This form will have the full stack trace of the error that ocurred and shouldn't contain any sensitive data.

### Suggesting Enhancements

@@ -133,16 +131,16 @@ When you are creating an enhancement suggestion, please fill in [the template](h

authentik can be run locally, all though depending on which part you want to work on, different pre-requisites are required.

This is documented in the [developer docs](https://goauthentik.io/developer-docs/?utm_source=github)
This is documented in the [developer docs](https://goauthentik.io/developer-docs/)

### Pull Requests

The process described here has several goals:

- Maintain authentik's quality
- Fix problems that are important to users
- Engage the community in working toward the best possible authentik
- Enable a sustainable system for authentik's maintainers to review contributions
- Maintain authentik's quality
- Fix problems that are important to users
- Engage the community in working toward the best possible authentik
- Enable a sustainable system for authentik's maintainers to review contributions

Please follow these steps to have your contribution considered by the maintainers:

@@ -156,10 +154,10 @@ While the prerequisites above must be satisfied prior to having your pull reques

### Git Commit Messages

- Use the format of `<package>: <verb> <description>`
    - See [here](#authentik-packages) for `package`
    - Example: `providers/saml2: fix parsing of requests`
- Reference issues and pull requests liberally after the first line
* Use the format of `<package>: <verb> <description>`
    - See [here](#authentik-packages) for `package`
    - Example: `providers/saml2: fix parsing of requests`
* Reference issues and pull requests liberally after the first line

### Python Styleguide

@@ -167,11 +165,11 @@ All Python code is linted with [black](https://black.readthedocs.io/en/stable/),

authentik runs on Python 3.9 at the time of writing this.

- Use native type-annotations wherever possible.
- Add meaningful docstrings when possible.
- Ensure any database migrations work properly from the last stable version (this is checked via CI)
- If your code changes central functions, make sure nothing else is broken.
* Use native type-annotations wherever possible.
* Add meaningful docstrings when possible.
* Ensure any database migrations work properly from the last stable version (this is checked via CI)
* If your code changes central functions, make sure nothing else is broken.

### Documentation Styleguide

- Use [MDX](https://mdxjs.com/) whenever appropriate.
* Use [MDX](https://mdxjs.com/) whenever appropriate.
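The CONTRIBUTING.md hunks above spell out the Python styleguide: black-formatted code on Python 3.9, native type annotations, and meaningful docstrings. As an illustrative aside that is not part of any file in this diff, here is a minimal sketch of a function written to those conventions; the module and function names are hypothetical and not taken from the authentik codebase:

```python
"""Hypothetical helper, shown only to illustrate the styleguide conventions."""
from datetime import datetime, timedelta
from typing import Optional


def expiry_from_now(hours: int, start: Optional[datetime] = None) -> datetime:
    """Return the point in time `hours` hours after `start`, defaulting to the current UTC time."""
    base = start if start is not None else datetime.utcnow()
    return base + timedelta(hours=hours)
```

Annotated arguments and return type plus a one-line docstring are exactly what the first two bullet points of the styleguide ask for.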
139  Dockerfile
@@ -1,118 +1,103 @@
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as website-builder
# Stage 1: Lock python dependencies
FROM python:3.9-slim-buster as locker

COPY ./website /work/website/
COPY ./blueprints /work/blueprints/
COPY ./SECURITY.md /work/
COPY ./Pipfile /app/
COPY ./Pipfile.lock /app/

WORKDIR /app/

RUN pip install pipenv && \
    pipenv lock -r > requirements.txt && \
    pipenv lock -r --dev-only > requirements-dev.txt

# Stage 2: Build website
FROM node as website-builder

COPY ./website /static/

ENV NODE_ENV=production
WORKDIR /work/website
RUN npm ci && npm run build-docs-only
RUN cd /static && npm i && npm run build-docs-only

# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as web-builder
# Stage 3: Generate API Client
FROM openapitools/openapi-generator-cli as go-api-builder

COPY ./web /work/web/
COPY ./website /work/website/
COPY ./schema.yml /local/schema.yml

RUN docker-entrypoint.sh generate \
    --git-host goauthentik.io \
    --git-repo-id outpost \
    --git-user-id api \
    -i /local/schema.yml \
    -g go \
    -o /local/api \
    --additional-properties=packageName=api,enumClassPrefix=true,useOneOfDiscriminatorLookup=true && \
    rm -f /local/api/go.mod /local/api/go.sum

# Stage 4: Build webui
FROM node as web-builder

COPY ./web /static/

ENV NODE_ENV=production
WORKDIR /work/web
RUN npm ci && npm run build
RUN cd /static && npm i && npm run build

# Stage 3: Poetry to requirements.txt export
FROM docker.io/python:3.11.1-slim-bullseye AS poetry-locker

WORKDIR /work
COPY ./pyproject.toml /work
COPY ./poetry.lock /work

RUN pip install --no-cache-dir poetry && \
    poetry export -f requirements.txt --output requirements.txt && \
    poetry export -f requirements.txt --dev --output requirements-dev.txt

# Stage 4: Build go proxy
FROM docker.io/golang:1.19.5-bullseye AS go-builder
# Stage 5: Build go proxy
FROM golang:1.17.0 AS builder

WORKDIR /work

COPY --from=web-builder /work/web/robots.txt /work/web/robots.txt
COPY --from=web-builder /work/web/security.txt /work/web/security.txt
COPY --from=web-builder /static/robots.txt /work/web/robots.txt
COPY --from=web-builder /static/security.txt /work/web/security.txt
COPY --from=web-builder /static/dist/ /work/web/dist/
COPY --from=web-builder /static/authentik/ /work/web/authentik/
COPY --from=website-builder /static/help/ /work/website/help/

COPY --from=go-api-builder /local/api api
COPY ./cmd /work/cmd
COPY ./web/static.go /work/web/static.go
COPY ./website/static.go /work/website/static.go
COPY ./internal /work/internal
COPY ./go.mod /work/go.mod
COPY ./go.sum /work/go.sum

RUN go build -o /work/authentik ./cmd/server/

# Stage 5: MaxMind GeoIP
FROM docker.io/maxmindinc/geoipupdate:v4.10 as geoip

ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
ENV GEOIPUPDATE_VERBOSE="true"

RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
    --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
    mkdir -p /usr/share/GeoIP && \
    /bin/sh -c "\
        export GEOIPUPDATE_ACCOUNT_ID=$(cat /run/secrets/GEOIPUPDATE_ACCOUNT_ID); \
        export GEOIPUPDATE_LICENSE_KEY=$(cat /run/secrets/GEOIPUPDATE_LICENSE_KEY); \
        /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0 \
    "
RUN go build -o /work/authentik ./cmd/server/main.go

# Stage 6: Run
FROM docker.io/python:3.11.1-slim-bullseye AS final-image

LABEL org.opencontainers.image.url https://goauthentik.io
LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik
FROM python:3.9-slim-buster

WORKDIR /
COPY --from=locker /app/requirements.txt /
COPY --from=locker /app/requirements-dev.txt /

ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH

COPY --from=poetry-locker /work/requirements.txt /
COPY --from=poetry-locker /work/requirements-dev.txt /
COPY --from=geoip /usr/share/GeoIP /geoip

RUN apt-get update && \
    # Required for installing pip packages
    apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev && \
    # Required for runtime
    apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
    # Required for bootstrap & healtcheck
    apt-get install -y --no-install-recommends curl runit && \
    pip install --no-cache-dir -r /requirements.txt && \
    apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev && \
    apt-get install -y --no-install-recommends curl ca-certificates gnupg git runit && \
    curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
    echo "deb http://apt.postgresql.org/pub/repos/apt buster-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
    apt-get update && \
    apt-get install -y --no-install-recommends libpq-dev postgresql-client build-essential libxmlsec1-dev pkg-config libmaxminddb0 && \
    pip install -r /requirements.txt --no-cache-dir && \
    apt-get remove --purge -y build-essential git && \
    apt-get autoremove --purge -y && \
    apt-get clean && \
    rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
    adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
    mkdir -p /certs /media /blueprints && \
    mkdir -p /authentik/.ssh && \
    chown authentik:authentik /certs /media /authentik/.ssh
    mkdir /backups && \
    chown authentik:authentik /backups

COPY ./authentik/ /authentik
COPY ./pyproject.toml /
COPY ./xml /xml
COPY ./locale /locale
COPY ./tests /tests
COPY ./manage.py /
COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle
COPY --from=go-builder /work/authentik /authentik-proxy
COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=website-builder /work/website/help/ /website/help/

USER 1000
COPY --from=builder /work/authentik /authentik-proxy

USER authentik
ENV TMPDIR /dev/shm/
ENV PYTHONUNBUFFERED 1
ENV PYTHONUBUFFERED 1
ENV prometheus_multiproc_dir /dev/shm/
ENV PATH "/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/lifecycle"

HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "/lifecycle/ak", "healthcheck" ]

ENTRYPOINT [ "/usr/local/bin/dumb-init", "--", "/lifecycle/ak" ]
ENTRYPOINT [ "/lifecycle/ak" ]
687  LICENSE
@@ -1,21 +1,674 @@
MIT License
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (c) 2022 Jens Langhammer
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
Preamble

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The GNU General Public License is a free, copyleft license for
software and other kinds of works.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
Makefile (187 lines changed)
@@ -4,191 +4,68 @@ UID = $(shell id -u)
|
||||
GID = $(shell id -g)
|
||||
NPM_VERSION = $(shell python -m scripts.npm_version)
|
||||
|
||||
all: lint-fix lint test gen web
|
||||
all: lint-fix lint test gen
|
||||
|
||||
test-go:
|
||||
go test -timeout 0 -v -race -cover ./...
|
||||
test-integration:
|
||||
coverage run manage.py test -v 3 tests/integration
|
||||
|
||||
test-docker:
|
||||
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
|
||||
docker-compose pull -q
|
||||
docker-compose up --no-start
|
||||
docker-compose start postgresql redis
|
||||
docker-compose run -u root server test
|
||||
rm -f .env
|
||||
test-e2e:
|
||||
coverage run manage.py test --failfast -v 3 tests/e2e
|
||||
|
||||
test:
|
||||
coverage run manage.py test --keepdb authentik
|
||||
coverage run manage.py test -v 3 authentik
|
||||
coverage html
|
||||
coverage report
|
||||
|
||||
lint-fix:
|
||||
isort authentik tests scripts lifecycle
|
||||
black authentik tests scripts lifecycle
|
||||
codespell -I .github/codespell-words.txt -S 'web/src/locales/**' -w \
|
||||
authentik \
|
||||
internal \
|
||||
cmd \
|
||||
web/src \
|
||||
website/src \
|
||||
website/docs \
|
||||
website/developer-docs
|
||||
isort authentik tests lifecycle
|
||||
black authentik tests lifecycle
|
||||
|
||||
lint:
|
||||
pylint authentik tests lifecycle
|
||||
pyright authentik tests lifecycle
|
||||
bandit -r authentik tests lifecycle -x node_modules
|
||||
golangci-lint run -v
|
||||
|
||||
migrate:
|
||||
python -m lifecycle.migrate
|
||||
|
||||
run:
|
||||
go run -v ./cmd/server/
|
||||
|
||||
i18n-extract: i18n-extract-core web-extract
|
||||
|
||||
i18n-extract-core:
|
||||
ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en
|
||||
|
||||
#########################
|
||||
## API Schema
|
||||
#########################
|
||||
pylint authentik tests lifecycle
|
||||
|
||||
gen-build:
|
||||
AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
|
||||
AUTHENTIK_DEBUG=true ak spectacular --file schema.yml
|
||||
|
||||
gen-diff:
|
||||
git show $(shell git describe --abbrev=0):schema.yml > old_schema.yml
|
||||
docker run \
|
||||
--rm -v ${PWD}:/local \
|
||||
--user ${UID}:${GID} \
|
||||
docker.io/openapitools/openapi-diff:2.1.0-beta.3 \
|
||||
--markdown /local/diff.md \
|
||||
/local/old_schema.yml /local/schema.yml
|
||||
rm old_schema.yml
|
||||
./manage.py spectacular --file schema.yml
|
||||
|
||||
gen-clean:
|
||||
rm -rf web/api/src/
|
||||
rm -rf api/
|
||||
|
||||
gen-client-ts:
|
||||
gen-web:
|
||||
docker run \
|
||||
--rm -v ${PWD}:/local \
|
||||
--user ${UID}:${GID} \
|
||||
docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
|
||||
openapitools/openapi-generator-cli generate \
|
||||
-i /local/schema.yml \
|
||||
-g typescript-fetch \
|
||||
-o /local/gen-ts-api \
|
||||
-c /local/scripts/api-ts-config.yaml \
|
||||
--additional-properties=npmVersion=${NPM_VERSION} \
|
||||
--git-repo-id authentik \
|
||||
--git-user-id goauthentik
|
||||
-o /local/web-api \
|
||||
--additional-properties=typescriptThreePlus=true,supportsES6=true,npmName=@goauthentik/api,npmVersion=${NPM_VERSION}
|
||||
mkdir -p web/node_modules/@goauthentik/api
|
||||
cd gen-ts-api && npm i
|
||||
\cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api
|
||||
python -m scripts.web_api_esm
|
||||
\cp -fv scripts/web_api_readme.md web-api/README.md
|
||||
cd web-api && npm i
|
||||
\cp -rfv web-api/* web/node_modules/@goauthentik/api
|
||||
|
||||
gen-client-go:
|
||||
wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O config.yaml
|
||||
mkdir -p templates
|
||||
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O templates/README.mustache
|
||||
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O templates/go.mod.mustache
|
||||
gen-outpost:
|
||||
docker run \
|
||||
--rm -v ${PWD}:/local \
|
||||
--user ${UID}:${GID} \
|
||||
docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
|
||||
openapitools/openapi-generator-cli generate \
|
||||
--git-host goauthentik.io \
|
||||
--git-repo-id outpost \
|
||||
--git-user-id api \
|
||||
-i /local/schema.yml \
|
||||
-g go \
|
||||
-o /local/gen-go-api \
|
||||
-c /local/config.yaml
|
||||
go mod edit -replace goauthentik.io/api/v3=./gen-go-api
|
||||
rm -rf config.yaml ./templates/
|
||||
-o /local/api \
|
||||
--additional-properties=packageName=api,enumClassPrefix=true,useOneOfDiscriminatorLookup=true,disallowAdditionalPropertiesIfNotPresent=false
|
||||
rm -f api/go.mod api/go.sum
|
||||
|
||||
gen-dev-config:
|
||||
python -m scripts.generate_config
|
||||
gen: gen-build gen-clean gen-web gen-outpost
|
||||
|
||||
gen: gen-build gen-clean gen-client-ts
|
||||
migrate:
|
||||
python -m lifecycle.migrate
|
||||
|
||||
#########################
|
||||
## Web
|
||||
#########################
|
||||
|
||||
web-build: web-install
|
||||
cd web && npm run build
|
||||
|
||||
web: web-lint-fix web-lint web-check-compile
|
||||
|
||||
web-install:
|
||||
cd web && npm ci
|
||||
|
||||
web-watch:
|
||||
rm -rf web/dist/
|
||||
mkdir web/dist/
|
||||
touch web/dist/.gitkeep
|
||||
cd web && npm run watch
|
||||
|
||||
web-lint-fix:
|
||||
cd web && npm run prettier
|
||||
|
||||
web-lint:
|
||||
cd web && npm run lint
|
||||
cd web && npm run lit-analyse
|
||||
|
||||
web-check-compile:
|
||||
cd web && npm run tsc
|
||||
|
||||
web-extract:
|
||||
cd web && npm run extract
|
||||
|
||||
#########################
|
||||
## Website
|
||||
#########################
|
||||
|
||||
website: website-lint-fix website-build
|
||||
|
||||
website-install:
|
||||
cd website && npm ci
|
||||
|
||||
website-lint-fix:
|
||||
cd website && npm run prettier
|
||||
|
||||
website-build:
|
||||
cd website && npm run build
|
||||
|
||||
website-watch:
|
||||
cd website && npm run watch
|
||||
|
||||
# These targets are use by GitHub actions to allow usage of matrix
|
||||
# which makes the YAML File a lot smaller
|
||||
PY_SOURCES=authentik tests lifecycle
|
||||
ci--meta-debug:
|
||||
python -V
|
||||
node --version
|
||||
|
||||
ci-pylint: ci--meta-debug
|
||||
pylint $(PY_SOURCES)
|
||||
|
||||
ci-black: ci--meta-debug
|
||||
black --check $(PY_SOURCES)
|
||||
|
||||
ci-isort: ci--meta-debug
|
||||
isort --check $(PY_SOURCES)
|
||||
|
||||
ci-bandit: ci--meta-debug
|
||||
bandit -r $(PY_SOURCES)
|
||||
|
||||
ci-pyright: ci--meta-debug
|
||||
./web/node_modules/.bin/pyright $(PY_SOURCES)
|
||||
|
||||
ci-pending-migrations: ci--meta-debug
|
||||
ak makemigrations --check
|
||||
|
||||
install: web-install website-install
|
||||
poetry install
|
||||
|
||||
dev-reset:
|
||||
dropdb -U postgres -h localhost authentik
|
||||
createdb -U postgres -h localhost authentik
|
||||
redis-cli -n 0 flushall
|
||||
make migrate
|
||||
run:
|
||||
go run -v cmd/server/main.go
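The test-docker target earlier in this Makefile seeds .env with PG_PASS and AUTHENTIK_SECRET_KEY via openssl rand. A small Python sketch that produces an equivalent pair of lines when openssl is not available (the variable names are the ones the Makefile writes; the generation method itself is only an illustration):

    import secrets

    # same keys the test-docker target appends to .env
    print(f"PG_PASS={secrets.token_urlsafe(32)}")
    print(f"AUTHENTIK_SECRET_KEY={secrets.token_urlsafe(32)}")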
Pipfile (new file, 66 lines)
@@ -0,0 +1,66 @@
|
||||
[[source]]
|
||||
name = "pypi"
|
||||
url = "https://pypi.org/simple"
|
||||
verify_ssl = true
|
||||
|
||||
[packages]
|
||||
boto3 = "*"
|
||||
celery = "*"
|
||||
channels = "*"
|
||||
channels-redis = "*"
|
||||
dacite = "*"
|
||||
defusedxml = "*"
|
||||
django = "*"
|
||||
django-dbbackup = { git = 'https://github.com/django-dbbackup/django-dbbackup.git', ref = '9d1909c30a3271c8c9c8450add30d6e0b996e145' }
|
||||
django-filter = "*"
|
||||
django-guardian = "*"
|
||||
django-model-utils = "*"
|
||||
django-otp = "*"
|
||||
django-prometheus = "*"
|
||||
django-redis = "*"
|
||||
django-storages = "*"
|
||||
djangorestframework = "*"
|
||||
djangorestframework-guardian = "*"
|
||||
docker = "*"
|
||||
drf-spectacular = "*"
|
||||
facebook-sdk = "*"
|
||||
geoip2 = "*"
|
||||
gunicorn = "*"
|
||||
kubernetes = "*"
|
||||
ldap3 = "*"
|
||||
lxml = ">=4.6.3"
|
||||
packaging = "*"
|
||||
psycopg2-binary = "*"
|
||||
pycryptodome = "*"
|
||||
pyjwt = "*"
|
||||
pyyaml = "*"
|
||||
requests-oauthlib = "*"
|
||||
sentry-sdk = "*"
|
||||
service_identity = "*"
|
||||
structlog = "*"
|
||||
swagger-spec-validator = "*"
|
||||
twisted = "==21.7.0"
|
||||
urllib3 = {extras = ["secure"],version = "*"}
|
||||
uvicorn = {extras = ["standard"],version = "*"}
|
||||
webauthn = "*"
|
||||
xmlsec = "*"
|
||||
duo-client = "*"
|
||||
ua-parser = "*"
|
||||
deepmerge = "*"
|
||||
colorama = "*"
|
||||
|
||||
[requires]
|
||||
python_version = "3.9"
|
||||
|
||||
[dev-packages]
|
||||
bandit = "*"
|
||||
black = "==21.5b1"
|
||||
bump2version = "*"
|
||||
colorama = "*"
|
||||
coverage = "*"
|
||||
pylint = "*"
|
||||
pylint-django = "*"
|
||||
pytest = "*"
|
||||
pytest-django = "*"
|
||||
selenium = "*"
|
||||
requests-mock = "*"
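The Pipfile above is plain TOML, so its dependency groups can be inspected without pipenv. A minimal sketch using the standard-library tomllib (Python 3.11+), assuming it is run from the repository root:

    import tomllib

    with open("Pipfile", "rb") as handle:
        pipfile = tomllib.load(handle)

    # "packages" and "dev-packages" mirror the sections listed above
    print(sorted(pipfile["packages"]))
    print(sorted(pipfile["dev-packages"]))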
Pipfile.lock (generated, new file, 1879 lines; diff suppressed because it is too large)
README.md (45 lines changed)
@@ -4,14 +4,15 @@
|
||||
|
||||
---
|
||||
|
||||
[](https://goauthentik.io/discord)
|
||||
[](https://github.com/goauthentik/authentik/actions/workflows/ci-main.yml)
|
||||
[](https://github.com/goauthentik/authentik/actions/workflows/ci-outpost.yml)
|
||||
[](https://github.com/goauthentik/authentik/actions/workflows/ci-web.yml)
|
||||
[](https://discord.gg/jg33eMhnj6)
|
||||
[](https://github.com/goauthentik/authentik/actions/workflows/ci-main.yml)
|
||||
[](https://github.com/goauthentik/authentik/actions/workflows/ci-outpost.yml)
|
||||
[](https://github.com/goauthentik/authentik/actions/workflows/ci-web.yml)
|
||||
[](https://codecov.io/gh/goauthentik/authentik)
|
||||
[](https://goauthentik.testspace.com/)
|
||||

|
||||

|
||||
[](https://www.transifex.com/authentik/authentik/)
|
||||
[](https://www.transifex.com/beryjuorg/authentik/)
|
||||
|
||||
## What is authentik?
|
||||
|
||||
@@ -19,41 +20,21 @@ authentik is an open-source Identity Provider focused on flexibility and versati
|
||||
|
||||
## Installation
|
||||
|
||||
For small/test setups it is recommended to use docker-compose, see the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github)
|
||||
For small/test setups it is recommended to use docker-compose, see the [documentation](https://goauthentik.io/docs/installation/docker-compose/)
|
||||
|
||||
For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github)
|
||||
For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/)
|
||||
|
||||
## Screenshots
|
||||
|
||||
| Light | Dark |
|
||||
| ------------------------------------------------------ | ----------------------------------------------------- |
|
||||
|  |  |
|
||||
|  |  |
|
||||
Light | Dark
|
||||
--- | ---
|
||||
 | 
|
||||
 | 
|
||||
|
||||
## Development
|
||||
|
||||
See [Development Documentation](https://goauthentik.io/developer-docs/?utm_source=github)
|
||||
See [Development Documentation](https://goauthentik.io/developer-docs/)
|
||||
|
||||
## Security
|
||||
|
||||
See [SECURITY.md](SECURITY.md)
|
||||
|
||||
## Sponsors
|
||||
|
||||
This project is proudly sponsored by:
|
||||
|
||||
<p>
|
||||
<a href="https://www.digitalocean.com/?utm_medium=opensource&utm_source=goauthentik.io">
|
||||
<img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" width="201px">
|
||||
</a>
|
||||
</p>
|
||||
|
||||
DigitalOcean provides development and testing resources for authentik.
|
||||
|
||||
<p>
|
||||
<a href="https://www.netlify.com">
|
||||
<img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" />
|
||||
</a>
|
||||
</p>
|
||||
|
||||
Netlify hosts the [goauthentik.io](https://goauthentik.io) site.
SECURITY.md (42 lines changed)
@@ -1,44 +1,14 @@
|
||||
Authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce it's existence and release the fixed version.
|
||||
# Security Policy
|
||||
|
||||
## Supported Versions
|
||||
|
||||
(.x being the latest patch release for each version)
|
||||
|
||||
| Version | Supported |
|
||||
| --------- | ------------------ |
|
||||
| 2022.12.x | :white_check_mark: |
|
||||
| 2023.1.x | :white_check_mark: |
|
||||
| Version | Supported |
|
||||
| ---------- | ------------------ |
|
||||
| 2021.7.x | :white_check_mark: |
|
||||
| 2021.8.x | :white_check_mark: |
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io). Be sure to include relevant information like which version you've found the issue in, instructions on how to reproduce the issue, and anything else that might make it easier for us to find the bug.
|
||||
|
||||
## Criticality levels
|
||||
|
||||
### High
|
||||
|
||||
- Authorization bypass
|
||||
- Circumvention of policies
|
||||
|
||||
### Moderate
|
||||
|
||||
- Denial-of-Service attacks
|
||||
|
||||
### Low
|
||||
|
||||
- Unvalidated redirects
|
||||
- Issues requiring uncommon setups
|
||||
|
||||
## Disclosure process
|
||||
|
||||
1. Issue is reported via Email as listed above.
|
||||
2. The authentik Security team will try to reproduce the issue and ask for more information if required.
|
||||
3. A criticality level is assigned.
|
||||
4. A fix is created, and if possible tested by the issue reporter.
|
||||
5. The fix is backported to other supported versions, and if possible a workaround for other versions is created.
|
||||
6. An announcement is sent out with a fixed release date and criticality level of the issue. The announcement will be sent at least 24 hours before the release of the fix
|
||||
7. The fixed version is released for the supported versions.
|
||||
|
||||
## Getting security notifications
|
||||
|
||||
To get security notifications, subscribe to the mailing list [here](https://groups.google.com/g/authentik-security-announcements) or join the [discord](https://goauthentik.io/discord) server.
|
||||
To report a vulnerability, send an email to [security@beryju.org](mailto:security@beryju.org)
|
||||
|
@@ -1,20 +1,3 @@
|
||||
"""authentik"""
|
||||
from os import environ
|
||||
from typing import Optional
|
||||
|
||||
__version__ = "2023.1.3"
|
||||
__version__ = "2021.8.5"
|
||||
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
|
||||
|
||||
|
||||
def get_build_hash(fallback: Optional[str] = None) -> str:
|
||||
"""Get build hash"""
|
||||
build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "")
|
||||
return fallback if build_hash == "" and fallback else build_hash
|
||||
|
||||
|
||||
def get_full_version() -> str:
|
||||
"""Get full version, with build hash appended"""
|
||||
version = __version__
|
||||
if (build_hash := get_build_hash()) != "":
|
||||
version += "." + build_hash
|
||||
return version
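get_full_version() above appends the build hash from the GIT_BUILD_HASH environment variable, when set, to __version__. A quick usage sketch, assuming it runs inside an authentik checkout (the hash value is made up):

    from os import environ

    from authentik import get_full_version

    environ["GIT_BUILD_HASH"] = "abc1234"  # hypothetical build hash
    print(get_full_version())  # e.g. "2021.8.5.abc1234"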
|
||||
|
@@ -18,13 +18,13 @@ class AppSerializer(PassiveSerializer):
|
||||
|
||||
|
||||
class AppsViewSet(ViewSet):
|
||||
"""Read-only view list all installed apps"""
|
||||
"""Read-only view set list all installed apps"""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
|
||||
@extend_schema(responses={200: AppSerializer(many=True)})
|
||||
def list(self, request: Request) -> Response:
|
||||
"""Read-only view list all installed apps"""
|
||||
"""List current messages and pass into Serializer"""
|
||||
data = []
|
||||
for app in sorted(get_apps(), key=lambda app: app.name):
|
||||
data.append({"name": app.name, "label": app.verbose_name})
|
||||
|
@@ -1,9 +1,13 @@
|
||||
"""authentik administration metrics"""
|
||||
import time
|
||||
from collections import Counter
|
||||
from datetime import timedelta
|
||||
|
||||
from django.db.models import Count, ExpressionWrapper, F
|
||||
from django.db.models.fields import DurationField
|
||||
from django.db.models.functions import ExtractHour
|
||||
from django.utils.timezone import now
|
||||
from drf_spectacular.utils import extend_schema, extend_schema_field
|
||||
from guardian.shortcuts import get_objects_for_user
|
||||
from rest_framework.fields import IntegerField, SerializerMethodField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
@@ -11,7 +15,31 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.events.models import EventAction
|
||||
from authentik.events.models import Event, EventAction
|
||||
|
||||
|
||||
def get_events_per_1h(**filter_kwargs) -> list[dict[str, int]]:
|
||||
"""Get event count by hour in the last day, fill with zeros"""
|
||||
date_from = now() - timedelta(days=1)
|
||||
result = (
|
||||
Event.objects.filter(created__gte=date_from, **filter_kwargs)
|
||||
.annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField()))
|
||||
.annotate(age_hours=ExtractHour("age"))
|
||||
.values("age_hours")
|
||||
.annotate(count=Count("pk"))
|
||||
.order_by("age_hours")
|
||||
)
|
||||
data = Counter({int(d["age_hours"]): d["count"] for d in result})
|
||||
results = []
|
||||
_now = now()
|
||||
for hour in range(0, -24, -1):
|
||||
results.append(
|
||||
{
|
||||
"x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000,
|
||||
"y_cord": data[hour * -1],
|
||||
}
|
||||
)
|
||||
return results
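get_events_per_1h() buckets events by the extracted hour of their age and relies on a Counter so that hours without events come back as zero. The same zero-filling idea, shown standalone without the Django queryset (the sample rows are made up):

    import time
    from collections import Counter
    from datetime import datetime, timedelta

    # shape of the aggregated queryset rows: one entry per hour bucket that had events
    rows = [{"age_hours": 0, "count": 3}, {"age_hours": 5, "count": 1}]
    data = Counter({int(row["age_hours"]): row["count"] for row in rows})

    now = datetime.now()
    points = []
    for hour in range(0, -24, -1):
        points.append(
            {
                "x_cord": time.mktime((now + timedelta(hours=hour)).timetuple()) * 1000,
                "y_cord": data[hour * -1],  # Counter returns 0 for missing hours
            }
        )
    print(points[0], points[-1])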
|
||||
|
||||
|
||||
class CoordinateSerializer(PassiveSerializer):
|
||||
@@ -24,45 +52,18 @@
|
||||
class LoginMetricsSerializer(PassiveSerializer):
|
||||
"""Login Metrics per 1h"""
|
||||
|
||||
logins = SerializerMethodField()
|
||||
logins_failed = SerializerMethodField()
|
||||
authorizations = SerializerMethodField()
|
||||
logins_per_1h = SerializerMethodField()
|
||||
logins_failed_per_1h = SerializerMethodField()
|
||||
|
||||
@extend_schema_field(CoordinateSerializer(many=True))
|
||||
def get_logins(self, _):
|
||||
"""Get successful logins per 8 hours for the last 7 days"""
|
||||
user = self.context["user"]
|
||||
return (
|
||||
get_objects_for_user(user, "authentik_events.view_event").filter(
|
||||
action=EventAction.LOGIN
|
||||
)
|
||||
# 3 data points per day, so 8 hour spans
|
||||
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
|
||||
)
|
||||
def get_logins_per_1h(self, _):
|
||||
"""Get successful logins per hour for the last 24 hours"""
|
||||
return get_events_per_1h(action=EventAction.LOGIN)
|
||||
|
||||
@extend_schema_field(CoordinateSerializer(many=True))
|
||||
def get_logins_failed(self, _):
|
||||
"""Get failed logins per 8 hours for the last 7 days"""
|
||||
user = self.context["user"]
|
||||
return (
|
||||
get_objects_for_user(user, "authentik_events.view_event").filter(
|
||||
action=EventAction.LOGIN_FAILED
|
||||
)
|
||||
# 3 data points per day, so 8 hour spans
|
||||
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
|
||||
)
|
||||
|
||||
@extend_schema_field(CoordinateSerializer(many=True))
|
||||
def get_authorizations(self, _):
|
||||
"""Get successful authorizations per 8 hours for the last 7 days"""
|
||||
user = self.context["user"]
|
||||
return (
|
||||
get_objects_for_user(user, "authentik_events.view_event").filter(
|
||||
action=EventAction.AUTHORIZE_APPLICATION
|
||||
)
|
||||
# 3 data points per day, so 8 hour spans
|
||||
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
|
||||
)
|
||||
def get_logins_failed_per_1h(self, _):
|
||||
"""Get failed logins per hour for the last 24 hours"""
|
||||
return get_events_per_1h(action=EventAction.LOGIN_FAILED)
|
||||
|
||||
|
||||
class AdministrationMetricsViewSet(APIView):
|
||||
@@ -74,5 +75,4 @@ class AdministrationMetricsViewSet(APIView):
|
||||
def get(self, request: Request) -> Response:
|
||||
"""Login Metrics per 1h"""
|
||||
serializer = LoginMetricsSerializer(True)
|
||||
serializer.context["user"] = request.user
|
||||
return Response(serializer.data)
|
||||
|
@ -8,6 +8,7 @@ from typing import TypedDict
|
||||
from django.utils.timezone import now
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from gunicorn import version_info as gunicorn_version
|
||||
from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
|
||||
from rest_framework.fields import SerializerMethodField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
@@ -15,8 +16,7 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.lib.utils.reflection import get_env
|
||||
from authentik.outposts.apps import MANAGED_OUTPOST
|
||||
from authentik.outposts.managed import MANAGED_OUTPOST
|
||||
from authentik.outposts.models import Outpost
|
||||
|
||||
|
||||
@@ -69,7 +69,7 @@ class SystemSerializer(PassiveSerializer):
|
||||
return {
|
||||
"python_version": python_version,
|
||||
"gunicorn_version": ".".join(str(x) for x in gunicorn_version),
|
||||
"environment": get_env(),
|
||||
"environment": "kubernetes" if SERVICE_HOST_ENV_NAME in os.environ else "compose",
|
||||
"architecture": platform.machine(),
|
||||
"platform": platform.platform(),
|
||||
"uname": " ".join(platform.uname()),
|
||||
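The environment value above is derived purely from whether the in-cluster Kubernetes service-host variable is present. The same check on its own, as a hypothetical helper (the function name is made up):

    import os

    from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME

    def runtime_environment() -> str:
        # mirrors the check used for the "environment" key above
        return "kubernetes" if SERVICE_HOST_ENV_NAME in os.environ else "compose"

    print(runtime_environment())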
@@ -84,9 +84,9 @@
|
||||
return now()
|
||||
|
||||
def get_embedded_outpost_host(self, request: Request) -> str:
|
||||
"""Get the FQDN configured on the embedded outpost"""
|
||||
"""Get the FQDN configured on the embeddded outpost"""
|
||||
outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
|
||||
if not outposts.exists(): # pragma: no cover
|
||||
if not outposts.exists():
|
||||
return ""
|
||||
return outposts.first().config.authentik_host
|
||||
|
||||
|
@@ -5,26 +5,17 @@ from django.contrib import messages
|
||||
from django.http.response import Http404
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
|
||||
from drf_spectacular.utils import OpenApiResponse, extend_schema
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import (
|
||||
CharField,
|
||||
ChoiceField,
|
||||
DateTimeField,
|
||||
ListField,
|
||||
SerializerMethodField,
|
||||
)
|
||||
from rest_framework.fields import CharField, ChoiceField, DateTimeField, ListField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.viewsets import ViewSet
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class TaskSerializer(PassiveSerializer):
|
||||
"""Serialize TaskInfo and TaskResult"""
|
||||
@@ -32,7 +23,6 @@ class TaskSerializer(PassiveSerializer):
|
||||
task_name = CharField()
|
||||
task_description = CharField()
|
||||
task_finish_timestamp = DateTimeField(source="finish_time")
|
||||
task_duration = SerializerMethodField()
|
||||
|
||||
status = ChoiceField(
|
||||
source="result.status.name",
|
||||
@@ -40,17 +30,13 @@
|
||||
)
|
||||
messages = ListField(source="result.messages")
|
||||
|
||||
def get_task_duration(self, instance: TaskInfo) -> int:
|
||||
"""Get the duration a task took to run"""
|
||||
return max(instance.finish_timestamp - instance.start_timestamp, 0)
|
||||
|
||||
def to_representation(self, instance: TaskInfo):
|
||||
def to_representation(self, instance):
|
||||
"""When a new version of authentik adds fields to TaskInfo,
|
||||
the API will fail with an AttributeError, as the classes
|
||||
are pickled in cache. In that case, just delete the info"""
|
||||
try:
|
||||
return super().to_representation(instance)
|
||||
except AttributeError: # pragma: no cover
|
||||
except AttributeError:
|
||||
if isinstance(self.instance, list):
|
||||
for inst in self.instance:
|
||||
inst.delete()
|
||||
@@ -69,16 +55,9 @@ class TaskViewSet(ViewSet):
|
||||
responses={
|
||||
200: TaskSerializer(many=False),
|
||||
404: OpenApiResponse(description="Task not found"),
|
||||
},
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
"id",
|
||||
type=OpenApiTypes.STR,
|
||||
location=OpenApiParameter.PATH,
|
||||
required=True,
|
||||
),
|
||||
],
|
||||
}
|
||||
)
|
||||
# pylint: disable=invalid-name
|
||||
def retrieve(self, request: Request, pk=None) -> Response:
|
||||
"""Get a single system task"""
|
||||
task = TaskInfo.by_name(pk)
|
||||
@@ -99,16 +78,9 @@
|
||||
404: OpenApiResponse(description="Task not found"),
|
||||
500: OpenApiResponse(description="Failed to retry task"),
|
||||
},
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
"id",
|
||||
type=OpenApiTypes.STR,
|
||||
location=OpenApiParameter.PATH,
|
||||
required=True,
|
||||
),
|
||||
],
|
||||
)
|
||||
@action(detail=True, methods=["post"])
|
||||
# pylint: disable=invalid-name
|
||||
def retry(self, request: Request, pk=None) -> Response:
|
||||
"""Retry task"""
|
||||
task = TaskInfo.by_name(pk)
|
||||
@@ -117,15 +89,13 @@
|
||||
try:
|
||||
task_module = import_module(task.task_call_module)
|
||||
task_func = getattr(task_module, task.task_call_func)
|
||||
LOGGER.debug("Running task", task=task_func)
|
||||
task_func.delay(*task.task_call_args, **task.task_call_kwargs)
|
||||
messages.success(
|
||||
self.request,
|
||||
_("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}),
|
||||
)
|
||||
return Response(status=204)
|
||||
except (ImportError, AttributeError): # pragma: no cover
|
||||
LOGGER.warning("Failed to run task, remove state", task=task)
|
||||
except ImportError: # pragma: no cover
|
||||
# if we get an import error, the module path has probably changed
|
||||
task.delete()
|
||||
return Response(status=500)
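The retry action above re-queues a task by resolving the stored module path and function name at runtime and calling .delay() on the resulting Celery task. The same lookup with concrete values that would normally come from TaskInfo (assumes a configured authentik environment):

    from importlib import import_module

    task_module = import_module("authentik.admin.tasks")       # task.task_call_module
    task_func = getattr(task_module, "update_latest_version")   # task.task_call_func
    task_func.delay()  # re-queue via Celery, as the view does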
|
||||
|
@@ -1,4 +1,6 @@
|
||||
"""authentik administration overview"""
|
||||
from os import environ
|
||||
|
||||
from django.core.cache import cache
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from packaging.version import parse
|
||||
@@ -8,7 +10,7 @@ from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from authentik import __version__, get_build_hash
|
||||
from authentik import ENV_GIT_HASH_KEY, __version__
|
||||
from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
|
||||
@@ -23,7 +25,7 @@ class VersionSerializer(PassiveSerializer):
|
||||
|
||||
def get_build_hash(self, _) -> str:
|
||||
"""Get build hash, if version is not latest or released"""
|
||||
return get_build_hash()
|
||||
return environ.get(ENV_GIT_HASH_KEY, "")
|
||||
|
||||
def get_version_current(self, _) -> str:
|
||||
"""Get current version"""
|
||||
|
@@ -1,6 +1,6 @@
|
||||
"""authentik administration overview"""
|
||||
from django.conf import settings
|
||||
from drf_spectacular.utils import extend_schema, inline_serializer
|
||||
from prometheus_client import Gauge
|
||||
from rest_framework.fields import IntegerField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
@@ -9,6 +9,8 @@ from rest_framework.views import APIView
|
||||
|
||||
from authentik.root.celery import CELERY_APP
|
||||
|
||||
GAUGE_WORKERS = Gauge("authentik_admin_workers", "Currently connected workers")
|
||||
|
||||
|
||||
class WorkerView(APIView):
|
||||
"""Get currently connected worker count."""
|
||||
@@ -19,7 +21,4 @@
|
||||
def get(self, request: Request) -> Response:
|
||||
"""Get currently connected worker count."""
|
||||
count = len(CELERY_APP.control.ping(timeout=0.5))
|
||||
# In debug we run with `CELERY_TASK_ALWAYS_EAGER`, so tasks are ran on the main process
|
||||
if settings.DEBUG: # pragma: no cover
|
||||
count += 1
|
||||
return Response({"count": count})
|
||||
|
@@ -1,20 +1,10 @@
|
||||
"""authentik admin app config"""
|
||||
from prometheus_client import Gauge, Info
|
||||
|
||||
from authentik.blueprints.apps import ManagedAppConfig
|
||||
|
||||
PROM_INFO = Info("authentik_version", "Currently running authentik version")
|
||||
GAUGE_WORKERS = Gauge("authentik_admin_workers", "Currently connected workers")
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class AuthentikAdminConfig(ManagedAppConfig):
|
||||
class AuthentikAdminConfig(AppConfig):
|
||||
"""authentik admin app config"""
|
||||
|
||||
name = "authentik.admin"
|
||||
label = "authentik_admin"
|
||||
verbose_name = "authentik Admin"
|
||||
default = True
|
||||
|
||||
def reconcile_load_admin_signals(self):
|
||||
"""Load admin signals"""
|
||||
self.import_module("authentik.admin.signals")
|
||||
|
@@ -1,12 +1,10 @@
|
||||
"""authentik admin settings"""
|
||||
from celery.schedules import crontab
|
||||
|
||||
from authentik.lib.utils.time import fqdn_rand
|
||||
|
||||
CELERY_BEAT_SCHEDULE = {
|
||||
"admin_latest_version": {
|
||||
"task": "authentik.admin.tasks.update_latest_version",
|
||||
"schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"),
|
||||
"schedule": crontab(minute="*/60"), # Run every hour
|
||||
"options": {"queue": "authentik_scheduled"},
|
||||
}
|
||||
}
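One side of this hunk schedules the hourly update check at a minute derived from fqdn_rand(), so separate installs do not all hit the version endpoint at the same moment, while the other side uses a plain crontab(minute="*/60"). A purely hypothetical sketch of what such a helper could look like (this is not authentik's actual implementation):

    from hashlib import sha256
    from socket import getfqdn

    def fqdn_rand(seed: str, stop: int = 60) -> int:
        """Derive a stable, per-host minute offset from the FQDN and a seed."""
        digest = sha256(f"{getfqdn()}:{seed}".encode()).hexdigest()
        return int(digest, 16) % stop

    print(fqdn_rand("admin_latest_version"))  # e.g. 37, constant for a given host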
|
||||
|
@@ -1,21 +0,0 @@
|
||||
"""admin signals"""
|
||||
from django.dispatch import receiver
|
||||
|
||||
from authentik.admin.api.tasks import TaskInfo
|
||||
from authentik.admin.apps import GAUGE_WORKERS
|
||||
from authentik.root.celery import CELERY_APP
|
||||
from authentik.root.monitoring import monitoring_set
|
||||
|
||||
|
||||
@receiver(monitoring_set)
|
||||
def monitoring_set_workers(sender, **kwargs):
|
||||
"""Set worker gauge"""
|
||||
count = len(CELERY_APP.control.ping(timeout=0.5))
|
||||
GAUGE_WORKERS.set(count)
|
||||
|
||||
|
||||
@receiver(monitoring_set)
|
||||
def monitoring_set_tasks(sender, **kwargs):
|
||||
"""Set task gauges"""
|
||||
for task in TaskInfo.all().values():
|
||||
task.set_prom_metrics()
|
@@ -1,24 +1,17 @@
|
||||
"""authentik admin tasks"""
|
||||
import re
|
||||
from os import environ
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.core.validators import URLValidator
|
||||
from django.db import DatabaseError, InternalError, ProgrammingError
|
||||
from packaging.version import parse
|
||||
from requests import RequestException
|
||||
from prometheus_client import Info
|
||||
from requests import RequestException, get
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik import __version__, get_build_hash
|
||||
from authentik.admin.apps import PROM_INFO
|
||||
from authentik.events.models import Event, EventAction, Notification
|
||||
from authentik.events.monitored_tasks import (
|
||||
MonitoredTask,
|
||||
TaskResult,
|
||||
TaskResultStatus,
|
||||
prefill_task,
|
||||
)
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.utils.http import get_http_session
|
||||
from authentik import ENV_GIT_HASH_KEY, __version__
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
|
||||
from authentik.root.celery import CELERY_APP
|
||||
|
||||
LOGGER = get_logger()
|
||||
@@ -26,7 +19,7 @@ VERSION_CACHE_KEY = "authentik_latest_version"
|
||||
VERSION_CACHE_TIMEOUT = 8 * 60 * 60 # 8 hours
|
||||
# Chop of the first ^ because we want to search the entire string
|
||||
URL_FINDER = URLValidator.regex.pattern[1:]
|
||||
LOCAL_VERSION = parse(__version__)
|
||||
PROM_INFO = Info("authentik_version", "Currently running authentik version")
|
||||
|
||||
|
||||
def _set_prom_info():
|
||||
@ -35,40 +28,20 @@ def _set_prom_info():
|
||||
{
|
||||
"version": __version__,
|
||||
"latest": cache.get(VERSION_CACHE_KEY, ""),
|
||||
"build_hash": get_build_hash(),
|
||||
"build_hash": environ.get(ENV_GIT_HASH_KEY, ""),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@CELERY_APP.task(
|
||||
throws=(DatabaseError, ProgrammingError, InternalError),
|
||||
)
|
||||
def clear_update_notifications():
|
||||
"""Clear update notifications on startup if the notification was for the version
|
||||
we're running now."""
|
||||
for notification in Notification.objects.filter(event__action=EventAction.UPDATE_AVAILABLE):
|
||||
if "new_version" not in notification.event.context:
|
||||
continue
|
||||
notification_version = notification.event.context["new_version"]
|
||||
if LOCAL_VERSION >= parse(notification_version):
|
||||
notification.delete()
|
||||
|
||||
|
||||
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||
@prefill_task
|
||||
def update_latest_version(self: MonitoredTask):
|
||||
"""Update latest version info"""
|
||||
if CONFIG.y_bool("disable_update_check"):
|
||||
cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
|
||||
self.set_status(TaskResult(TaskResultStatus.WARNING, messages=["Version check disabled."]))
|
||||
return
|
||||
try:
|
||||
response = get_http_session().get(
|
||||
"https://version.goauthentik.io/version.json",
|
||||
)
|
||||
response = get("https://api.github.com/repos/goauthentik/authentik/releases/latest")
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
upstream_version = data.get("stable", {}).get("version")
|
||||
tag_name = data.get("tag_name")
|
||||
upstream_version = tag_name.split("/")[1]
|
||||
cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT)
|
||||
self.set_status(
|
||||
TaskResult(TaskResultStatus.SUCCESSFUL, ["Successfully updated latest Version"])
|
||||
@@ -76,7 +49,8 @@ def update_latest_version(self: MonitoredTask):
|
||||
_set_prom_info()
|
||||
# Check if upstream version is newer than what we're running,
|
||||
# and if no event exists yet, create one.
|
||||
if LOCAL_VERSION < parse(upstream_version):
|
||||
local_version = parse(__version__)
|
||||
if local_version < parse(upstream_version):
|
||||
# Event has already been created, don't create duplicate
|
||||
if Event.objects.filter(
|
||||
action=EventAction.UPDATE_AVAILABLE,
|
||||
@ -84,7 +58,7 @@ def update_latest_version(self: MonitoredTask):
|
||||
).exists():
|
||||
return
|
||||
event_dict = {"new_version": upstream_version}
|
||||
if match := re.search(URL_FINDER, data.get("stable", {}).get("changelog", "")):
|
||||
if match := re.search(URL_FINDER, data.get("body", "")):
|
||||
event_dict["message"] = f"Changelog: {match.group()}"
|
||||
Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
|
||||
except (RequestException, IndexError) as exc:
|
||||
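Both variants of update_latest_version above come down to the same comparison: parse the local and upstream version strings with packaging and only raise an UPDATE_AVAILABLE event when upstream is newer. The comparison in isolation (the version values are made up):

    from packaging.version import parse

    local_version = parse("2021.8.5")
    upstream_version = parse("2021.9.1")  # value the task fetches from the release endpoint

    if local_version < upstream_version:
        print("update available:", upstream_version)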
|
@@ -5,7 +5,6 @@ from django.test import TestCase
|
||||
from django.urls import reverse
|
||||
|
||||
from authentik import __version__
|
||||
from authentik.blueprints.tests import reconcile_app
|
||||
from authentik.core.models import Group, User
|
||||
from authentik.core.tasks import clean_expired_models
|
||||
from authentik.events.monitored_tasks import TaskResultStatus
|
||||
@@ -93,7 +92,6 @@ class TestAdminAPI(TestCase):
|
||||
response = self.client.get(reverse("authentik_api:apps-list"))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
@reconcile_app("authentik_outposts")
|
||||
def test_system(self):
|
||||
"""Test system API"""
|
||||
response = self.client.get(reverse("authentik_api:admin_system"))
|
||||
|
@@ -1,83 +1,81 @@
|
||||
"""test admin tasks"""
|
||||
import json
|
||||
from dataclasses import dataclass
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.test import TestCase
|
||||
from requests_mock import Mocker
|
||||
from requests.exceptions import RequestException
|
||||
|
||||
from authentik.admin.tasks import (
|
||||
VERSION_CACHE_KEY,
|
||||
clear_update_notifications,
|
||||
update_latest_version,
|
||||
)
|
||||
from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.lib.config import CONFIG
|
||||
|
||||
RESPONSE_VALID = {
|
||||
"$schema": "https://version.goauthentik.io/schema.json",
|
||||
"stable": {
|
||||
"version": "99999999.9999999",
|
||||
"changelog": "See https://goauthentik.io/test",
|
||||
"reason": "bugfix",
|
||||
},
|
||||
}
|
||||
|
||||
@dataclass
|
||||
class MockResponse:
|
||||
"""Mock class to emulate the methods of requests's Response we need"""
|
||||
|
||||
status_code: int
|
||||
response: str
|
||||
|
||||
def json(self) -> dict:
|
||||
"""Get json parsed response"""
|
||||
return json.loads(self.response)
|
||||
|
||||
def raise_for_status(self):
|
||||
"""raise RequestException if status code is 400 or more"""
|
||||
if self.status_code >= 400:
|
||||
raise RequestException
|
||||
|
||||
|
||||
REQUEST_MOCK_VALID = Mock(
|
||||
return_value=MockResponse(
|
||||
200,
|
||||
"""{
|
||||
"tag_name": "version/99999999.9999999",
|
||||
"body": "https://goauthentik.io/test"
|
||||
}""",
|
||||
)
|
||||
)
|
||||
|
||||
REQUEST_MOCK_INVALID = Mock(return_value=MockResponse(400, "{}"))
|
||||
|
||||
|
||||
class TestAdminTasks(TestCase):
|
||||
"""test admin tasks"""
|
||||
|
||||
@patch("authentik.admin.tasks.get", REQUEST_MOCK_VALID)
|
||||
def test_version_valid_response(self):
|
||||
"""Test Update checker with valid response"""
|
||||
with Mocker() as mocker, CONFIG.patch("disable_update_check", False):
|
||||
mocker.get("https://version.goauthentik.io/version.json", json=RESPONSE_VALID)
|
||||
update_latest_version.delay().get()
|
||||
self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999")
|
||||
self.assertTrue(
|
||||
update_latest_version.delay().get()
|
||||
self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999")
|
||||
self.assertTrue(
|
||||
Event.objects.filter(
|
||||
action=EventAction.UPDATE_AVAILABLE,
|
||||
context__new_version="99999999.9999999",
|
||||
context__message="Changelog: https://goauthentik.io/test",
|
||||
).exists()
|
||||
)
|
||||
# test that a consecutive check doesn't create a duplicate event
|
||||
update_latest_version.delay().get()
|
||||
self.assertEqual(
|
||||
len(
|
||||
Event.objects.filter(
|
||||
action=EventAction.UPDATE_AVAILABLE,
|
||||
context__new_version="99999999.9999999",
|
||||
context__message="Changelog: https://goauthentik.io/test",
|
||||
).exists()
|
||||
)
|
||||
# test that a consecutive check doesn't create a duplicate event
|
||||
update_latest_version.delay().get()
|
||||
self.assertEqual(
|
||||
len(
|
||||
Event.objects.filter(
|
||||
action=EventAction.UPDATE_AVAILABLE,
|
||||
context__new_version="99999999.9999999",
|
||||
context__message="Changelog: https://goauthentik.io/test",
|
||||
)
|
||||
),
|
||||
1,
|
||||
)
|
||||
)
|
||||
),
|
||||
1,
|
||||
)
|
||||
|
||||
@patch("authentik.admin.tasks.get", REQUEST_MOCK_INVALID)
|
||||
def test_version_error(self):
|
||||
"""Test Update checker with invalid response"""
|
||||
with Mocker() as mocker:
|
||||
mocker.get("https://version.goauthentik.io/version.json", status_code=400)
|
||||
update_latest_version.delay().get()
|
||||
self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
|
||||
self.assertFalse(
|
||||
Event.objects.filter(
|
||||
action=EventAction.UPDATE_AVAILABLE, context__new_version="0.0.0"
|
||||
).exists()
|
||||
)
|
||||
|
||||
def test_version_disabled(self):
|
||||
"""Test Update checker while its disabled"""
|
||||
with CONFIG.patch("disable_update_check", True):
|
||||
update_latest_version.delay().get()
|
||||
self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
|
||||
|
||||
def test_clear_update_notifications(self):
|
||||
"""Test clear of previous notification"""
|
||||
Event.objects.create(
|
||||
action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"}
|
||||
)
|
||||
Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"})
|
||||
Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={})
|
||||
clear_update_notifications()
|
||||
update_latest_version.delay().get()
|
||||
self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
|
||||
self.assertFalse(
|
||||
Event.objects.filter(
|
||||
action=EventAction.UPDATE_AVAILABLE, context__new_version="1.1"
|
||||
action=EventAction.UPDATE_AVAILABLE, context__new_version="0.0.0"
|
||||
).exists()
|
||||
)
|
||||
|
@@ -31,5 +31,4 @@ class AuthentikAPIConfig(AppConfig):
|
||||
"type": "apiKey",
|
||||
"in": "header",
|
||||
"name": "Authorization",
|
||||
"scheme": "bearer",
|
||||
}
|
||||
|
@@ -1,5 +1,7 @@
|
||||
"""API Authentication"""
|
||||
from typing import Any, Optional
|
||||
from base64 import b64decode
|
||||
from binascii import Error
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from django.conf import settings
|
||||
from rest_framework.authentication import BaseAuthentication, get_authorization_header
|
||||
@@ -7,66 +9,49 @@ from rest_framework.exceptions import AuthenticationFailed
|
||||
from rest_framework.request import Request
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.middleware import CTX_AUTH_VIA
|
||||
from authentik.core.models import Token, TokenIntents, User
|
||||
from authentik.outposts.models import Outpost
|
||||
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
def validate_auth(header: bytes) -> Optional[str]:
|
||||
"""Validate that the header is in a correct format,
|
||||
returns type and credentials"""
|
||||
auth_credentials = header.decode().strip()
|
||||
# pylint: disable=too-many-return-statements
|
||||
def bearer_auth(raw_header: bytes) -> Optional[User]:
|
||||
"""raw_header in the Format of `Bearer dGVzdDp0ZXN0`"""
|
||||
auth_credentials = raw_header.decode()
|
||||
if auth_credentials == "" or " " not in auth_credentials:
|
||||
return None
|
||||
auth_type, _, auth_credentials = auth_credentials.partition(" ")
|
||||
if auth_type.lower() != "bearer":
|
||||
if auth_type.lower() not in ["basic", "bearer"]:
|
||||
LOGGER.debug("Unsupported authentication type, denying", type=auth_type.lower())
|
||||
raise AuthenticationFailed("Unsupported authentication type")
|
||||
if auth_credentials == "": # nosec # noqa
|
||||
password = auth_credentials
|
||||
if auth_type.lower() == "basic":
|
||||
try:
|
||||
auth_credentials = b64decode(auth_credentials.encode()).decode()
|
||||
except (UnicodeDecodeError, Error):
|
||||
raise AuthenticationFailed("Malformed header")
|
||||
# Accept credentials with username and without
|
||||
if ":" in auth_credentials:
|
||||
_, _, password = auth_credentials.partition(":")
|
||||
else:
|
||||
password = auth_credentials
|
||||
if password == "": # nosec
|
||||
raise AuthenticationFailed("Malformed header")
|
||||
return auth_credentials
|
||||
|
||||
|
||||
def bearer_auth(raw_header: bytes) -> Optional[User]:
|
||||
"""raw_header in the Format of `Bearer ....`"""
|
||||
from authentik.providers.oauth2.models import RefreshToken
|
||||
|
||||
auth_credentials = validate_auth(raw_header)
|
||||
if not auth_credentials:
|
||||
return None
|
||||
# first, check traditional tokens
|
||||
key_token = Token.filter_not_expired(
|
||||
key=auth_credentials, intent=TokenIntents.INTENT_API
|
||||
).first()
|
||||
if key_token:
|
||||
CTX_AUTH_VIA.set("api_token")
|
||||
return key_token.user
|
||||
# then try to auth via JWT
|
||||
jwt_token = RefreshToken.filter_not_expired(
|
||||
refresh_token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
|
||||
).first()
|
||||
if jwt_token:
|
||||
# Double-check scopes, since they are saved in a single string
|
||||
# we want to check the parsed version too
|
||||
if SCOPE_AUTHENTIK_API not in jwt_token.scope:
|
||||
tokens = Token.filter_not_expired(key=password, intent=TokenIntents.INTENT_API)
|
||||
if not tokens.exists():
|
||||
LOGGER.info("Authenticating via secret_key")
|
||||
user = token_secret_key(password)
|
||||
if not user:
|
||||
raise AuthenticationFailed("Token invalid/expired")
|
||||
CTX_AUTH_VIA.set("jwt")
|
||||
return jwt_token.user
|
||||
# then try to auth via secret key (for embedded outpost/etc)
|
||||
user = token_secret_key(auth_credentials)
|
||||
if user:
|
||||
CTX_AUTH_VIA.set("secret_key")
|
||||
return user
|
||||
raise AuthenticationFailed("Token invalid/expired")
|
||||
return tokens.first().user
|
||||
|
||||
|
||||
def token_secret_key(value: str) -> Optional[User]:
|
||||
"""Check if the token is the secret key
|
||||
and return the service account for the managed outpost"""
|
||||
from authentik.outposts.apps import MANAGED_OUTPOST
|
||||
from authentik.outposts.managed import MANAGED_OUTPOST
|
||||
|
||||
if value != settings.SECRET_KEY:
|
||||
return None
|
||||
@@ -80,7 +65,7 @@ def token_secret_key(value: str) -> Optional[User]:
|
||||
class TokenAuthentication(BaseAuthentication):
|
||||
"""Token-based authentication using HTTP Bearer authentication"""
|
||||
|
||||
def authenticate(self, request: Request) -> tuple[User, Any] | None:
|
||||
def authenticate(self, request: Request) -> Union[tuple[User, Any], None]:
|
||||
"""Token-based authentication using HTTP Bearer authentication"""
|
||||
auth = get_authorization_header(request)
|
||||
|
||||
|
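bearer_auth() on one side of this diff also accepts HTTP Basic credentials: it base64-decodes them and treats everything after the first colon as the token, so a client can send an API token either way. A sketch of building both header values for the same token (the token itself is a placeholder):

    from base64 import b64encode

    token = "<api-token>"  # placeholder

    bearer_header = f"Bearer {token}"
    # empty username, token as password, matching the partition(":") handling above
    basic_header = "Basic " + b64encode(f":{token}".encode()).decode()

    print(bearer_header)
    print(basic_header)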
@@ -1,15 +1,9 @@
|
||||
"""API Authorization"""
|
||||
from django.conf import settings
|
||||
from django.db.models import Model
|
||||
from django.db.models.query import QuerySet
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from rest_framework.authentication import get_authorization_header
|
||||
from rest_framework.filters import BaseFilterBackend
|
||||
from rest_framework.permissions import BasePermission
|
||||
from rest_framework.request import Request
|
||||
from rest_framework_guardian.filters import ObjectPermissionsFilter
|
||||
|
||||
from authentik.api.authentication import validate_auth
|
||||
|
||||
|
||||
class OwnerFilter(BaseFilterBackend):
|
||||
@@ -18,25 +12,9 @@ class OwnerFilter(BaseFilterBackend):
|
||||
owner_key = "user"
|
||||
|
||||
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
|
||||
if request.user.is_superuser:
|
||||
return queryset
|
||||
return queryset.filter(**{self.owner_key: request.user})
|
||||
|
||||
|
||||
class SecretKeyFilter(DjangoFilterBackend):
|
||||
"""Allow access to all objects when authenticated with secret key as token.
|
||||
|
||||
Replaces both DjangoFilterBackend and ObjectPermissionsFilter"""
|
||||
|
||||
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
|
||||
auth_header = get_authorization_header(request)
|
||||
token = validate_auth(auth_header)
|
||||
if token and token == settings.SECRET_KEY:
|
||||
return queryset
|
||||
queryset = ObjectPermissionsFilter().filter_queryset(request, queryset, view)
|
||||
return super().filter_queryset(request, queryset, view)
|
||||
|
||||
|
||||
class OwnerPermissions(BasePermission):
|
||||
"""Authorize requests by an object's owner matching the requesting user"""
|
||||
|
||||
@@ -55,12 +33,3 @@
|
||||
if owner != request.user:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class OwnerSuperuserPermissions(OwnerPermissions):
|
||||
"""Similar to OwnerPermissions, except always allow access for superusers"""
|
||||
|
||||
def has_object_permission(self, request: Request, view, obj: Model) -> bool:
|
||||
if request.user.is_superuser:
|
||||
return True
|
||||
return super().has_object_permission(request, view, obj)
|
||||
|
@@ -5,9 +5,6 @@ from typing import Callable, Optional
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
def permission_required(perm: Optional[str] = None, other_perms: Optional[list[str]] = None):
|
||||
@ -21,12 +18,10 @@ def permission_required(perm: Optional[str] = None, other_perms: Optional[list[s
|
||||
if perm:
|
||||
obj = self.get_object()
|
||||
if not request.user.has_perm(perm, obj):
|
||||
LOGGER.debug("denying access for object", user=request.user, perm=perm, obj=obj)
|
||||
return self.permission_denied(request)
|
||||
if other_perms:
|
||||
for other_perm in other_perms:
|
||||
if not request.user.has_perm(other_perm):
|
||||
LOGGER.debug("denying access for other", user=request.user, perm=perm)
|
||||
return self.permission_denied(request)
|
||||
return func(self, request, *args, **kwargs)
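permission_required() above wraps a viewset method and checks an optional object-level permission plus any number of global permissions before calling through. A usage sketch, assuming the decorator lives at authentik/api/decorators.py as these hunks suggest and using a made-up permission string:

    from rest_framework.viewsets import ModelViewSet

    from authentik.api.decorators import permission_required  # assumed module path

    class ExampleViewSet(ModelViewSet):
        # sketch only: a real viewset also needs queryset and serializer_class
        @permission_required("authentik_core.view_application")  # hypothetical permission
        def retrieve(self, request, *args, **kwargs):
            return super().retrieve(request, *args, **kwargs)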
|
||||
|
||||
|
@@ -11,7 +11,7 @@ from drf_spectacular.types import OpenApiTypes
|
||||
|
||||
|
||||
def build_standard_type(obj, **kwargs):
|
||||
"""Build a basic type with optional add owns."""
|
||||
"""Build a basic type with optional add ons."""
|
||||
schema = build_basic_type(obj)
|
||||
schema.update(kwargs)
|
||||
return schema
|
||||
@@ -31,7 +31,7 @@ VALIDATION_ERROR = build_object_type(
|
||||
"non_field_errors": build_array_type(build_standard_type(OpenApiTypes.STR)),
|
||||
"code": build_standard_type(OpenApiTypes.STR),
|
||||
},
|
||||
required=[],
|
||||
required=["detail"],
|
||||
additionalProperties={},
|
||||
)
|
||||
|
||||
@@ -60,28 +60,8 @@ def postprocess_schema_responses(result, generator, **kwargs): # noqa: W0613

    for path in result["paths"].values():
        for method in path.values():
            method["responses"].setdefault(
                "400",
                {
                    "content": {
                        "application/json": {
                            "schema": validation_error.ref,
                        }
                    },
                    "description": "",
                },
            )
            method["responses"].setdefault(
                "403",
                {
                    "content": {
                        "application/json": {
                            "schema": generic_error.ref,
                        }
                    },
                    "description": "",
                },
            )
            method["responses"].setdefault("400", validation_error.ref)
            method["responses"].setdefault("403", generic_error.ref)

    result["components"] = generator.registry.build(spectacular_settings.APPEND_COMPONENTS)
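For reference, a short sketch (not part of this diff) of how a postprocessing hook such as postprocess_schema_responses is registered with drf-spectacular via Django settings; the dotted path is an assumption.

# Hypothetical settings snippet; the module path is assumed. POSTPROCESSING_HOOKS
# is the standard drf-spectacular setting for schema postprocessing hooks.
SPECTACULAR_SETTINGS = {
    "TITLE": "authentik",
    "POSTPROCESSING_HOOKS": [
        "authentik.api.schema.postprocess_schema_responses",  # assumed dotted path
    ],
}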
@ -8,6 +8,9 @@ API Browser - {{ tenant.branding_title }}
|
||||
|
||||
{% block head %}
|
||||
<script type="module" src="{% static 'dist/rapidoc-min.js' %}"></script>
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
<script>
|
||||
function getCookie(name) {
|
||||
let cookieValue = "";
|
||||
@ -27,62 +30,20 @@ function getCookie(name) {
|
||||
window.addEventListener('DOMContentLoaded', (event) => {
|
||||
const rapidocEl = document.querySelector('rapi-doc');
|
||||
rapidocEl.addEventListener('before-try', (e) => {
|
||||
e.detail.request.headers.append('X-authentik-CSRF', getCookie("authentik_csrf"));
|
||||
e.detail.request.headers.append('X-CSRFToken', getCookie("authentik_csrf"));
|
||||
});
|
||||
});
|
||||
</script>
|
||||
<style>
|
||||
img.logo {
|
||||
width: 100%;
|
||||
padding: 1rem 0.5rem 1.5rem 0.5rem;
|
||||
min-height: 48px;
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
<rapi-doc
|
||||
spec-url="{{ path }}"
|
||||
heading-text=""
|
||||
theme="light"
|
||||
render-style="read"
|
||||
default-schema-tab="schema"
|
||||
heading-text="authentik"
|
||||
theme="dark"
|
||||
render-style="view"
|
||||
primary-color="#fd4b2d"
|
||||
nav-bg-color="#212427"
|
||||
bg-color="#000000"
|
||||
text-color="#000000"
|
||||
nav-text-color="#ffffff"
|
||||
nav-hover-bg-color="#3c3f42"
|
||||
nav-accent-color="#4f5255"
|
||||
nav-hover-text-color="#ffffff"
|
||||
use-path-in-nav-bar="true"
|
||||
nav-item-spacing="relaxed"
|
||||
allow-server-selection="false"
|
||||
show-header="false"
|
||||
allow-spec-url-load="false"
|
||||
allow-spec-file-load="false">
|
||||
<div slot="nav-logo">
|
||||
<img alt="authentik Logo" class="logo" src="{% static 'dist/assets/icons/icon_left_brand.png' %}" />
|
||||
<div slot="logo">
|
||||
<img src="{% static 'dist/assets/icons/icon.png' %}" style="width:50px; height:50px" />
|
||||
</div>
|
||||
</rapi-doc>
|
||||
<script>
|
||||
const rapidoc = document.querySelector("rapi-doc");
|
||||
const matcher = window.matchMedia("(prefers-color-scheme: light)");
|
||||
const changer = (ev) => {
|
||||
const style = getComputedStyle(document.documentElement);
|
||||
let bg, text = "";
|
||||
if (matcher.matches) {
|
||||
bg = style.getPropertyValue('--pf-global--BackgroundColor--light-300');
|
||||
text = style.getPropertyValue('--pf-global--Color--300');
|
||||
} else {
|
||||
bg = style.getPropertyValue('--ak-dark-background');
|
||||
text = style.getPropertyValue('--ak-dark-foreground');
|
||||
}
|
||||
rapidoc.attributes.getNamedItem("bg-color").value = bg.trim();
|
||||
rapidoc.attributes.getNamedItem("text-color").value = text.trim();
|
||||
rapidoc.requestUpdate();
|
||||
};
|
||||
matcher.addEventListener("change", changer);
|
||||
window.addEventListener("load", changer);
|
||||
</script>
|
||||
{% endblock %}
|
||||
|
@ -7,26 +7,38 @@ from guardian.shortcuts import get_anonymous_user
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
|
||||
from authentik.api.authentication import bearer_auth
|
||||
from authentik.blueprints.tests import reconcile_app
|
||||
from authentik.core.models import USER_ATTRIBUTE_SA, Token, TokenIntents
|
||||
from authentik.core.tests.utils import create_test_flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
|
||||
from authentik.providers.oauth2.models import OAuth2Provider, RefreshToken
|
||||
from authentik.outposts.managed import OutpostManager
|
||||
|
||||
|
||||
class TestAPIAuth(TestCase):
|
||||
"""Test API Authentication"""
|
||||
|
||||
def test_valid_basic(self):
|
||||
"""Test valid token"""
|
||||
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=get_anonymous_user())
|
||||
auth = b64encode(f":{token.key}".encode()).decode()
|
||||
self.assertEqual(bearer_auth(f"Basic {auth}".encode()), token.user)
|
||||
|
||||
def test_valid_bearer(self):
|
||||
"""Test valid token"""
|
||||
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=get_anonymous_user())
|
||||
self.assertEqual(bearer_auth(f"Bearer {token.key}".encode()), token.user)
|
||||
|
||||
def test_invalid_type(self):
|
||||
"""Test invalid type"""
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
bearer_auth("foo bar".encode())
|
||||
|
||||
def test_invalid_empty(self):
|
||||
"""Test invalid type"""
|
||||
self.assertIsNone(bearer_auth("Bearer ".encode()))
|
||||
self.assertIsNone(bearer_auth("".encode()))
|
||||
def test_invalid_decode(self):
|
||||
"""Test invalid bas64"""
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
bearer_auth("Basic bar".encode())
|
||||
|
||||
def test_invalid_empty_password(self):
|
||||
"""Test invalid with empty password"""
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
bearer_auth("Basic :".encode())
|
||||
|
||||
def test_invalid_no_token(self):
|
||||
"""Test invalid with no token"""
|
||||
@ -34,45 +46,11 @@ class TestAPIAuth(TestCase):
|
||||
auth = b64encode(":abc".encode()).decode()
|
||||
self.assertIsNone(bearer_auth(f"Basic :{auth}".encode()))
|
||||
|
||||
def test_bearer_valid(self):
|
||||
"""Test valid token"""
|
||||
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=get_anonymous_user())
|
||||
self.assertEqual(bearer_auth(f"Bearer {token.key}".encode()), token.user)
|
||||
|
||||
def test_managed_outpost(self):
|
||||
"""Test managed outpost"""
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
|
||||
user = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
|
||||
|
||||
@reconcile_app("authentik_outposts")
|
||||
def test_managed_outpost_success(self):
|
||||
"""Test managed outpost"""
|
||||
OutpostManager().run()
|
||||
user = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
|
||||
self.assertEqual(user.attributes[USER_ATTRIBUTE_SA], True)
|
||||
|
||||
def test_jwt_valid(self):
|
||||
"""Test valid JWT"""
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
|
||||
)
|
||||
refresh = RefreshToken.objects.create(
|
||||
user=get_anonymous_user(),
|
||||
provider=provider,
|
||||
refresh_token=generate_id(),
|
||||
_scope=SCOPE_AUTHENTIK_API,
|
||||
)
|
||||
self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)
|
||||
|
||||
def test_jwt_missing_scope(self):
|
||||
"""Test valid JWT"""
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
|
||||
)
|
||||
refresh = RefreshToken.objects.create(
|
||||
user=get_anonymous_user(),
|
||||
provider=provider,
|
||||
refresh_token=generate_id(),
|
||||
_scope="",
|
||||
)
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)
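For illustration, a sketch (not part of this diff) of the two HTTP header shapes these tests exercise; the host, endpoint, and token values are placeholders.

# Placeholder host, endpoint and token; only the Authorization header shapes
# mirror what bearer_auth accepts above.
from base64 import b64encode

import requests

BASE = "https://authentik.example.com/api/v3"  # placeholder base URL
TOKEN = "replace-with-an-api-token"  # placeholder token

# Bearer form: the token is sent as-is.
requests.get(f"{BASE}/core/users/me/", headers={"Authorization": f"Bearer {TOKEN}"})

# Basic form: empty username, token as password, base64-encoded.
basic = b64encode(f":{TOKEN}".encode()).decode()
requests.get(f"{BASE}/core/users/me/", headers={"Authorization": f"Basic {basic}"})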
|
||||
|
@ -1,29 +0,0 @@
|
||||
"""authentik API Modelviewset tests"""
|
||||
from typing import Callable
|
||||
|
||||
from django.test import TestCase
|
||||
from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet
|
||||
|
||||
from authentik.api.v3.urls import router
|
||||
|
||||
|
||||
class TestModelViewSets(TestCase):
|
||||
"""Test Viewset"""
|
||||
|
||||
|
||||
def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable:
|
||||
"""Test Viewset"""
|
||||
|
||||
def tester(self: TestModelViewSets):
|
||||
self.assertIsNotNone(getattr(test_viewset, "search_fields", None))
|
||||
filterset_class = getattr(test_viewset, "filterset_class", None)
|
||||
if not filterset_class:
|
||||
self.assertIsNotNone(getattr(test_viewset, "filterset_fields", None))
|
||||
|
||||
return tester
|
||||
|
||||
|
||||
for _, viewset, _ in router.registry:
|
||||
if not issubclass(viewset, (ModelViewSet, ReadOnlyModelViewSet)):
|
||||
continue
|
||||
setattr(TestModelViewSets, f"test_viewset_{viewset.__name__}", viewset_tester_factory(viewset))
|
@ -4,5 +4,7 @@ from django.urls import include, path
from authentik.api.v3.urls import urlpatterns as v3_urls

urlpatterns = [
    # Remove in 2022.1
    path("v2beta/", include(v3_urls)),
    path("v3/", include(v3_urls)),
]
@ -1,17 +1,11 @@
|
||||
"""core Configs API"""
|
||||
from os import path
|
||||
from os import environ, path
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import models
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework.fields import (
|
||||
BooleanField,
|
||||
CharField,
|
||||
ChoiceField,
|
||||
FloatField,
|
||||
IntegerField,
|
||||
ListField,
|
||||
)
|
||||
from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
|
||||
from rest_framework.fields import BooleanField, CharField, ChoiceField, IntegerField, ListField
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
@ -27,24 +21,16 @@ class Capabilities(models.TextChoices):
|
||||
|
||||
CAN_SAVE_MEDIA = "can_save_media"
|
||||
CAN_GEO_IP = "can_geo_ip"
|
||||
CAN_IMPERSONATE = "can_impersonate"
|
||||
CAN_DEBUG = "can_debug"
|
||||
|
||||
|
||||
class ErrorReportingConfigSerializer(PassiveSerializer):
|
||||
"""Config for error reporting"""
|
||||
|
||||
enabled = BooleanField(read_only=True)
|
||||
sentry_dsn = CharField(read_only=True)
|
||||
environment = CharField(read_only=True)
|
||||
send_pii = BooleanField(read_only=True)
|
||||
traces_sample_rate = FloatField(read_only=True)
|
||||
CAN_BACKUP = "can_backup"
|
||||
|
||||
|
||||
class ConfigSerializer(PassiveSerializer):
|
||||
"""Serialize authentik Config into DRF Object"""
|
||||
|
||||
error_reporting = ErrorReportingConfigSerializer(required=True)
|
||||
error_reporting_enabled = BooleanField(read_only=True)
|
||||
error_reporting_environment = CharField(read_only=True)
|
||||
error_reporting_send_pii = BooleanField(read_only=True)
|
||||
|
||||
capabilities = ListField(child=ChoiceField(choices=Capabilities.choices))
|
||||
|
||||
cache_timeout = IntegerField(required=True)
|
||||
@ -66,23 +52,23 @@ class ConfigView(APIView):
|
||||
caps.append(Capabilities.CAN_SAVE_MEDIA)
|
||||
if GEOIP_READER.enabled:
|
||||
caps.append(Capabilities.CAN_GEO_IP)
|
||||
if CONFIG.y_bool("impersonation"):
|
||||
caps.append(Capabilities.CAN_IMPERSONATE)
|
||||
if settings.DEBUG: # pragma: no cover
|
||||
caps.append(Capabilities.CAN_DEBUG)
|
||||
if SERVICE_HOST_ENV_NAME in environ:
|
||||
# Running in k8s, only s3 backup is supported
|
||||
if CONFIG.y("postgresql.s3_backup"):
|
||||
caps.append(Capabilities.CAN_BACKUP)
|
||||
else:
|
||||
# Running in compose, backup is always supported
|
||||
caps.append(Capabilities.CAN_BACKUP)
|
||||
return caps
|
||||
|
||||
def get_config(self) -> ConfigSerializer:
|
||||
"""Get Config"""
|
||||
return ConfigSerializer(
|
||||
@extend_schema(responses={200: ConfigSerializer(many=False)})
|
||||
def get(self, request: Request) -> Response:
|
||||
"""Retrive public configuration options"""
|
||||
config = ConfigSerializer(
|
||||
{
|
||||
"error_reporting": {
|
||||
"enabled": CONFIG.y("error_reporting.enabled"),
|
||||
"sentry_dsn": CONFIG.y("error_reporting.sentry_dsn"),
|
||||
"environment": CONFIG.y("error_reporting.environment"),
|
||||
"send_pii": CONFIG.y("error_reporting.send_pii"),
|
||||
"traces_sample_rate": float(CONFIG.y("error_reporting.sample_rate", 0.4)),
|
||||
},
|
||||
"error_reporting_enabled": CONFIG.y("error_reporting.enabled"),
|
||||
"error_reporting_environment": CONFIG.y("error_reporting.environment"),
|
||||
"error_reporting_send_pii": CONFIG.y("error_reporting.send_pii"),
|
||||
"capabilities": self.get_capabilities(),
|
||||
"cache_timeout": int(CONFIG.y("redis.cache_timeout")),
|
||||
"cache_timeout_flows": int(CONFIG.y("redis.cache_timeout_flows")),
|
||||
@ -90,8 +76,4 @@ class ConfigView(APIView):
|
||||
"cache_timeout_reputation": int(CONFIG.y("redis.cache_timeout_reputation")),
|
||||
}
|
||||
)
|
||||
|
||||
@extend_schema(responses={200: ConfigSerializer(many=False)})
|
||||
def get(self, request: Request) -> Response:
|
||||
"""Retrieve public configuration options"""
|
||||
return Response(self.get_config().data)
|
||||
return Response(config.data)
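A possible client-side sketch (not part of this diff) of reading this public config endpoint and checking an advertised capability; the URL path and response handling are assumptions.

# Hypothetical client; the /root/config/ path is an assumption about where
# ConfigView is routed.
import requests

resp = requests.get("https://authentik.example.com/api/v3/root/config/")
config = resp.json()

if "can_geo_ip" in config.get("capabilities", []):
    print("GeoIP lookups are available on this install")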
|
||||
|
66
authentik/api/v3/sentry.py
Normal file
@ -0,0 +1,66 @@
|
||||
"""Sentry tunnel"""
|
||||
from json import loads
|
||||
|
||||
from django.conf import settings
|
||||
from django.http.request import HttpRequest
|
||||
from django.http.response import HttpResponse
|
||||
from requests import post
|
||||
from requests.exceptions import RequestException
|
||||
from rest_framework.authentication import SessionAuthentication
|
||||
from rest_framework.parsers import BaseParser
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.throttling import AnonRateThrottle
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from authentik.lib.config import CONFIG
|
||||
|
||||
|
||||
class PlainTextParser(BaseParser):
|
||||
"""Plain text parser."""
|
||||
|
||||
media_type = "text/plain"
|
||||
|
||||
def parse(self, stream, media_type=None, parser_context=None) -> str:
|
||||
"""Simply return a string representing the body of the request."""
|
||||
return stream.read()
|
||||
|
||||
|
||||
class CsrfExemptSessionAuthentication(SessionAuthentication):
|
||||
"""CSRF-exempt Session authentication"""
|
||||
|
||||
def enforce_csrf(self, request: Request):
|
||||
return # To not perform the csrf check previously happening
|
||||
|
||||
|
||||
class SentryTunnelView(APIView):
|
||||
"""Sentry tunnel, to prevent ad blockers from blocking sentry"""
|
||||
|
||||
serializer_class = None
|
||||
parser_classes = [PlainTextParser]
|
||||
throttle_classes = [AnonRateThrottle]
|
||||
permission_classes = [AllowAny]
|
||||
authentication_classes = [CsrfExemptSessionAuthentication]
|
||||
|
||||
def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
||||
"""Sentry tunnel, to prevent ad blockers from blocking sentry"""
|
||||
# Only allow usage of this endpoint when error reporting is enabled
|
||||
if not CONFIG.y_bool("error_reporting.enabled", False):
|
||||
return HttpResponse(status=400)
|
||||
# Body is 2 json objects separated by \n
|
||||
full_body = request.body
|
||||
header = loads(full_body.splitlines()[0])
|
||||
# Check that the DSN is what we expect
|
||||
dsn = header.get("dsn", "")
|
||||
if dsn != settings.SENTRY_DSN:
|
||||
return HttpResponse(status=400)
|
||||
response = post(
|
||||
"https://sentry.beryju.org/api/8/envelope/",
|
||||
data=full_body,
|
||||
headers={"Content-Type": "application/octet-stream"},
|
||||
)
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except RequestException:
|
||||
return HttpResponse(status=500)
|
||||
return HttpResponse(status=response.status_code)
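To make the expected request shape concrete, a sketch (not part of this diff) of posting a minimal Sentry envelope through the tunnel; the host and DSN are placeholders, and the tunnel path matches the route registered in urls.py below.

# The body is newline-separated JSON objects; the first line is the envelope
# header and must carry the DSN the view expects (settings.SENTRY_DSN).
from json import dumps

import requests

DSN = "https://public@sentry.example.com/1"  # placeholder, must match SENTRY_DSN
envelope = "\n".join(
    [
        dumps({"dsn": DSN, "event_id": "abc123"}),
        dumps({"type": "event"}),
        dumps({"message": "hello from the tunnel"}),
    ]
)
requests.post(
    "https://authentik.example.com/api/v3/sentry/",  # placeholder host, path from urls.py
    data=envelope,
    headers={"Content-Type": "text/plain"},
)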
|
@ -11,28 +11,25 @@ from authentik.admin.api.tasks import TaskViewSet
|
||||
from authentik.admin.api.version import VersionView
|
||||
from authentik.admin.api.workers import WorkerView
|
||||
from authentik.api.v3.config import ConfigView
|
||||
from authentik.api.v3.sentry import SentryTunnelView
|
||||
from authentik.api.views import APIBrowserView
|
||||
from authentik.blueprints.api import BlueprintInstanceViewSet
|
||||
from authentik.core.api.applications import ApplicationViewSet
|
||||
from authentik.core.api.authenticated_sessions import AuthenticatedSessionViewSet
|
||||
from authentik.core.api.devices import AdminDeviceViewSet, DeviceViewSet
|
||||
from authentik.core.api.groups import GroupViewSet
|
||||
from authentik.core.api.propertymappings import PropertyMappingViewSet
|
||||
from authentik.core.api.providers import ProviderViewSet
|
||||
from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
|
||||
from authentik.core.api.sources import SourceViewSet
|
||||
from authentik.core.api.tokens import TokenViewSet
|
||||
from authentik.core.api.users import UserViewSet
|
||||
from authentik.crypto.api import CertificateKeyPairViewSet
|
||||
from authentik.events.api.events import EventViewSet
|
||||
from authentik.events.api.notification_mappings import NotificationWebhookMappingViewSet
|
||||
from authentik.events.api.notification_rules import NotificationRuleViewSet
|
||||
from authentik.events.api.notification_transports import NotificationTransportViewSet
|
||||
from authentik.events.api.notifications import NotificationViewSet
|
||||
from authentik.events.api.event import EventViewSet
|
||||
from authentik.events.api.notification import NotificationViewSet
|
||||
from authentik.events.api.notification_rule import NotificationRuleViewSet
|
||||
from authentik.events.api.notification_transport import NotificationTransportViewSet
|
||||
from authentik.flows.api.bindings import FlowStageBindingViewSet
|
||||
from authentik.flows.api.flows import FlowViewSet
|
||||
from authentik.flows.api.stages import StageViewSet
|
||||
from authentik.flows.views.executor import FlowExecutorView
|
||||
from authentik.flows.views.inspector import FlowInspectorView
|
||||
from authentik.flows.views import FlowExecutorView
|
||||
from authentik.outposts.api.outposts import OutpostViewSet
|
||||
from authentik.outposts.api.service_connections import (
|
||||
DockerServiceConnectionViewSet,
|
||||
@ -45,32 +42,30 @@ from authentik.policies.dummy.api import DummyPolicyViewSet
|
||||
from authentik.policies.event_matcher.api import EventMatcherPolicyViewSet
|
||||
from authentik.policies.expiry.api import PasswordExpiryPolicyViewSet
|
||||
from authentik.policies.expression.api import ExpressionPolicyViewSet
|
||||
from authentik.policies.hibp.api import HaveIBeenPwendPolicyViewSet
|
||||
from authentik.policies.password.api import PasswordPolicyViewSet
|
||||
from authentik.policies.reputation.api import ReputationPolicyViewSet, ReputationViewSet
|
||||
from authentik.policies.reputation.api import (
|
||||
IPReputationViewSet,
|
||||
ReputationPolicyViewSet,
|
||||
UserReputationViewSet,
|
||||
)
|
||||
from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet
|
||||
from authentik.providers.oauth2.api.providers import OAuth2ProviderViewSet
|
||||
from authentik.providers.oauth2.api.scopes import ScopeMappingViewSet
|
||||
from authentik.providers.oauth2.api.provider import OAuth2ProviderViewSet
|
||||
from authentik.providers.oauth2.api.scope import ScopeMappingViewSet
|
||||
from authentik.providers.oauth2.api.tokens import AuthorizationCodeViewSet, RefreshTokenViewSet
|
||||
from authentik.providers.proxy.api import ProxyOutpostConfigViewSet, ProxyProviderViewSet
|
||||
from authentik.providers.saml.api.property_mapping import SAMLPropertyMappingViewSet
|
||||
from authentik.providers.saml.api.providers import SAMLProviderViewSet
|
||||
from authentik.providers.saml.api import SAMLPropertyMappingViewSet, SAMLProviderViewSet
|
||||
from authentik.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
|
||||
from authentik.sources.oauth.api.source import OAuthSourceViewSet
|
||||
from authentik.sources.oauth.api.source_connection import UserOAuthSourceConnectionViewSet
|
||||
from authentik.sources.plex.api.source import PlexSourceViewSet
|
||||
from authentik.sources.plex.api.source_connection import PlexSourceConnectionViewSet
|
||||
from authentik.sources.saml.api.source import SAMLSourceViewSet
|
||||
from authentik.sources.saml.api.source_connection import UserSAMLSourceConnectionViewSet
|
||||
from authentik.sources.saml.api import SAMLSourceViewSet
|
||||
from authentik.stages.authenticator_duo.api import (
|
||||
AuthenticatorDuoStageViewSet,
|
||||
DuoAdminDeviceViewSet,
|
||||
DuoDeviceViewSet,
|
||||
)
|
||||
from authentik.stages.authenticator_sms.api import (
|
||||
AuthenticatorSMSStageViewSet,
|
||||
SMSAdminDeviceViewSet,
|
||||
SMSDeviceViewSet,
|
||||
)
|
||||
from authentik.stages.authenticator_static.api import (
|
||||
AuthenticatorStaticStageViewSet,
|
||||
StaticAdminDeviceViewSet,
|
||||
@ -103,7 +98,6 @@ from authentik.stages.user_write.api import UserWriteStageViewSet
|
||||
from authentik.tenants.api import TenantViewSet
|
||||
|
||||
router = routers.DefaultRouter()
|
||||
router.include_format_suffixes = False
|
||||
|
||||
router.register("admin/system_tasks", TaskViewSet, basename="admin_system_tasks")
|
||||
router.register("admin/apps", AppsViewSet, basename="apps")
|
||||
@ -133,13 +127,9 @@ router.register("events/notifications", NotificationViewSet)
|
||||
router.register("events/transports", NotificationTransportViewSet)
|
||||
router.register("events/rules", NotificationRuleViewSet)
|
||||
|
||||
router.register("managed/blueprints", BlueprintInstanceViewSet)
|
||||
|
||||
router.register("sources/all", SourceViewSet)
|
||||
router.register("sources/user_connections/all", UserSourceConnectionViewSet)
|
||||
router.register("sources/user_connections/oauth", UserOAuthSourceConnectionViewSet)
|
||||
router.register("sources/user_connections/plex", PlexSourceConnectionViewSet)
|
||||
router.register("sources/user_connections/saml", UserSAMLSourceConnectionViewSet)
|
||||
router.register("sources/ldap", LDAPSourceViewSet)
|
||||
router.register("sources/saml", SAMLSourceViewSet)
|
||||
router.register("sources/oauth", OAuthSourceViewSet)
|
||||
@ -149,9 +139,11 @@ router.register("policies/all", PolicyViewSet)
|
||||
router.register("policies/bindings", PolicyBindingViewSet)
|
||||
router.register("policies/expression", ExpressionPolicyViewSet)
|
||||
router.register("policies/event_matcher", EventMatcherPolicyViewSet)
|
||||
router.register("policies/haveibeenpwned", HaveIBeenPwendPolicyViewSet)
|
||||
router.register("policies/password_expiry", PasswordExpiryPolicyViewSet)
|
||||
router.register("policies/password", PasswordPolicyViewSet)
|
||||
router.register("policies/reputation/scores", ReputationViewSet)
|
||||
router.register("policies/reputation/users", UserReputationViewSet)
|
||||
router.register("policies/reputation/ips", IPReputationViewSet)
|
||||
router.register("policies/reputation", ReputationPolicyViewSet)
|
||||
|
||||
router.register("providers/all", ProviderViewSet)
|
||||
@ -167,29 +159,16 @@ router.register("propertymappings/all", PropertyMappingViewSet)
|
||||
router.register("propertymappings/ldap", LDAPPropertyMappingViewSet)
|
||||
router.register("propertymappings/saml", SAMLPropertyMappingViewSet)
|
||||
router.register("propertymappings/scope", ScopeMappingViewSet)
|
||||
router.register("propertymappings/notification", NotificationWebhookMappingViewSet)
|
||||
|
||||
router.register("authenticators/all", DeviceViewSet, basename="device")
|
||||
router.register("authenticators/duo", DuoDeviceViewSet)
|
||||
router.register("authenticators/sms", SMSDeviceViewSet)
|
||||
router.register("authenticators/static", StaticDeviceViewSet)
|
||||
router.register("authenticators/totp", TOTPDeviceViewSet)
|
||||
router.register("authenticators/webauthn", WebAuthnDeviceViewSet)
|
||||
router.register(
|
||||
"authenticators/admin/all",
|
||||
AdminDeviceViewSet,
|
||||
basename="admin-device",
|
||||
)
|
||||
router.register(
|
||||
"authenticators/admin/duo",
|
||||
DuoAdminDeviceViewSet,
|
||||
basename="admin-duodevice",
|
||||
)
|
||||
router.register(
|
||||
"authenticators/admin/sms",
|
||||
SMSAdminDeviceViewSet,
|
||||
basename="admin-smsdevice",
|
||||
)
|
||||
router.register(
|
||||
"authenticators/admin/static",
|
||||
StaticAdminDeviceViewSet,
|
||||
@ -204,7 +183,6 @@ router.register(
|
||||
|
||||
router.register("stages/all", StageViewSet)
|
||||
router.register("stages/authenticator/duo", AuthenticatorDuoStageViewSet)
|
||||
router.register("stages/authenticator/sms", AuthenticatorSMSStageViewSet)
|
||||
router.register("stages/authenticator/static", AuthenticatorStaticStageViewSet)
|
||||
router.register("stages/authenticator/totp", AuthenticatorTOTPStageViewSet)
|
||||
router.register("stages/authenticator/validate", AuthenticatorValidateStageViewSet)
|
||||
@ -247,11 +225,7 @@ urlpatterns = (
|
||||
FlowExecutorView.as_view(),
|
||||
name="flow-executor",
|
||||
),
|
||||
path(
|
||||
"flows/inspector/<slug:flow_slug>/",
|
||||
FlowInspectorView.as_view(),
|
||||
name="flow-inspector",
|
||||
),
|
||||
path("sentry/", SentryTunnelView.as_view(), name="sentry"),
|
||||
path("schema/", cache_page(86400)(SpectacularAPIView.as_view()), name="schema"),
|
||||
]
|
||||
)
|
||||
|
@ -1,127 +0,0 @@
|
||||
"""Serializer mixin for managed models"""
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from drf_spectacular.utils import extend_schema, inline_serializer
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.fields import CharField, DateTimeField, JSONField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ListSerializer, ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
|
||||
|
||||
class ManagedSerializer:
|
||||
"""Managed Serializer"""
|
||||
|
||||
managed = CharField(read_only=True, allow_null=True)
|
||||
|
||||
|
||||
class MetadataSerializer(PassiveSerializer):
|
||||
"""Serializer for blueprint metadata"""
|
||||
|
||||
name = CharField()
|
||||
labels = JSONField()
|
||||
|
||||
|
||||
class BlueprintInstanceSerializer(ModelSerializer):
|
||||
"""Info about a single blueprint instance file"""
|
||||
|
||||
def validate_path(self, path: str) -> str:
|
||||
"""Ensure the path specified is retrievable"""
|
||||
try:
|
||||
BlueprintInstance(path=path).retrieve()
|
||||
except BlueprintRetrievalFailed as exc:
|
||||
raise ValidationError(exc) from exc
|
||||
return path
|
||||
|
||||
def validate_content(self, content: str) -> str:
|
||||
"""Ensure content (if set) is a valid blueprint"""
|
||||
if content == "":
|
||||
return content
|
||||
context = self.instance.context if self.instance else {}
|
||||
valid, logs = Importer(content, context).validate()
|
||||
if not valid:
|
||||
raise ValidationError(_("Failed to validate blueprint"), *[x["msg"] for x in logs])
|
||||
return content
|
||||
|
||||
def validate(self, attrs: dict) -> dict:
|
||||
if attrs.get("path", "") == "" and attrs.get("content", "") == "":
|
||||
raise ValidationError(_("Either path or content must be set."))
|
||||
return super().validate(attrs)
|
||||
|
||||
class Meta:
|
||||
|
||||
model = BlueprintInstance
|
||||
fields = [
|
||||
"pk",
|
||||
"name",
|
||||
"path",
|
||||
"context",
|
||||
"last_applied",
|
||||
"last_applied_hash",
|
||||
"status",
|
||||
"enabled",
|
||||
"managed_models",
|
||||
"metadata",
|
||||
"content",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"status": {"read_only": True},
|
||||
"last_applied": {"read_only": True},
|
||||
"last_applied_hash": {"read_only": True},
|
||||
"managed_models": {"read_only": True},
|
||||
"metadata": {"read_only": True},
|
||||
}
|
||||
|
||||
|
||||
class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
|
||||
"""Blueprint instances"""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
serializer_class = BlueprintInstanceSerializer
|
||||
queryset = BlueprintInstance.objects.all()
|
||||
search_fields = ["name", "path"]
|
||||
filterset_fields = ["name", "path"]
|
||||
|
||||
@extend_schema(
|
||||
responses={
|
||||
200: ListSerializer(
|
||||
child=inline_serializer(
|
||||
"BlueprintFile",
|
||||
fields={
|
||||
"path": CharField(),
|
||||
"last_m": DateTimeField(),
|
||||
"hash": CharField(),
|
||||
"meta": MetadataSerializer(required=False, read_only=True),
|
||||
},
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
||||
def available(self, request: Request) -> Response:
|
||||
"""Get blueprints"""
|
||||
files: list[dict] = blueprints_find_dict.delay().get()
|
||||
return Response(files)
|
||||
|
||||
@permission_required("authentik_blueprints.view_blueprintinstance")
|
||||
@extend_schema(
|
||||
request=None,
|
||||
responses={
|
||||
200: BlueprintInstanceSerializer(),
|
||||
},
|
||||
)
|
||||
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
|
||||
def apply(self, request: Request, *args, **kwargs) -> Response:
|
||||
"""Apply a blueprint"""
|
||||
blueprint = self.get_object()
|
||||
apply_blueprint.delay(str(blueprint.pk)).get()
|
||||
return self.retrieve(request, *args, **kwargs)
|
@ -1,66 +0,0 @@
|
||||
"""authentik Blueprints app"""
|
||||
|
||||
from importlib import import_module
|
||||
from inspect import ismethod
|
||||
|
||||
from django.apps import AppConfig
|
||||
from django.db import DatabaseError, InternalError, ProgrammingError
|
||||
from structlog.stdlib import BoundLogger, get_logger
|
||||
|
||||
|
||||
class ManagedAppConfig(AppConfig):
|
||||
"""Basic reconciliation logic for apps"""
|
||||
|
||||
_logger: BoundLogger
|
||||
|
||||
def __init__(self, app_name: str, *args, **kwargs) -> None:
|
||||
super().__init__(app_name, *args, **kwargs)
|
||||
self._logger = get_logger().bind(app_name=app_name)
|
||||
|
||||
def ready(self) -> None:
|
||||
self.reconcile()
|
||||
return super().ready()
|
||||
|
||||
def import_module(self, path: str):
|
||||
"""Load module"""
|
||||
import_module(path)
|
||||
|
||||
def reconcile(self) -> None:
|
||||
"""reconcile ourselves"""
|
||||
prefix = "reconcile_"
|
||||
for meth_name in dir(self):
|
||||
meth = getattr(self, meth_name)
|
||||
if not ismethod(meth):
|
||||
continue
|
||||
if not meth_name.startswith(prefix):
|
||||
continue
|
||||
name = meth_name.replace(prefix, "")
|
||||
try:
|
||||
self._logger.debug("Starting reconciler", name=name)
|
||||
meth()
|
||||
self._logger.debug("Successfully reconciled", name=name)
|
||||
except (DatabaseError, ProgrammingError, InternalError) as exc:
|
||||
self._logger.debug("Failed to run reconcile", name=name, exc=exc)
|
||||
|
||||
|
||||
class AuthentikBlueprintsConfig(ManagedAppConfig):
|
||||
"""authentik Blueprints app"""
|
||||
|
||||
name = "authentik.blueprints"
|
||||
label = "authentik_blueprints"
|
||||
verbose_name = "authentik Blueprints"
|
||||
default = True
|
||||
|
||||
def reconcile_load_blueprints_v1_tasks(self):
|
||||
"""Load v1 tasks"""
|
||||
self.import_module("authentik.blueprints.v1.tasks")
|
||||
|
||||
def reconcile_blueprints_discover(self):
|
||||
"""Run blueprint discovery"""
|
||||
from authentik.blueprints.v1.tasks import blueprints_discover
|
||||
|
||||
blueprints_discover.delay()
|
||||
|
||||
def import_models(self):
|
||||
super().import_models()
|
||||
self.import_module("authentik.blueprints.v1.meta.apply_blueprint")
|
@ -1,31 +0,0 @@
|
||||
"""Apply blueprint from commandline"""
|
||||
from sys import exit as sys_exit
|
||||
|
||||
from django.core.management.base import BaseCommand, no_translations
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.blueprints.models import BlueprintInstance
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Apply blueprint from commandline"""
|
||||
|
||||
@no_translations
|
||||
def handle(self, *args, **options):
|
||||
"""Apply all blueprints in order, abort when one fails to import"""
|
||||
for blueprint_path in options.get("blueprints", []):
|
||||
content = BlueprintInstance(path=blueprint_path).retrieve()
|
||||
importer = Importer(content)
|
||||
valid, logs = importer.validate()
|
||||
if not valid:
|
||||
for log in logs:
|
||||
getattr(LOGGER, log.pop("log_level"))(**log)
|
||||
self.stderr.write("blueprint invalid")
|
||||
sys_exit(1)
|
||||
importer.apply()
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument("blueprints", nargs="+", type=str)
|
@ -1,17 +0,0 @@
|
||||
"""Export blueprint of current authentik install"""
|
||||
from django.core.management.base import BaseCommand, no_translations
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.blueprints.v1.exporter import Exporter
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Export blueprint of current authentik install"""
|
||||
|
||||
@no_translations
|
||||
def handle(self, *args, **options):
|
||||
"""Export blueprint of current authentik install"""
|
||||
exporter = Exporter()
|
||||
self.stdout.write(exporter.export_to_string())
|
@ -1,36 +0,0 @@
|
||||
"""Generate JSON Schema for blueprints"""
|
||||
from json import dumps, loads
|
||||
from pathlib import Path
|
||||
|
||||
from django.core.management.base import BaseCommand, no_translations
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.blueprints.v1.importer import is_model_allowed
|
||||
from authentik.blueprints.v1.meta.registry import registry
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Generate JSON Schema for blueprints"""
|
||||
|
||||
schema: dict
|
||||
|
||||
@no_translations
|
||||
def handle(self, *args, **options):
|
||||
"""Generate JSON Schema for blueprints"""
|
||||
path = Path(__file__).parent.joinpath("./schema_template.json")
|
||||
with open(path, "r", encoding="utf-8") as _template_file:
|
||||
self.schema = loads(_template_file.read())
|
||||
self.set_model_allowed()
|
||||
self.stdout.write(dumps(self.schema, indent=4))
|
||||
|
||||
def set_model_allowed(self):
|
||||
"""Set model enum"""
|
||||
model_names = []
|
||||
for model in registry.get_models():
|
||||
if not is_model_allowed(model):
|
||||
continue
|
||||
model_names.append(f"{model._meta.app_label}.{model._meta.model_name}")
|
||||
model_names.sort()
|
||||
self.schema["properties"]["entries"]["items"]["properties"]["model"]["enum"] = model_names
|
@ -1,105 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema",
|
||||
"$id": "http://example.com/example.json",
|
||||
"type": "object",
|
||||
"title": "authentik Blueprint schema",
|
||||
"default": {},
|
||||
"required": [
|
||||
"version",
|
||||
"entries"
|
||||
],
|
||||
"properties": {
|
||||
"version": {
|
||||
"$id": "#/properties/version",
|
||||
"type": "integer",
|
||||
"title": "Blueprint version",
|
||||
"default": 1
|
||||
},
|
||||
"metadata": {
|
||||
"$id": "#/properties/metadata",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name"
|
||||
],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"labels": {
|
||||
"type": "object"
|
||||
}
|
||||
}
|
||||
},
|
||||
"context": {
|
||||
"$id": "#/properties/context",
|
||||
"type": "object",
|
||||
"additionalProperties": true
|
||||
},
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$id": "#entry",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"model"
|
||||
],
|
||||
"properties": {
|
||||
"model": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"placeholder"
|
||||
]
|
||||
},
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"state": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"absent",
|
||||
"present",
|
||||
"created"
|
||||
],
|
||||
"default": "present"
|
||||
},
|
||||
"conditions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"attrs": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "Commonly available field, may not exist on all models"
|
||||
}
|
||||
},
|
||||
"default": {},
|
||||
"additionalProperties": true
|
||||
},
|
||||
"identifiers": {
|
||||
"type": "object",
|
||||
"default": {},
|
||||
"properties": {
|
||||
"pk": {
|
||||
"description": "Commonly available field, may not exist on all models",
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "number"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -1,135 +0,0 @@
|
||||
# Generated by Django 4.0.6 on 2022-07-31 17:35
|
||||
import uuid
|
||||
from glob import glob
|
||||
from pathlib import Path
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from dacite.core import from_dict
|
||||
from django.apps.registry import Apps
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
from yaml import load
|
||||
|
||||
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM
|
||||
from authentik.lib.config import CONFIG
|
||||
|
||||
|
||||
def check_blueprint_v1_file(BlueprintInstance: type["BlueprintInstance"], path: Path):
|
||||
"""Check if blueprint should be imported"""
|
||||
from authentik.blueprints.models import BlueprintInstanceStatus
|
||||
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
|
||||
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
|
||||
|
||||
with open(path, "r", encoding="utf-8") as blueprint_file:
|
||||
raw_blueprint = load(blueprint_file.read(), BlueprintLoader)
|
||||
if not raw_blueprint:
|
||||
return
|
||||
metadata = raw_blueprint.get("metadata", None)
|
||||
version = raw_blueprint.get("version", 1)
|
||||
if version != 1:
|
||||
return
|
||||
blueprint_file.seek(0)
|
||||
instance: BlueprintInstance = BlueprintInstance.objects.filter(path=path).first()
|
||||
rel_path = path.relative_to(Path(CONFIG.y("blueprints_dir")))
|
||||
meta = None
|
||||
if metadata:
|
||||
meta = from_dict(BlueprintMetadata, metadata)
|
||||
if meta.labels.get(LABEL_AUTHENTIK_INSTANTIATE, "").lower() == "false":
|
||||
return
|
||||
if not instance:
|
||||
instance = BlueprintInstance(
|
||||
name=meta.name if meta else str(rel_path),
|
||||
path=str(rel_path),
|
||||
context={},
|
||||
status=BlueprintInstanceStatus.UNKNOWN,
|
||||
enabled=True,
|
||||
managed_models=[],
|
||||
last_applied_hash="",
|
||||
metadata=metadata,
|
||||
)
|
||||
instance.save()
|
||||
|
||||
|
||||
def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
BlueprintInstance = apps.get_model("authentik_blueprints", "BlueprintInstance")
|
||||
Flow = apps.get_model("authentik_flows", "Flow")
|
||||
|
||||
db_alias = schema_editor.connection.alias
|
||||
for file in glob(f"{CONFIG.y('blueprints_dir')}/**/*.yaml", recursive=True):
|
||||
check_blueprint_v1_file(BlueprintInstance, Path(file))
|
||||
|
||||
for blueprint in BlueprintInstance.objects.using(db_alias).all():
|
||||
# If we already have flows (and we should always run before flow migrations)
|
||||
# then this is an existing install and we want to disable all blueprints
|
||||
if Flow.objects.using(db_alias).all().exists():
|
||||
blueprint.enabled = False
|
||||
# System blueprints are always enabled
|
||||
if blueprint.metadata.get("labels", {}).get(LABEL_AUTHENTIK_SYSTEM, "").lower() == "true":
|
||||
blueprint.enabled = True
|
||||
blueprint.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [("authentik_flows", "0001_initial")]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="BlueprintInstance",
|
||||
fields=[
|
||||
("created", models.DateTimeField(auto_now_add=True)),
|
||||
("last_updated", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"managed",
|
||||
models.TextField(
|
||||
default=None,
|
||||
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
|
||||
null=True,
|
||||
unique=True,
|
||||
verbose_name="Managed by authentik",
|
||||
),
|
||||
),
|
||||
(
|
||||
"instance_uuid",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
|
||||
),
|
||||
),
|
||||
("name", models.TextField()),
|
||||
("metadata", models.JSONField(default=dict)),
|
||||
("path", models.TextField()),
|
||||
("context", models.JSONField(default=dict)),
|
||||
("last_applied", models.DateTimeField(auto_now=True)),
|
||||
("last_applied_hash", models.TextField()),
|
||||
(
|
||||
"status",
|
||||
models.TextField(
|
||||
choices=[
|
||||
("successful", "Successful"),
|
||||
("warning", "Warning"),
|
||||
("error", "Error"),
|
||||
("orphaned", "Orphaned"),
|
||||
("unknown", "Unknown"),
|
||||
],
|
||||
default="unknown",
|
||||
),
|
||||
),
|
||||
("enabled", models.BooleanField(default=True)),
|
||||
(
|
||||
"managed_models",
|
||||
django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.TextField(), default=list, size=None
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Blueprint Instance",
|
||||
"verbose_name_plural": "Blueprint Instances",
|
||||
"unique_together": {("name", "path")},
|
||||
},
|
||||
),
|
||||
migrations.RunPython(migration_blueprint_import),
|
||||
]
|
@ -1,23 +0,0 @@
|
||||
# Generated by Django 4.1.5 on 2023-01-10 19:48
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_blueprints", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="blueprintinstance",
|
||||
name="content",
|
||||
field=models.TextField(blank=True, default=""),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="blueprintinstance",
|
||||
name="path",
|
||||
field=models.TextField(blank=True, default=""),
|
||||
),
|
||||
]
|
@ -1,120 +0,0 @@
|
||||
"""blueprint models"""
|
||||
from pathlib import Path
|
||||
from uuid import uuid4
|
||||
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework.serializers import Serializer
|
||||
from structlog import get_logger
|
||||
|
||||
from authentik.blueprints.v1.oci import BlueprintOCIClient, OCIException
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.models import CreatedUpdatedModel, SerializerModel
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class BlueprintRetrievalFailed(SentryIgnoredException):
|
||||
"""Error raised when we're unable to fetch the blueprint contents, whether it be HTTP files
|
||||
not being accessible or local files not being readable"""
|
||||
|
||||
|
||||
class ManagedModel(models.Model):
|
||||
"""Model which can be managed by authentik exclusively"""
|
||||
|
||||
managed = models.TextField(
|
||||
default=None,
|
||||
null=True,
|
||||
verbose_name=_("Managed by authentik"),
|
||||
help_text=_(
|
||||
(
|
||||
"Objects which are managed by authentik. These objects are created and updated "
|
||||
"automatically. This is flag only indicates that an object can be overwritten by "
|
||||
"migrations. You can still modify the objects via the API, but expect changes "
|
||||
"to be overwritten in a later update."
|
||||
)
|
||||
),
|
||||
unique=True,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
|
||||
abstract = True
|
||||
|
||||
|
||||
class BlueprintInstanceStatus(models.TextChoices):
|
||||
"""Instance status"""
|
||||
|
||||
SUCCESSFUL = "successful"
|
||||
WARNING = "warning"
|
||||
ERROR = "error"
|
||||
ORPHANED = "orphaned"
|
||||
UNKNOWN = "unknown"
|
||||
|
||||
|
||||
class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
"""Instance of a single blueprint. Can be parameterized via context attribute when
|
||||
blueprint in `path` has inputs."""
|
||||
|
||||
instance_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
|
||||
|
||||
name = models.TextField()
|
||||
metadata = models.JSONField(default=dict)
|
||||
path = models.TextField(default="", blank=True)
|
||||
content = models.TextField(default="", blank=True)
|
||||
context = models.JSONField(default=dict)
|
||||
last_applied = models.DateTimeField(auto_now=True)
|
||||
last_applied_hash = models.TextField()
|
||||
status = models.TextField(
|
||||
choices=BlueprintInstanceStatus.choices, default=BlueprintInstanceStatus.UNKNOWN
|
||||
)
|
||||
enabled = models.BooleanField(default=True)
|
||||
managed_models = ArrayField(models.TextField(), default=list)
|
||||
|
||||
def retrieve_oci(self) -> str:
|
||||
"""Get blueprint from an OCI registry"""
|
||||
client = BlueprintOCIClient(self.path.replace("oci://", "https://"))
|
||||
try:
|
||||
manifests = client.fetch_manifests()
|
||||
return client.fetch_blobs(manifests)
|
||||
except OCIException as exc:
|
||||
raise BlueprintRetrievalFailed(exc) from exc
|
||||
|
||||
def retrieve_file(self) -> str:
|
||||
"""Get blueprint from path"""
|
||||
try:
|
||||
full_path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
|
||||
with full_path.open("r", encoding="utf-8") as _file:
|
||||
return _file.read()
|
||||
except (IOError, OSError) as exc:
|
||||
raise BlueprintRetrievalFailed(exc) from exc
|
||||
|
||||
def retrieve(self) -> str:
|
||||
"""Retrieve blueprint contents"""
|
||||
if self.path.startswith("oci://"):
|
||||
return self.retrieve_oci()
|
||||
if self.path != "":
|
||||
return self.retrieve_file()
|
||||
return self.content
|
||||
|
||||
@property
|
||||
def serializer(self) -> Serializer:
|
||||
from authentik.blueprints.api import BlueprintInstanceSerializer
|
||||
|
||||
return BlueprintInstanceSerializer
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Blueprint Instance {self.name}"
|
||||
|
||||
class Meta:
|
||||
|
||||
verbose_name = _("Blueprint Instance")
|
||||
verbose_name_plural = _("Blueprint Instances")
|
||||
unique_together = (
|
||||
(
|
||||
"name",
|
||||
"path",
|
||||
),
|
||||
)
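For context, a sketch (not part of this diff) of how retrieve() dispatches based on the path, mirroring the three branches above; the registry path, file path, and inline content are placeholders.

# Placeholders only; retrieve() picks retrieve_oci() for oci:// paths,
# retrieve_file() for non-empty plain paths, and falls back to inline content.
from authentik.blueprints.models import BlueprintInstance

remote = BlueprintInstance(name="remote", path="oci://ghcr.io/goauthentik/blueprints/example:latest")
local = BlueprintInstance(name="local", path="example/flows.yaml")
inline = BlueprintInstance(name="inline", content="version: 1\nentries: []")

for instance in (remote, local, inline):
    print(instance.name, len(instance.retrieve()))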
|
@ -1,12 +0,0 @@
"""blueprint Settings"""
from celery.schedules import crontab

from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
    "blueprints_v1_discover": {
        "task": "authentik.blueprints.v1.tasks.blueprints_discover",
        "schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"),
        "options": {"queue": "authentik_scheduled"},
    },
}
@ -1,57 +0,0 @@
|
||||
"""Blueprint helpers"""
|
||||
from functools import wraps
|
||||
from pathlib import Path
|
||||
from typing import Callable
|
||||
|
||||
from django.apps import apps
|
||||
|
||||
from authentik.blueprints.apps import ManagedAppConfig
|
||||
from authentik.blueprints.models import BlueprintInstance
|
||||
|
||||
|
||||
def apply_blueprint(*files: str):
|
||||
"""Apply blueprint before test"""
|
||||
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
|
||||
def wrapper_outer(func: Callable):
|
||||
"""Apply blueprint before test"""
|
||||
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
for file in files:
|
||||
content = BlueprintInstance(path=file).retrieve()
|
||||
Importer(content).apply()
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return wrapper_outer
|
||||
|
||||
|
||||
def reconcile_app(app_name: str):
|
||||
"""Re-reconcile AppConfig methods"""
|
||||
|
||||
def wrapper_outer(func: Callable):
|
||||
"""Re-reconcile AppConfig methods"""
|
||||
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
config = apps.get_app_config(app_name)
|
||||
if isinstance(config, ManagedAppConfig):
|
||||
config.reconcile()
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return wrapper_outer
|
||||
|
||||
|
||||
def load_yaml_fixture(path: str, **kwargs) -> str:
|
||||
"""Load yaml fixture, optionally formatting it with kwargs"""
|
||||
with open(Path(__file__).resolve().parent / Path(path), "r", encoding="utf-8") as _fixture:
|
||||
fixture = _fixture.read()
|
||||
try:
|
||||
return fixture % kwargs
|
||||
except TypeError:
|
||||
return fixture
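A short sketch (not part of this diff) combining both helpers above in a test, mirroring the packaged-blueprint test further down; the blueprint path and assertion follow that test.

# Uses the decorators defined above; the tenant blueprint and domain come from
# the packaged default blueprints.
from django.test import TestCase

from authentik.blueprints.tests import apply_blueprint, reconcile_app
from authentik.tenants.models import Tenant


class TestWithBlueprints(TestCase):
    """Placeholder test case combining both helpers."""

    @apply_blueprint("default/90-default-tenant.yaml")
    @reconcile_app("authentik_blueprints")
    def test_defaults_present(self):
        # apply_blueprint imported the blueprint before the test body ran;
        # reconcile_app re-ran the blueprints app reconcilers.
        self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())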
|
@ -1,21 +0,0 @@
|
||||
version: 1
|
||||
entries:
|
||||
- identifiers:
|
||||
name: "%(id1)s"
|
||||
slug: "%(id1)s"
|
||||
model: authentik_flows.flow
|
||||
conditions:
|
||||
- true
|
||||
attrs:
|
||||
designation: stage_configuration
|
||||
title: foo
|
||||
- identifiers:
|
||||
name: "%(id2)s"
|
||||
slug: "%(id2)s"
|
||||
model: authentik_flows.flow
|
||||
conditions:
|
||||
- true
|
||||
- true
|
||||
attrs:
|
||||
designation: stage_configuration
|
||||
title: foo
|
@ -1,21 +0,0 @@
|
||||
version: 1
|
||||
entries:
|
||||
- identifiers:
|
||||
name: "%(id1)s"
|
||||
slug: "%(id1)s"
|
||||
model: authentik_flows.flow
|
||||
conditions:
|
||||
- false
|
||||
attrs:
|
||||
designation: stage_configuration
|
||||
title: foo
|
||||
- identifiers:
|
||||
name: "%(id2)s"
|
||||
slug: "%(id2)s"
|
||||
model: authentik_flows.flow
|
||||
conditions:
|
||||
- true
|
||||
- false
|
||||
attrs:
|
||||
designation: stage_configuration
|
||||
title: foo
|
@ -1,13 +0,0 @@
|
||||
version: 1
|
||||
entries:
|
||||
- identifiers:
|
||||
name: "%(id)s"
|
||||
slug: "%(id)s"
|
||||
model: authentik_flows.flow
|
||||
state: absent
|
||||
- identifiers:
|
||||
name: "%(id)s"
|
||||
expression: |
|
||||
return True
|
||||
model: authentik_policies_expression.expressionpolicy
|
||||
state: absent
|
@ -1,10 +0,0 @@
|
||||
version: 1
|
||||
entries:
|
||||
- identifiers:
|
||||
name: "%(id)s"
|
||||
slug: "%(id)s"
|
||||
model: authentik_flows.flow
|
||||
state: created
|
||||
attrs:
|
||||
designation: stage_configuration
|
||||
title: foo
|
@ -1,10 +0,0 @@
|
||||
version: 1
|
||||
entries:
|
||||
- identifiers:
|
||||
name: "%(id)s"
|
||||
slug: "%(id)s"
|
||||
model: authentik_flows.flow
|
||||
state: present
|
||||
attrs:
|
||||
designation: stage_configuration
|
||||
title: foo
|
@ -1,12 +0,0 @@
|
||||
version: 1
|
||||
entries:
|
||||
- identifiers:
|
||||
pk: cb954fd4-65a5-4ad9-b1ee-180ee9559cf4
|
||||
model: authentik_stages_prompt.prompt
|
||||
attrs:
|
||||
field_key: username
|
||||
label: Username
|
||||
type: username
|
||||
required: true
|
||||
placeholder: Username
|
||||
order: 0
|
150
authentik/blueprints/tests/fixtures/tags.yaml
vendored
@ -1,150 +0,0 @@
|
||||
version: 1
|
||||
context:
|
||||
foo: bar
|
||||
policy_property: name
|
||||
policy_property_value: foo-bar-baz-qux
|
||||
sequence:
|
||||
- foo
|
||||
- bar
|
||||
mapping:
|
||||
key1: value
|
||||
key2: 2
|
||||
entries:
|
||||
- model: !Format ["%s", authentik_sources_oauth.oauthsource]
|
||||
state: !Format ["%s", present]
|
||||
identifiers:
|
||||
slug: test
|
||||
attrs:
|
||||
name: test
|
||||
provider_type: github
|
||||
consumer_key: !Env foo
|
||||
consumer_secret: !Env [bar, baz]
|
||||
authentication_flow:
|
||||
!Find [
|
||||
authentik_flows.Flow,
|
||||
[slug, default-source-authentication],
|
||||
]
|
||||
enrollment_flow:
|
||||
!Find [!Format ["%s", authentik_flows.Flow], [slug, default-source-enrollment]]
|
||||
- attrs:
|
||||
expression: return True
|
||||
identifiers:
|
||||
name: !Format [foo-%s-%s-%s, !Context foo, !Context bar, qux]
|
||||
id: policy
|
||||
model: authentik_policies_expression.expressionpolicy
|
||||
- attrs:
|
||||
attributes:
|
||||
policy_pk1:
|
||||
!Format [
|
||||
"%s-%s",
|
||||
!Find [
|
||||
authentik_policies_expression.expressionpolicy,
|
||||
[
|
||||
!Context policy_property,
|
||||
!Context policy_property_value,
|
||||
],
|
||||
[expression, return True],
|
||||
],
|
||||
suffix,
|
||||
]
|
||||
policy_pk2: !Format ["%s-%s", !KeyOf policy, suffix]
|
||||
boolAnd:
|
||||
!Condition [AND, !Context foo, !Format ["%s", "a_string"], 1]
|
||||
boolNand:
|
||||
!Condition [NAND, !Context foo, !Format ["%s", "a_string"], 1]
|
||||
boolOr:
|
||||
!Condition [
|
||||
OR,
|
||||
!Context foo,
|
||||
!Format ["%s", "a_string"],
|
||||
null,
|
||||
]
|
||||
boolNor:
|
||||
!Condition [
|
||||
NOR,
|
||||
!Context foo,
|
||||
!Format ["%s", "a_string"],
|
||||
null,
|
||||
]
|
||||
boolXor:
|
||||
!Condition [XOR, !Context foo, !Format ["%s", "a_string"], 1]
|
||||
boolXnor:
|
||||
!Condition [XNOR, !Context foo, !Format ["%s", "a_string"], 1]
|
||||
boolComplex:
|
||||
!Condition [
|
||||
XNOR,
|
||||
!Condition [AND, !Context non_existing],
|
||||
!Condition [NOR, a string],
|
||||
!Condition [XOR, null],
|
||||
]
|
||||
if_true_complex:
|
||||
!If [
|
||||
true,
|
||||
{
|
||||
dictionary:
|
||||
{
|
||||
with: { keys: "and_values" },
|
||||
and_nested_custom_tags:
|
||||
!Format ["foo-%s", !Context foo],
|
||||
},
|
||||
},
|
||||
null,
|
||||
]
|
||||
if_false_complex:
|
||||
!If [
|
||||
!Condition [AND, false],
|
||||
null,
|
||||
[list, with, items, !Format ["foo-%s", !Context foo]],
|
||||
]
|
||||
if_true_simple: !If [!Context foo, true, text]
|
||||
if_false_simple: !If [null, false, 2]
|
||||
enumerate_mapping_to_mapping: !Enumerate [
|
||||
!Context mapping,
|
||||
MAP,
|
||||
[!Format ["prefix-%s", !Index 0], !Format ["other-prefix-%s", !Value 0]]
|
||||
]
|
||||
enumerate_mapping_to_sequence: !Enumerate [
|
||||
!Context mapping,
|
||||
SEQ,
|
||||
!Format ["prefixed-pair-%s-%s", !Index 0, !Value 0]
|
||||
]
|
||||
enumerate_sequence_to_sequence: !Enumerate [
|
||||
!Context sequence,
|
||||
SEQ,
|
||||
!Format ["prefixed-items-%s-%s", !Index 0, !Value 0]
|
||||
]
|
||||
enumerate_sequence_to_mapping: !Enumerate [
|
||||
!Context sequence,
|
||||
MAP,
|
||||
[!Format ["index: %d", !Index 0], !Value 0]
|
||||
]
|
||||
nested_complex_enumeration: !Enumerate [
|
||||
!Context sequence,
|
||||
MAP,
|
||||
[
|
||||
!Index 0,
|
||||
!Enumerate [
|
||||
!Context mapping,
|
||||
MAP,
|
||||
[
|
||||
!Format ["%s", !Index 0],
|
||||
[
|
||||
!Enumerate [!Value 2, SEQ, !Format ["prefixed-%s", !Value 0]],
|
||||
{
|
||||
outer_value: !Value 1,
|
||||
outer_index: !Index 1,
|
||||
middle_value: !Value 0,
|
||||
middle_index: !Index 0
|
||||
}
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
identifiers:
|
||||
name: test
|
||||
conditions:
|
||||
- !Condition [AND, true, true, text]
|
||||
- true
|
||||
- text
|
||||
model: authentik_core.group
|
@ -1,34 +0,0 @@
|
||||
"""authentik managed models tests"""
|
||||
from typing import Callable, Type
|
||||
|
||||
from django.apps import apps
|
||||
from django.test import TestCase
|
||||
|
||||
from authentik.blueprints.v1.importer import is_model_allowed
|
||||
from authentik.lib.models import SerializerModel
|
||||
|
||||
|
||||
class TestModels(TestCase):
|
||||
"""Test Models"""
|
||||
|
||||
|
||||
def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
|
||||
"""Test serializer"""
|
||||
|
||||
def tester(self: TestModels):
|
||||
if test_model._meta.abstract: # pragma: no cover
|
||||
return
|
||||
model_class = test_model()
|
||||
self.assertTrue(isinstance(model_class, SerializerModel))
|
||||
self.assertIsNotNone(model_class.serializer)
|
||||
|
||||
return tester
|
||||
|
||||
|
||||
for app in apps.get_app_configs():
|
||||
if not app.label.startswith("authentik"):
|
||||
continue
|
||||
for model in app.get_models():
|
||||
if not is_model_allowed(model):
|
||||
continue
|
||||
setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model))
|
@@ -1,98 +0,0 @@
"""Test blueprints OCI"""
from django.test import TransactionTestCase
from requests_mock import Mocker

from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
from authentik.blueprints.v1.oci import OCI_MEDIA_TYPE


class TestBlueprintOCI(TransactionTestCase):
    """Test Blueprints OCI Tasks"""

    def test_successful(self):
        """Successful retrieval"""
        with Mocker() as mocker:
            mocker.get(
                "https://ghcr.io/v2/goauthentik/blueprints/test/manifests/latest",
                json={
                    "layers": [
                        {
                            "mediaType": OCI_MEDIA_TYPE,
                            "digest": "foo",
                        }
                    ]
                },
            )
            mocker.get("https://ghcr.io/v2/goauthentik/blueprints/test/blobs/foo", text="foo")

            self.assertEqual(
                BlueprintInstance(
                    path="oci://ghcr.io/goauthentik/blueprints/test:latest"
                ).retrieve(),
                "foo",
            )

    def test_manifests_error(self):
        """Test manifests request erroring"""
        with Mocker() as mocker:
            mocker.get(
                "https://ghcr.io/v2/goauthentik/blueprints/test/manifests/latest", status_code=401
            )

            with self.assertRaises(BlueprintRetrievalFailed):
                BlueprintInstance(
                    path="oci://ghcr.io/goauthentik/blueprints/test:latest"
                ).retrieve_oci()

    def test_manifests_error_response(self):
        """Test manifests request erroring"""
        with Mocker() as mocker:
            mocker.get(
                "https://ghcr.io/v2/goauthentik/blueprints/test/manifests/latest",
                json={"errors": ["foo"]},
            )

            with self.assertRaises(BlueprintRetrievalFailed):
                BlueprintInstance(
                    path="oci://ghcr.io/goauthentik/blueprints/test:latest"
                ).retrieve_oci()

    def test_no_matching_blob(self):
        """Successful retrieval"""
        with Mocker() as mocker:
            mocker.get(
                "https://ghcr.io/v2/goauthentik/blueprints/test/manifests/latest",
                json={
                    "layers": [
                        {
                            "mediaType": OCI_MEDIA_TYPE + "foo",
                            "digest": "foo",
                        }
                    ]
                },
            )
            with self.assertRaises(BlueprintRetrievalFailed):
                BlueprintInstance(
                    path="oci://ghcr.io/goauthentik/blueprints/test:latest"
                ).retrieve_oci()

    def test_blob_error(self):
        """Successful retrieval"""
        with Mocker() as mocker:
            mocker.get(
                "https://ghcr.io/v2/goauthentik/blueprints/test/manifests/latest",
                json={
                    "layers": [
                        {
                            "mediaType": OCI_MEDIA_TYPE,
                            "digest": "foo",
                        }
                    ]
                },
            )
            mocker.get("https://ghcr.io/v2/goauthentik/blueprints/test/blobs/foo", status_code=401)

            with self.assertRaises(BlueprintRetrievalFailed):
                BlueprintInstance(
                    path="oci://ghcr.io/goauthentik/blueprints/test:latest"
                ).retrieve_oci()
@@ -1,38 +0,0 @@
"""test packaged blueprints"""
from pathlib import Path
from typing import Callable

from django.test import TransactionTestCase

from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.tests import apply_blueprint
from authentik.blueprints.v1.importer import Importer
from authentik.tenants.models import Tenant


class TestPackaged(TransactionTestCase):
    """Empty class, test methods are added dynamically"""

    @apply_blueprint("default/90-default-tenant.yaml")
    def test_decorator_static(self):
        """Test @apply_blueprint decorator"""
        self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())


def blueprint_tester(file_name: Path) -> Callable:
    """This is used instead of subTest for better visibility"""

    def tester(self: TestPackaged):
        base = Path("blueprints/")
        rel_path = Path(file_name).relative_to(base)
        importer = Importer(BlueprintInstance(path=str(rel_path)).retrieve())
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())

    return tester


for blueprint_file in Path("blueprints/").glob("**/*.yaml"):
    if "local" in str(blueprint_file):
        continue
    setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file))
@@ -1,300 +0,0 @@
"""Test blueprints v1"""
from os import environ

from django.test import TransactionTestCase

from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.exporter import FlowExporter
from authentik.blueprints.v1.importer import Importer, transaction_rollback
from authentik.core.models import Group
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding
from authentik.lib.generators import generate_id
from authentik.policies.expression.models import ExpressionPolicy
from authentik.policies.models import PolicyBinding
from authentik.sources.oauth.models import OAuthSource
from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage
from authentik.stages.user_login.models import UserLoginStage


class TestBlueprintsV1(TransactionTestCase):
    """Test Blueprints"""

    def test_blueprint_invalid_format(self):
        """Test blueprint with invalid format"""
        importer = Importer('{"version": 3}')
        self.assertFalse(importer.validate()[0])
        importer = Importer(
            (
                '{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
                '"model": "authentik_core.User"}]}'
            )
        )
        self.assertFalse(importer.validate()[0])
        importer = Importer(
            (
                '{"version": 1, "entries": [{"attrs": {"name": "test"}, '
                '"identifiers": {}, '
                '"model": "authentik_core.Group"}]}'
            )
        )
        self.assertFalse(importer.validate()[0])

    def test_validated_import_dict_identifiers(self):
        """Test importing blueprints with dict identifiers."""
        Group.objects.filter(name__istartswith="test").delete()

        Group.objects.create(
            name="test1",
            attributes={
                "key": ["value"],
                "other_key": ["a_value", "other_value"],
            },
        )
        Group.objects.create(
            name="test2",
            attributes={
                "key": ["value"],
                "other_key": ["diff_value", "other_diff_value"],
            },
        )

        importer = Importer(
            (
                '{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
                '{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
                '["other_value"]}}, "model": "authentik_core.Group"}]}'
            )
        )
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        self.assertTrue(
            Group.objects.filter(
                name="test2",
                attributes={
                    "key": ["value"],
                    "other_key": ["diff_value", "other_diff_value"],
                },
            )
        )
        self.assertTrue(
            Group.objects.filter(
                name="test999",
                # All attributes used as identifiers are kept and merged with the
                # new attributes declared in the blueprint
                attributes={"key": ["updated_value"], "other_key": ["other_value"]},
            )
        )
        self.assertFalse(Group.objects.filter(name="test1"))

    def test_export_validate_import(self):
        """Test export and validate it"""
        flow_slug = generate_id()
        with transaction_rollback():
            login_stage = UserLoginStage.objects.create(name=generate_id())

            flow = Flow.objects.create(
                slug=flow_slug,
                designation=FlowDesignation.AUTHENTICATION,
                name=generate_id(),
                title=generate_id(),
            )
            FlowStageBinding.objects.update_or_create(
                target=flow,
                stage=login_stage,
                order=0,
            )

            exporter = FlowExporter(flow)
            export = exporter.export()
            self.assertEqual(len(export.entries), 3)
            export_yaml = exporter.export_to_string()

        importer = Importer(export_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())

        self.assertTrue(Flow.objects.filter(slug=flow_slug).exists())

    def test_export_validate_import_re_import(self):
        """Test export and import it twice"""
        count_initial = Prompt.objects.filter(field_key="username").count()

        importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())

        count_before = Prompt.objects.filter(field_key="username").count()
        self.assertEqual(count_initial + 1, count_before)

        importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
        self.assertTrue(importer.apply())

        self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)

    def test_import_yaml_tags(self):
        """Test some yaml tags"""
        ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
        Group.objects.filter(name="test").delete()
        environ["foo"] = generate_id()
        importer = Importer(load_yaml_fixture("fixtures/tags.yaml"), {"bar": "baz"})
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()
        self.assertTrue(policy)
        self.assertTrue(
            Group.objects.filter(
                attributes={
                    "policy_pk1": str(policy.pk) + "-suffix",
                    "policy_pk2": str(policy.pk) + "-suffix",
                    "boolAnd": True,
                    "boolNand": False,
                    "boolOr": True,
                    "boolNor": False,
                    "boolXor": True,
                    "boolXnor": False,
                    "boolComplex": True,
                    "if_true_complex": {
                        "dictionary": {
                            "with": {"keys": "and_values"},
                            "and_nested_custom_tags": "foo-bar",
                        }
                    },
                    "if_false_complex": ["list", "with", "items", "foo-bar"],
                    "if_true_simple": True,
                    "if_false_simple": 2,
                    "enumerate_mapping_to_mapping": {
                        "prefix-key1": "other-prefix-value",
                        "prefix-key2": "other-prefix-2",
                    },
                    "enumerate_mapping_to_sequence": [
                        "prefixed-pair-key1-value",
                        "prefixed-pair-key2-2",
                    ],
                    "enumerate_sequence_to_sequence": [
                        "prefixed-items-0-foo",
                        "prefixed-items-1-bar",
                    ],
                    "enumerate_sequence_to_mapping": {"index: 0": "foo", "index: 1": "bar"},
                    "nested_complex_enumeration": {
                        "0": {
                            "key1": [
                                ["prefixed-f", "prefixed-o", "prefixed-o"],
                                {
                                    "outer_value": "foo",
                                    "outer_index": 0,
                                    "middle_value": "value",
                                    "middle_index": "key1",
                                },
                            ],
                            "key2": [
                                ["prefixed-f", "prefixed-o", "prefixed-o"],
                                {
                                    "outer_value": "foo",
                                    "outer_index": 0,
                                    "middle_value": 2,
                                    "middle_index": "key2",
                                },
                            ],
                        },
                        "1": {
                            "key1": [
                                ["prefixed-b", "prefixed-a", "prefixed-r"],
                                {
                                    "outer_value": "bar",
                                    "outer_index": 1,
                                    "middle_value": "value",
                                    "middle_index": "key1",
                                },
                            ],
                            "key2": [
                                ["prefixed-b", "prefixed-a", "prefixed-r"],
                                {
                                    "outer_value": "bar",
                                    "outer_index": 1,
                                    "middle_value": 2,
                                    "middle_index": "key2",
                                },
                            ],
                        },
                    },
                }
            )
        )
        self.assertTrue(
            OAuthSource.objects.filter(
                slug="test",
                consumer_key=environ["foo"],
            )
        )

    def test_export_validate_import_policies(self):
        """Test export and validate it"""
        flow_slug = generate_id()
        stage_name = generate_id()
        with transaction_rollback():
            flow_policy = ExpressionPolicy.objects.create(
                name=generate_id(),
                expression="return True",
            )
            flow = Flow.objects.create(
                slug=flow_slug,
                designation=FlowDesignation.AUTHENTICATION,
                name=generate_id(),
                title=generate_id(),
            )
            PolicyBinding.objects.create(policy=flow_policy, target=flow, order=0)

            user_login = UserLoginStage.objects.create(name=stage_name)
            fsb = FlowStageBinding.objects.create(target=flow, stage=user_login, order=0)
            PolicyBinding.objects.create(policy=flow_policy, target=fsb, order=0)

            exporter = FlowExporter(flow)
            export_yaml = exporter.export_to_string()

        importer = Importer(export_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        self.assertTrue(UserLoginStage.objects.filter(name=stage_name).exists())
        self.assertTrue(Flow.objects.filter(slug=flow_slug).exists())

    def test_export_validate_import_prompt(self):
        """Test export and validate it"""
        with transaction_rollback():
            # First stage fields
            username_prompt = Prompt.objects.create(
                field_key="username", label="Username", order=0, type=FieldTypes.TEXT
            )
            password = Prompt.objects.create(
                field_key="password",
                label="Password",
                order=1,
                type=FieldTypes.PASSWORD,
            )
            password_repeat = Prompt.objects.create(
                field_key="password_repeat",
                label="Password (repeat)",
                order=2,
                type=FieldTypes.PASSWORD,
            )

            # Stages
            first_stage = PromptStage.objects.create(name=generate_id())
            first_stage.fields.set([username_prompt, password, password_repeat])
            first_stage.save()

            flow = Flow.objects.create(
                name=generate_id(),
                slug=generate_id(),
                designation=FlowDesignation.ENROLLMENT,
                title=generate_id(),
            )

            FlowStageBinding.objects.create(target=flow, stage=first_stage, order=0)

            exporter = FlowExporter(flow)
            export_yaml = exporter.export_to_string()

        importer = Importer(export_yaml)

        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
@@ -1,70 +0,0 @@
"""Test blueprints v1 api"""
from json import loads
from tempfile import NamedTemporaryFile, mkdtemp

from django.urls import reverse
from rest_framework.test import APITestCase
from yaml import dump

from authentik.core.tests.utils import create_test_admin_user
from authentik.lib.config import CONFIG

TMP = mkdtemp("authentik-blueprints")


class TestBlueprintsV1API(APITestCase):
    """Test Blueprints API"""

    def setUp(self) -> None:
        self.user = create_test_admin_user()
        self.client.force_login(self.user)

    @CONFIG.patch("blueprints_dir", TMP)
    def test_api_available(self):
        """Test valid file"""
        with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
            file.write(
                dump(
                    {
                        "version": 1,
                        "entries": [],
                    }
                )
            )
            file.flush()
            res = self.client.get(reverse("authentik_api:blueprintinstance-available"))
            self.assertEqual(res.status_code, 200)
            response = loads(res.content.decode())
            self.assertEqual(len(response), 1)
            self.assertEqual(
                response[0]["hash"],
                (
                    "e52bb445b03cd36057258dc9f0ce0fbed8278498ee1470e45315293e5f026d1bd1f9b352"
                    "6871c0003f5c07be5c3316d9d4a08444bd8fed1b3f03294e51e44522"
                ),
            )

    def test_api_blank(self):
        """Test blank"""
        res = self.client.post(
            reverse("authentik_api:blueprintinstance-list"),
            data={
                "name": "foo",
            },
        )
        self.assertEqual(res.status_code, 400)
        self.assertJSONEqual(
            res.content.decode(), {"non_field_errors": ["Either path or content must be set."]}
        )

    def test_api_content(self):
        """Test blank"""
        res = self.client.post(
            reverse("authentik_api:blueprintinstance-list"),
            data={
                "name": "foo",
                "content": '{"version": 3}',
            },
        )
        self.assertEqual(res.status_code, 400)
        self.assertJSONEqual(res.content.decode(), {"content": ["Failed to validate blueprint"]})
@@ -1,43 +0,0 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase

from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.importer import Importer
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id


class TestBlueprintsV1Conditions(TransactionTestCase):
    """Test Blueprints conditions attribute"""

    def test_conditions_fulfilled(self):
        """Test conditions fulfilled"""
        flow_slug1 = generate_id()
        flow_slug2 = generate_id()
        import_yaml = load_yaml_fixture(
            "fixtures/conditions_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
        )

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure objects exist
        flow: Flow = Flow.objects.filter(slug=flow_slug1).first()
        self.assertEqual(flow.slug, flow_slug1)
        flow: Flow = Flow.objects.filter(slug=flow_slug2).first()
        self.assertEqual(flow.slug, flow_slug2)

    def test_conditions_not_fulfilled(self):
        """Test conditions not fulfilled"""
        flow_slug1 = generate_id()
        flow_slug2 = generate_id()
        import_yaml = load_yaml_fixture(
            "fixtures/conditions_not_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
        )

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure objects do not exist
        self.assertFalse(Flow.objects.filter(slug=flow_slug1))
        self.assertFalse(Flow.objects.filter(slug=flow_slug2))
@@ -1,82 +0,0 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase

from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.importer import Importer
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id


class TestBlueprintsV1State(TransactionTestCase):
    """Test Blueprints state attribute"""

    def test_state_present(self):
        """Test state present"""
        flow_slug = generate_id()
        import_yaml = load_yaml_fixture("fixtures/state_present.yaml", id=flow_slug)

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure object exists
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.slug, flow_slug)

        # Update object
        flow.title = "bar"
        flow.save()

        flow.refresh_from_db()
        self.assertEqual(flow.title, "bar")

        # Ensure importer updates it
        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.title, "foo")

    def test_state_created(self):
        """Test state created"""
        flow_slug = generate_id()
        import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure object exists
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.slug, flow_slug)

        # Update object
        flow.title = "bar"
        flow.save()

        flow.refresh_from_db()
        self.assertEqual(flow.title, "bar")

        # Ensure importer doesn't update it
        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.title, "bar")

    def test_state_absent(self):
        """Test state absent"""
        flow_slug = generate_id()
        import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure object exists
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.slug, flow_slug)

        import_yaml = load_yaml_fixture("fixtures/state_absent.yaml", id=flow_slug)
        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertIsNone(flow)
@@ -1,157 +0,0 @@
"""Test blueprints v1 tasks"""
from hashlib import sha512
from tempfile import NamedTemporaryFile, mkdtemp

from django.test import TransactionTestCase
from yaml import dump

from authentik.blueprints.models import BlueprintInstance, BlueprintInstanceStatus
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discover, blueprints_find
from authentik.lib.config import CONFIG
from authentik.lib.generators import generate_id

TMP = mkdtemp("authentik-blueprints")


class TestBlueprintsV1Tasks(TransactionTestCase):
    """Test Blueprints v1 Tasks"""

    @CONFIG.patch("blueprints_dir", TMP)
    def test_invalid_file_syntax(self):
        """Test syntactically invalid file"""
        with NamedTemporaryFile(suffix=".yaml", dir=TMP) as file:
            file.write(b"{")
            file.flush()
            blueprints = blueprints_find()
            self.assertEqual(blueprints, [])

    @CONFIG.patch("blueprints_dir", TMP)
    def test_invalid_file_version(self):
        """Test invalid file"""
        with NamedTemporaryFile(suffix=".yaml", dir=TMP) as file:
            file.write(b"version: 2")
            file.flush()
            blueprints = blueprints_find()
            self.assertEqual(blueprints, [])

    @CONFIG.patch("blueprints_dir", TMP)
    def test_valid(self):
        """Test valid file"""
        blueprint_id = generate_id()
        with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
            file.write(
                dump(
                    {
                        "version": 1,
                        "entries": [],
                        "metadata": {
                            "name": blueprint_id,
                        },
                    }
                )
            )
            file.seek(0)
            file_hash = sha512(file.read().encode()).hexdigest()
            file.flush()
            blueprints_discover()  # pylint: disable=no-value-for-parameter
            instance = BlueprintInstance.objects.filter(name=blueprint_id).first()
            self.assertEqual(instance.last_applied_hash, file_hash)
            self.assertEqual(
                instance.metadata,
                {
                    "name": blueprint_id,
                    "labels": {},
                },
            )

    @CONFIG.patch("blueprints_dir", TMP)
    def test_valid_updated(self):
        """Test valid file"""
        BlueprintInstance.objects.filter(name="foo").delete()
        with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
            file.write(
                dump(
                    {
                        "version": 1,
                        "entries": [],
                        "metadata": {
                            "name": "foo",
                        },
                    }
                )
            )
            file.flush()
            blueprints_discover()  # pylint: disable=no-value-for-parameter
            blueprint = BlueprintInstance.objects.filter(name="foo").first()
            self.assertEqual(
                blueprint.last_applied_hash,
                (
                    "b86ec439b3857350714f070d2833490e736d9155d3d97b2cac13f3b352223e5a"
                    "1adbf8ec56fa616d46090cc4773ff9e46c4e509fde96b97de87dd21fa329ca1a"
                ),
            )
            self.assertEqual(blueprint.metadata, {"labels": {}, "name": "foo"})
            file.write(
                dump(
                    {
                        "version": 1,
                        "entries": [],
                        "metadata": {
                            "name": "foo",
                            "labels": {
                                "foo": "bar",
                            },
                        },
                    }
                )
            )
            file.flush()
            blueprints_discover()  # pylint: disable=no-value-for-parameter
            blueprint.refresh_from_db()
            self.assertEqual(
                blueprint.last_applied_hash,
                (
                    "87b68b10131d2c9751ed308bba38f04734b9e2cdf8532ed617bc52979b063c49"
                    "2564f33f3d20ab9d5f0fd9e6eb77a13942e060199f147789cb7afab9690e72b5"
                ),
            )
            self.assertEqual(
                blueprint.metadata,
                {
                    "name": "foo",
                    "labels": {"foo": "bar"},
                },
            )

    @CONFIG.patch("blueprints_dir", TMP)
    def test_valid_disabled(self):
        """Test valid file"""
        with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
            file.write(
                dump(
                    {
                        "version": 1,
                        "entries": [],
                    }
                )
            )
            file.flush()
            instance: BlueprintInstance = BlueprintInstance.objects.create(
                name=generate_id(),
                path=file.name,
                enabled=False,
                status=BlueprintInstanceStatus.UNKNOWN,
            )
            instance.refresh_from_db()
            self.assertEqual(instance.last_applied_hash, "")
            self.assertEqual(
                instance.status,
                BlueprintInstanceStatus.UNKNOWN,
            )
            apply_blueprint(instance.pk)  # pylint: disable=no-value-for-parameter
            instance.refresh_from_db()
            self.assertEqual(instance.last_applied_hash, "")
            self.assertEqual(
                instance.status,
                BlueprintInstanceStatus.UNKNOWN,
            )
@@ -1,575 +0,0 @@
"""transfer common classes"""
from collections import OrderedDict
from copy import copy
from dataclasses import asdict, dataclass, field, is_dataclass
from enum import Enum
from functools import reduce
from operator import ixor
from os import getenv
from typing import Any, Iterable, Literal, Mapping, Optional, Union
from uuid import UUID

from deepmerge import always_merger
from django.apps import apps
from django.db.models import Model, Q
from rest_framework.fields import Field
from rest_framework.serializers import Serializer
from yaml import SafeDumper, SafeLoader, ScalarNode, SequenceNode

from authentik.lib.models import SerializerModel
from authentik.lib.sentry import SentryIgnoredException
from authentik.policies.models import PolicyBindingModel


def get_attrs(obj: SerializerModel) -> dict[str, Any]:
    """Get object's attributes via their serializer, and convert it to a normal dict"""
    serializer: Serializer = obj.serializer(obj)
    data = dict(serializer.data)

    for field_name, _field in serializer.fields.items():
        _field: Field
        if field_name not in data:
            continue
        if _field.read_only:
            data.pop(field_name, None)
        if _field.get_initial() == data.get(field_name, None):
            data.pop(field_name, None)
        if field_name.endswith("_set"):
            data.pop(field_name, None)
    return data


@dataclass
class BlueprintEntryState:
    """State of a single instance"""

    instance: Optional[Model] = None


class BlueprintEntryDesiredState(Enum):
    """State an entry should be reconciled to"""

    ABSENT = "absent"
    PRESENT = "present"
    CREATED = "created"


@dataclass
class BlueprintEntry:
    """Single entry of a blueprint"""

    model: Union[str, "YAMLTag"]
    state: Union[BlueprintEntryDesiredState, "YAMLTag"] = field(
        default=BlueprintEntryDesiredState.PRESENT
    )
    conditions: list[Any] = field(default_factory=list)
    identifiers: dict[str, Any] = field(default_factory=dict)
    attrs: Optional[dict[str, Any]] = field(default_factory=dict)

    id: Optional[str] = None

    _state: BlueprintEntryState = field(default_factory=BlueprintEntryState)

    def __post_init__(self, *args, **kwargs) -> None:
        self.__tag_contexts: list["YAMLTagContext"] = []

    @staticmethod
    def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry":
        """Convert a SerializerModel instance to a blueprint Entry"""
        identifiers = {
            "pk": model.pk,
        }
        all_attrs = get_attrs(model)

        for extra_identifier_name in extra_identifier_names:
            identifiers[extra_identifier_name] = all_attrs.pop(extra_identifier_name, None)
        return BlueprintEntry(
            identifiers=identifiers,
            model=f"{model._meta.app_label}.{model._meta.model_name}",
            attrs=all_attrs,
        )

    def _get_tag_context(
        self,
        depth: int = 0,
        context_tag_type: Optional[type["YAMLTagContext"] | tuple["YAMLTagContext", ...]] = None,
    ) -> "YAMLTagContext":
        """Get a YAMLTagContext object located at a certain depth in the tag tree"""
        if depth < 0:
            raise ValueError("depth must be a positive number or zero")

        if context_tag_type:
            contexts = [x for x in self.__tag_contexts if isinstance(x, context_tag_type)]
        else:
            contexts = self.__tag_contexts

        try:
            return contexts[-(depth + 1)]
        except IndexError:
            raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}")

    def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any:
        """Check if we have any special tags that need handling"""
        val = copy(value)

        if isinstance(value, YAMLTagContext):
            self.__tag_contexts.append(value)

        if isinstance(value, YAMLTag):
            val = value.resolve(self, blueprint)

        if isinstance(value, dict):
            for key, inner_value in value.items():
                val[key] = self.tag_resolver(inner_value, blueprint)
        if isinstance(value, list):
            for idx, inner_value in enumerate(value):
                val[idx] = self.tag_resolver(inner_value, blueprint)

        if isinstance(value, YAMLTagContext):
            self.__tag_contexts.pop()

        return val

    def get_attrs(self, blueprint: "Blueprint") -> dict[str, Any]:
        """Get attributes of this entry, with all yaml tags resolved"""
        return self.tag_resolver(self.attrs, blueprint)

    def get_identifiers(self, blueprint: "Blueprint") -> dict[str, Any]:
        """Get attributes of this entry, with all yaml tags resolved"""
        return self.tag_resolver(self.identifiers, blueprint)

    def get_state(self, blueprint: "Blueprint") -> BlueprintEntryDesiredState:
        """Get the blueprint state, with yaml tags resolved if present"""
        return BlueprintEntryDesiredState(self.tag_resolver(self.state, blueprint))

    def get_model(self, blueprint: "Blueprint") -> str:
        """Get the blueprint model, with yaml tags resolved if present"""
        return str(self.tag_resolver(self.model, blueprint))

    def check_all_conditions_match(self, blueprint: "Blueprint") -> bool:
        """Check all conditions of this entry match (evaluate to True)"""
        return all(self.tag_resolver(self.conditions, blueprint))


@dataclass
class BlueprintMetadata:
    """Optional blueprint metadata"""

    name: str
    labels: dict[str, str] = field(default_factory=dict)


@dataclass
class Blueprint:
    """Dataclass used for a full export"""

    version: int = field(default=1)
    entries: list[BlueprintEntry] = field(default_factory=list)
    context: dict = field(default_factory=dict)

    metadata: Optional[BlueprintMetadata] = field(default=None)


class YAMLTag:
    """Base class for all YAML Tags"""

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        """Implement yaml tag logic"""
        raise NotImplementedError


class YAMLTagContext:
    """Base class for all YAML Tag Contexts"""

    def get_context(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        """Implement yaml tag context logic"""
        raise NotImplementedError


class KeyOf(YAMLTag):
    """Reference another object by their ID"""

    id_from: str

    def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
        super().__init__()
        self.id_from = node.value

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        for _entry in blueprint.entries:
            if _entry.id == self.id_from and _entry._state.instance:
                # Special handling for PolicyBindingModels, as they'll have a different PK
                # which is used when creating policy bindings
                if (
                    isinstance(_entry._state.instance, PolicyBindingModel)
                    and entry.model.lower() == "authentik_policies.policybinding"
                ):
                    return _entry._state.instance.pbm_uuid
                return _entry._state.instance.pk
        raise EntryInvalidError(
            f"KeyOf: failed to find entry with `id` of `{self.id_from}` and a model instance"
        )


class Env(YAMLTag):
    """Lookup environment variable with optional default"""

    key: str
    default: Optional[Any]

    def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
        super().__init__()
        self.default = None
        if isinstance(node, ScalarNode):
            self.key = node.value
        if isinstance(node, SequenceNode):
            self.key = node.value[0].value
            self.default = node.value[1].value

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        return getenv(self.key, self.default)


class Context(YAMLTag):
    """Lookup key from instance context"""

    key: str
    default: Optional[Any]

    def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
        super().__init__()
        self.default = None
        if isinstance(node, ScalarNode):
            self.key = node.value
        if isinstance(node, SequenceNode):
            self.key = node.value[0].value
            self.default = node.value[1].value

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        value = self.default
        if self.key in blueprint.context:
            value = blueprint.context[self.key]
        return value


class Format(YAMLTag):
    """Format a string"""

    format_string: str
    args: list[Any]

    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
        super().__init__()
        self.format_string = node.value[0].value
        self.args = []
        for raw_node in node.value[1:]:
            self.args.append(loader.construct_object(raw_node))

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        args = []
        for arg in self.args:
            if isinstance(arg, YAMLTag):
                args.append(arg.resolve(entry, blueprint))
            else:
                args.append(arg)

        try:
            return self.format_string % tuple(args)
        except TypeError as exc:
            raise EntryInvalidError(exc)


class Find(YAMLTag):
    """Find any object"""

    model_name: str | YAMLTag
    conditions: list[list]

    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
        super().__init__()
        self.model_name = loader.construct_object(node.value[0])
        self.conditions = []
        for raw_node in node.value[1:]:
            values = []
            for node_values in raw_node.value:
                values.append(loader.construct_object(node_values))
            self.conditions.append(values)

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        if isinstance(self.model_name, YAMLTag):
            model_name = self.model_name.resolve(entry, blueprint)
        else:
            model_name = self.model_name

        model_class = apps.get_model(*model_name.split("."))

        query = Q()
        for cond in self.conditions:
            if isinstance(cond[0], YAMLTag):
                query_key = cond[0].resolve(entry, blueprint)
            else:
                query_key = cond[0]
            if isinstance(cond[1], YAMLTag):
                query_value = cond[1].resolve(entry, blueprint)
            else:
                query_value = cond[1]
            query &= Q(**{query_key: query_value})
        instance = model_class.objects.filter(query).first()
        if instance:
            return instance.pk
        return None


class Condition(YAMLTag):
    """Convert all values to a single boolean"""

    mode: Literal["AND", "NAND", "OR", "NOR", "XOR", "XNOR"]
    args: list[Any]

    _COMPARATORS = {
        # Using all and any here instead of from operator import iand, ior
        # to improve performance
        "AND": all,
        "NAND": lambda args: not all(args),
        "OR": any,
        "NOR": lambda args: not any(args),
        "XOR": lambda args: reduce(ixor, args) if len(args) > 1 else args[0],
        "XNOR": lambda args: not (reduce(ixor, args) if len(args) > 1 else args[0]),
    }

    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
        super().__init__()
        self.mode = node.value[0].value
        self.args = []
        for raw_node in node.value[1:]:
            self.args.append(loader.construct_object(raw_node))

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        args = []
        for arg in self.args:
            if isinstance(arg, YAMLTag):
                args.append(arg.resolve(entry, blueprint))
            else:
                args.append(arg)

        if not args:
            raise EntryInvalidError("At least one value is required after mode selection.")

        try:
            comparator = self._COMPARATORS[self.mode.upper()]
            return comparator(tuple(bool(x) for x in args))
        except (TypeError, KeyError) as exc:
            raise EntryInvalidError(exc)


class If(YAMLTag):
    """Select YAML to use based on condition"""

    condition: Any
    when_true: Any
    when_false: Any

    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
        super().__init__()
        self.condition = loader.construct_object(node.value[0])
        self.when_true = loader.construct_object(node.value[1])
        self.when_false = loader.construct_object(node.value[2])

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        if isinstance(self.condition, YAMLTag):
            condition = self.condition.resolve(entry, blueprint)
        else:
            condition = self.condition

        try:
            return entry.tag_resolver(
                self.when_true if condition else self.when_false,
                blueprint,
            )
        except TypeError as exc:
            raise EntryInvalidError(exc)


class Enumerate(YAMLTag, YAMLTagContext):
    """Iterate over an iterable."""

    iterable: YAMLTag | Iterable
    item_body: Any
    output_body: Literal["SEQ", "MAP"]

    _OUTPUT_BODIES = {
        "SEQ": (list, lambda a, b: [*a, b]),
        "MAP": (
            dict,
            lambda a, b: always_merger.merge(
                a, {b[0]: b[1]} if isinstance(b, (tuple, list)) else b
            ),
        ),
    }

    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
        super().__init__()
        self.iterable = loader.construct_object(node.value[0])
        self.output_body = node.value[1].value
        self.item_body = loader.construct_object(node.value[2])
        self.__current_context: tuple[Any, Any] = tuple()

    def get_context(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        return self.__current_context

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        if isinstance(self.iterable, EnumeratedItem) and self.iterable.depth == 0:
            raise EntryInvalidError(
                f"{self.__class__.__name__} tag's iterable references this tag's context. "
                "This is a noop. Check you are setting depth bigger than 0."
            )

        if isinstance(self.iterable, YAMLTag):
            iterable = self.iterable.resolve(entry, blueprint)
        else:
            iterable = self.iterable

        if not isinstance(iterable, Iterable):
            raise EntryInvalidError(
                f"{self.__class__.__name__}'s iterable must be an iterable "
                "such as a sequence or a mapping"
            )

        if isinstance(iterable, Mapping):
            iterable = tuple(iterable.items())
        else:
            iterable = tuple(enumerate(iterable))

        try:
            output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()]
        except KeyError as exc:
            raise EntryInvalidError(exc)

        result = output_class()

        self.__current_context = tuple()

        try:
            for item in iterable:
                self.__current_context = item
                resolved_body = entry.tag_resolver(self.item_body, blueprint)
                result = add_fn(result, resolved_body)
                if not isinstance(result, output_class):
                    raise EntryInvalidError(
                        f"Invalid {self.__class__.__name__} item found: {resolved_body}"
                    )
        finally:
            self.__current_context = tuple()

        return result


class EnumeratedItem(YAMLTag):
    """Get the current item value and index provided by an Enumerate tag context"""

    depth: int

    _SUPPORTED_CONTEXT_TAGS = (Enumerate,)

    def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
        super().__init__()
        self.depth = int(node.value)

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        try:
            context_tag: Enumerate = entry._get_tag_context(
                depth=self.depth,
                context_tag_type=EnumeratedItem._SUPPORTED_CONTEXT_TAGS,
            )
        except ValueError as exc:
            if self.depth == 0:
                raise EntryInvalidError(
                    f"{self.__class__.__name__} tags are only usable "
                    f"inside an {Enumerate.__name__} tag"
                )

            raise EntryInvalidError(f"{self.__class__.__name__} tag: {exc}")

        return context_tag.get_context(entry, blueprint)


class Index(EnumeratedItem):
    """Get the current item index provided by an Enumerate tag context"""

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        context = super().resolve(entry, blueprint)

        try:
            return context[0]
        except IndexError:  # pragma: no cover
            raise EntryInvalidError(f"Empty/invalid context: {context}")


class Value(EnumeratedItem):
    """Get the current item value provided by an Enumerate tag context"""

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        context = super().resolve(entry, blueprint)

        try:
            return context[1]
        except IndexError:  # pragma: no cover
            raise EntryInvalidError(f"Empty/invalid context: {context}")


class BlueprintDumper(SafeDumper):
    """Dump dataclasses to yaml"""

    default_flow_style = False

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.add_representer(UUID, lambda self, data: self.represent_str(str(data)))
        self.add_representer(OrderedDict, lambda self, data: self.represent_dict(dict(data)))
        self.add_representer(Enum, lambda self, data: self.represent_str(data.value))
        self.add_representer(
            BlueprintEntryDesiredState, lambda self, data: self.represent_str(data.value)
        )
        self.add_representer(None, lambda self, data: self.represent_str(str(data)))

    def ignore_aliases(self, data):
        """Don't use any YAML anchors"""
        return True

    def represent(self, data) -> None:
        if is_dataclass(data):

            def factory(items):
                final_dict = dict(items)
                final_dict.pop("_state", None)
                return final_dict

            data = asdict(data, dict_factory=factory)
        return super().represent(data)


class BlueprintLoader(SafeLoader):
    """Loader for blueprints with custom tag support"""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.add_constructor("!KeyOf", KeyOf)
        self.add_constructor("!Find", Find)
        self.add_constructor("!Context", Context)
        self.add_constructor("!Format", Format)
        self.add_constructor("!Condition", Condition)
        self.add_constructor("!If", If)
        self.add_constructor("!Env", Env)
        self.add_constructor("!Enumerate", Enumerate)
        self.add_constructor("!Value", Value)
        self.add_constructor("!Index", Index)


class EntryInvalidError(SentryIgnoredException):
    """Error raised when an entry is invalid"""

    serializer_errors: Optional[dict]

    def __init__(self, *args: object, serializer_errors: Optional[dict] = None) -> None:
        super().__init__(*args)
        self.serializer_errors = serializer_errors
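For orientation, a minimal sketch (not part of this diff) of how the loader and tag classes above fit together; it assumes a trivial group entry and only uses names defined in the file:

    from yaml import load

    from authentik.blueprints.v1.common import Blueprint, BlueprintEntry, BlueprintLoader

    # BlueprintLoader turns "!Format" into a Format tag instance instead of a plain string
    parsed = load('name: !Format ["prefix-%s", "foo"]', BlueprintLoader)
    entry = BlueprintEntry(model="authentik_core.group", attrs=parsed)
    blueprint = Blueprint(entries=[entry])

    # get_attrs() walks the attrs dict and resolves every YAMLTag it finds
    print(entry.get_attrs(blueprint))  # {"name": "prefix-foo"}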
@@ -1,149 +0,0 @@
"""Blueprint exporter"""
from typing import Iterable
from uuid import UUID

from django.apps import apps
from django.contrib.auth import get_user_model
from django.db.models import Model, Q, QuerySet
from django.utils.timezone import now
from django.utils.translation import gettext as _
from guardian.shortcuts import get_anonymous_user
from yaml import dump

from authentik.blueprints.v1.common import (
    Blueprint,
    BlueprintDumper,
    BlueprintEntry,
    BlueprintMetadata,
)
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_GENERATED
from authentik.events.models import Event
from authentik.flows.models import Flow, FlowStageBinding, Stage
from authentik.policies.models import Policy, PolicyBinding
from authentik.stages.prompt.models import PromptStage


class Exporter:
    """Export flow with attached stages into yaml"""

    excluded_models: list[type[Model]] = []

    def __init__(self):
        self.excluded_models = [
            Event,
        ]

    def get_entries(self) -> Iterable[BlueprintEntry]:
        """Get blueprint entries"""
        for model in apps.get_models():
            if not is_model_allowed(model):
                continue
            if model in self.excluded_models:
                continue
            for obj in self.get_model_instances(model):
                yield BlueprintEntry.from_model(obj)

    def get_model_instances(self, model: type[Model]) -> QuerySet:
        """Return a queryset for `model`. Can be used to filter some
        objects on some models"""
        if model == get_user_model():
            return model.objects.exclude(pk=get_anonymous_user().pk)
        return model.objects.all()

    def _pre_export(self, blueprint: Blueprint):
        """Hook to run anything pre-export"""

    def export(self) -> Blueprint:
        """Create a list of all objects and create a blueprint"""
        blueprint = Blueprint()
        self._pre_export(blueprint)
        blueprint.metadata = BlueprintMetadata(
            name=_("authentik Export - %(date)s" % {"date": str(now())}),
            labels={
                LABEL_AUTHENTIK_GENERATED: "true",
            },
        )
        blueprint.entries = list(self.get_entries())
        return blueprint

    def export_to_string(self) -> str:
        """Call export and convert it to yaml"""
        blueprint = self.export()
        return dump(blueprint, Dumper=BlueprintDumper)


class FlowExporter(Exporter):
    """Exporter customised to only return objects related to `flow`"""

    flow: Flow
    with_policies: bool
    with_stage_prompts: bool

    pbm_uuids: list[UUID]

    def __init__(self, flow: Flow):
        super().__init__()
        self.flow = flow
        self.with_policies = True
        self.with_stage_prompts = True

    def _pre_export(self, blueprint: Blueprint):
        if not self.with_policies:
            return
        self.pbm_uuids = [self.flow.pbm_uuid]
        self.pbm_uuids += FlowStageBinding.objects.filter(target=self.flow).values_list(
            "pbm_uuid", flat=True
        )

    def walk_stages(self) -> Iterable[BlueprintEntry]:
        """Convert all stages attached to self.flow into BlueprintEntry objects"""
        stages = Stage.objects.filter(flow=self.flow).select_related().select_subclasses()
        for stage in stages:
            if isinstance(stage, PromptStage):
                pass
            yield BlueprintEntry.from_model(stage, "name")

    def walk_stage_bindings(self) -> Iterable[BlueprintEntry]:
        """Convert all bindings attached to self.flow into BlueprintEntry objects"""
        bindings = FlowStageBinding.objects.filter(target=self.flow).select_related()
        for binding in bindings:
            yield BlueprintEntry.from_model(binding, "target", "stage", "order")

    def walk_policies(self) -> Iterable[BlueprintEntry]:
        """Walk over all policies. This is done at the beginning of the export for stages that have
        a direct foreign key to a policy."""
        # Special case for PromptStage as that has a direct M2M to policy, we have to ensure
        # all policies referenced in there we also include here
        prompt_stages = PromptStage.objects.filter(flow=self.flow).values_list("pk", flat=True)
        query = Q(bindings__in=self.pbm_uuids) | Q(promptstage__in=prompt_stages)
        policies = Policy.objects.filter(query).select_related()
        for policy in policies:
            yield BlueprintEntry.from_model(policy)

    def walk_policy_bindings(self) -> Iterable[BlueprintEntry]:
        """Walk over all policybindings relative to us. This is run at the end of the export, as
        we are sure all objects exist now."""
        bindings = PolicyBinding.objects.filter(target__in=self.pbm_uuids).select_related()
        for binding in bindings:
            yield BlueprintEntry.from_model(binding, "policy", "target", "order")

    def walk_stage_prompts(self) -> Iterable[BlueprintEntry]:
        """Walk over all prompts associated with any PromptStages"""
        prompt_stages = PromptStage.objects.filter(flow=self.flow)
        for stage in prompt_stages:
            for prompt in stage.fields.all():
                yield BlueprintEntry.from_model(prompt)

    def get_entries(self) -> Iterable[BlueprintEntry]:
        entries = []
        entries.append(BlueprintEntry.from_model(self.flow, "slug"))
        if self.with_stage_prompts:
            entries.extend(self.walk_stage_prompts())
        if self.with_policies:
            entries.extend(self.walk_policies())
        entries.extend(self.walk_stages())
        entries.extend(self.walk_stage_bindings())
        if self.with_policies:
            entries.extend(self.walk_policy_bindings())
        return entries
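As a rough illustration of the export/import round trip (a sketch mirroring the tests above, assuming at least one Flow already exists):

    from authentik.blueprints.v1.exporter import FlowExporter
    from authentik.blueprints.v1.importer import Importer
    from authentik.flows.models import Flow

    flow = Flow.objects.first()  # hypothetical existing flow
    export_yaml = FlowExporter(flow).export_to_string()

    # Importer.validate() returns a tuple whose first element is the success flag
    importer = Importer(export_yaml)
    valid, _logs = importer.validate()
    if valid:
        importer.apply()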
@ -1,296 +0,0 @@
|
||||
"""Blueprint importer"""
|
||||
from contextlib import contextmanager
|
||||
from copy import deepcopy
|
||||
from typing import Any, Optional
|
||||
|
||||
from dacite.config import Config
|
||||
from dacite.core import from_dict
|
||||
from dacite.exceptions import DaciteError
|
||||
from deepmerge import always_merger
|
||||
from django.db import transaction
|
||||
from django.db.models import Model
|
||||
from django.db.models.query_utils import Q
|
||||
from django.db.utils import IntegrityError
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.serializers import BaseSerializer, Serializer
|
||||
from structlog.stdlib import BoundLogger, get_logger
|
||||
from structlog.testing import capture_logs
|
||||
from structlog.types import EventDict
|
||||
from yaml import load
|
||||
|
||||
from authentik.blueprints.v1.common import (
|
||||
Blueprint,
|
||||
BlueprintEntry,
|
||||
BlueprintEntryDesiredState,
|
||||
BlueprintEntryState,
|
||||
BlueprintLoader,
|
||||
EntryInvalidError,
|
||||
)
|
||||
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
|
||||
from authentik.core.models import (
|
||||
AuthenticatedSession,
|
||||
PropertyMapping,
|
||||
Provider,
|
||||
Source,
|
||||
UserSourceConnection,
|
||||
)
|
||||
from authentik.flows.models import FlowToken, Stage
|
||||
from authentik.lib.models import SerializerModel
|
||||
from authentik.outposts.models import OutpostServiceConnection
|
||||
from authentik.policies.models import Policy, PolicyBindingModel
|
||||
|
||||
|
||||
def is_model_allowed(model: type[Model]) -> bool:
|
||||
"""Check if model is allowed"""
|
||||
# pylint: disable=imported-auth-user
|
||||
from django.contrib.auth.models import Group as DjangoGroup
|
||||
from django.contrib.auth.models import User as DjangoUser
|
||||
|
||||
excluded_models = (
|
||||
DjangoUser,
|
||||
DjangoGroup,
|
||||
# Base classes
|
||||
Provider,
|
||||
Source,
|
||||
PropertyMapping,
|
||||
UserSourceConnection,
|
||||
Stage,
|
||||
OutpostServiceConnection,
|
||||
Policy,
|
||||
PolicyBindingModel,
|
||||
# Classes that have other dependencies
|
||||
AuthenticatedSession,
|
||||
# Classes which are only internally managed
|
||||
FlowToken,
|
||||
)
|
||||
return model not in excluded_models and issubclass(model, (SerializerModel, BaseMetaModel))
|
||||
|
||||
|
||||
@contextmanager
|
||||
def transaction_rollback():
|
||||
"""Enters an atomic transaction and always triggers a rollback at the end of the block."""
|
||||
atomic = transaction.atomic()
|
||||
# pylint: disable=unnecessary-dunder-call
|
||||
atomic.__enter__()
|
||||
yield
|
||||
atomic.__exit__(IntegrityError, None, None)
|
||||
|
||||
|
||||
class Importer:
|
||||
"""Import Blueprint from YAML"""
|
||||
|
||||
logger: BoundLogger
|
||||
|
||||
def __init__(self, yaml_input: str, context: Optional[dict] = None):
|
||||
self.__pk_map: dict[Any, Model] = {}
|
||||
self.logger = get_logger()
|
||||
import_dict = load(yaml_input, BlueprintLoader)
|
||||
try:
|
||||
self.__import = from_dict(
|
||||
Blueprint, import_dict, config=Config(cast=[BlueprintEntryDesiredState])
|
||||
)
|
||||
except DaciteError as exc:
|
||||
raise EntryInvalidError from exc
|
||||
ctx = {}
|
||||
always_merger.merge(ctx, self.__import.context)
|
||||
if context:
|
||||
always_merger.merge(ctx, context)
|
||||
self.__import.context = ctx
|
||||
|
||||
@property
|
||||
def blueprint(self) -> Blueprint:
|
||||
"""Get imported blueprint"""
|
||||
return self.__import
|
||||
|
||||
def __update_pks_for_attrs(self, attrs: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Replace any value if it is a known primary key of an other object"""
|
||||
|
||||
def updater(value) -> Any:
|
||||
if value in self.__pk_map:
|
||||
self.logger.debug("updating reference in entry", value=value)
|
||||
return self.__pk_map[value]
|
||||
return value
|
||||
|
||||
for key, value in attrs.items():
|
||||
try:
|
||||
if isinstance(value, dict):
|
||||
for idx, _inner_key in enumerate(value):
|
||||
value[_inner_key] = updater(value[_inner_key])
|
||||
elif isinstance(value, list):
|
||||
for idx, _inner_value in enumerate(value):
|
||||
attrs[key][idx] = updater(_inner_value)
|
||||
else:
|
||||
attrs[key] = updater(value)
|
||||
except TypeError:
|
||||
continue
|
||||
return attrs
|
||||
|
||||
def __query_from_identifier(self, attrs: dict[str, Any]) -> Q:
|
||||
"""Generate an or'd query from all identifiers in an entry"""
|
||||
# Since identifiers can also be pk-references to other objects (see FlowStageBinding)
|
||||
# we have to ensure those references are also replaced
|
||||
main_query = Q()
|
||||
if "pk" in attrs:
|
||||
main_query = Q(pk=attrs["pk"])
|
||||
sub_query = Q()
|
||||
for identifier, value in attrs.items():
|
||||
if identifier == "pk":
|
||||
continue
|
||||
if isinstance(value, dict):
|
||||
sub_query &= Q(**{f"{identifier}__contains": value})
|
||||
else:
|
||||
sub_query &= Q(**{identifier: value})
|
||||
|
||||
return main_query | sub_query

    # pylint: disable-msg=too-many-locals
    def _validate_single(self, entry: BlueprintEntry) -> Optional[BaseSerializer]:
        """Validate a single entry"""
        if not entry.check_all_conditions_match(self.__import):
            self.logger.debug("One or more conditions of this entry are not fulfilled, skipping")
            return None

        model_app_label, model_name = entry.get_model(self.__import).split(".")
        model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
        # Don't use isinstance since we don't want to check for inheritance
        if not is_model_allowed(model):
            raise EntryInvalidError(f"Model {model} not allowed")
        if issubclass(model, BaseMetaModel):
            serializer_class: type[Serializer] = model.serializer()
            serializer = serializer_class(data=entry.get_attrs(self.__import))
            try:
                serializer.is_valid(raise_exception=True)
            except ValidationError as exc:
                raise EntryInvalidError(
                    f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
                ) from exc
            return serializer

        # If we try to validate without referencing a possible instance
        # we'll get a duplicate error, hence we load the model here and return
        # the full serializer for later usage
        # Because a model might have multiple unique columns, we chain all identifiers together
        # to create an OR query.
        updated_identifiers = self.__update_pks_for_attrs(entry.get_identifiers(self.__import))
        for key, value in list(updated_identifiers.items()):
            if isinstance(value, dict) and "pk" in value:
                del updated_identifiers[key]
                updated_identifiers[f"{key}"] = value["pk"]

        query = self.__query_from_identifier(updated_identifiers)
        if not query:
            raise EntryInvalidError("No or invalid identifiers")

        existing_models = model.objects.filter(query)

        serializer_kwargs = {}
        model_instance = existing_models.first()
        if not isinstance(model(), BaseMetaModel) and model_instance:
            if entry.get_state(self.__import) == BlueprintEntryDesiredState.CREATED:
                self.logger.debug("instance exists, skipping")
                return None
            self.logger.debug(
                "initialise serializer with instance",
                model=model,
                instance=model_instance,
                pk=model_instance.pk,
            )
            serializer_kwargs["instance"] = model_instance
            serializer_kwargs["partial"] = True
        else:
            self.logger.debug(
                "initialised new serializer instance", model=model, **updated_identifiers
            )
            model_instance = model()
            # pk needs to be set on the model instance otherwise a new one will be generated
            if "pk" in updated_identifiers:
                model_instance.pk = updated_identifiers["pk"]
            serializer_kwargs["instance"] = model_instance
        try:
            full_data = self.__update_pks_for_attrs(entry.get_attrs(self.__import))
        except ValueError as exc:
            raise EntryInvalidError(exc) from exc
        always_merger.merge(full_data, updated_identifiers)
        serializer_kwargs["data"] = full_data

        serializer: Serializer = model().serializer(**serializer_kwargs)
        try:
            serializer.is_valid(raise_exception=True)
        except ValidationError as exc:
            raise EntryInvalidError(
                f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
            ) from exc
        return serializer

    def apply(self) -> bool:
        """Apply (create/update) models yaml, in database transaction"""
        try:
            with transaction.atomic():
                if not self._apply_models():
                    self.logger.debug("Reverting changes due to error")
                    raise IntegrityError
        except IntegrityError:
            return False
        else:
            self.logger.debug("Committing changes")
        return True

    def _apply_models(self) -> bool:
        """Apply (create/update) models yaml"""
        self.__pk_map = {}
        for entry in self.__import.entries:
            model_app_label, model_name = entry.get_model(self.__import).split(".")
            try:
                model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
            except LookupError:
                self.logger.warning(
                    "app or model does not exist", app=model_app_label, model=model_name
                )
                return False
            # Validate each single entry
            try:
                serializer = self._validate_single(entry)
            except EntryInvalidError as exc:
                self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
                return False
            if not serializer:
                continue

            state = entry.get_state(self.__import)
            if state in [
                BlueprintEntryDesiredState.PRESENT,
                BlueprintEntryDesiredState.CREATED,
            ]:
                model = serializer.save()
                if "pk" in entry.identifiers:
                    self.__pk_map[entry.identifiers["pk"]] = model.pk
                entry._state = BlueprintEntryState(model)
                self.logger.debug("updated model", model=model)
            elif state == BlueprintEntryDesiredState.ABSENT:
                instance: Optional[Model] = serializer.instance
                if instance.pk:
                    instance.delete()
                    self.logger.debug("deleted model", mode=instance)
                    continue
                self.logger.debug("entry to delete with no instance, skipping")
        return True

    def validate(self) -> tuple[bool, list[EventDict]]:
        """Validate loaded blueprint export, ensure all models are allowed
        and serializers have no errors"""
        self.logger.debug("Starting blueprint import validation")
        orig_import = deepcopy(self.__import)
        if self.__import.version != 1:
            self.logger.warning("Invalid blueprint version")
            return False, []
        with (
            transaction_rollback(),
            capture_logs() as logs,
        ):
            successful = self._apply_models()
            if not successful:
                self.logger.debug("Blueprint validation failed")
        for log in logs:
            getattr(self.logger, log.get("log_level"))(**log)
        self.__import = orig_import
        return successful, logs
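
A short usage sketch of this importer; the YAML below is a made-up minimal blueprint, not one shipped with authentik:

    RAW = """
    version: 1
    entries:
      - model: authentik_flows.flow
        identifiers:
          slug: example-flow
        attrs:
          name: Example flow
          title: Example flow
          designation: authentication
    """
    importer = Importer(RAW, context={"env": "example"})
    valid, logs = importer.validate()  # dry-run inside transaction_rollback()
    if valid:
        importer.apply()  # real, committed transaction
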
@ -1,5 +0,0 @@
"""Blueprint labels"""

LABEL_AUTHENTIK_SYSTEM = "blueprints.goauthentik.io/system"
LABEL_AUTHENTIK_INSTANTIATE = "blueprints.goauthentik.io/instantiate"
LABEL_AUTHENTIK_GENERATED = "blueprints.goauthentik.io/generated"
@ -1,60 +0,0 @@
"""Apply Blueprint meta model"""
from typing import TYPE_CHECKING

from rest_framework.exceptions import ValidationError
from rest_framework.fields import BooleanField, JSONField
from structlog.stdlib import get_logger

from authentik.blueprints.v1.meta.registry import BaseMetaModel, MetaResult, registry
from authentik.core.api.utils import PassiveSerializer, is_dict

if TYPE_CHECKING:
    from authentik.blueprints.models import BlueprintInstance

LOGGER = get_logger()


class ApplyBlueprintMetaSerializer(PassiveSerializer):
    """Serializer for meta apply blueprint model"""

    identifiers = JSONField(validators=[is_dict])
    required = BooleanField(default=True)

    # We cannot override `instance` as that will confuse rest_framework
    # and make it attempt to update the instance
    blueprint_instance: "BlueprintInstance"

    def validate(self, attrs):
        from authentik.blueprints.models import BlueprintInstance

        identifiers = attrs["identifiers"]
        required = attrs["required"]
        instance = BlueprintInstance.objects.filter(**identifiers).first()
        if not instance and required:
            raise ValidationError("Required blueprint does not exist")
        self.blueprint_instance = instance
        return super().validate(attrs)

    def create(self, validated_data: dict) -> MetaResult:
        from authentik.blueprints.v1.tasks import apply_blueprint

        if not self.blueprint_instance:
            LOGGER.info("Blueprint does not exist, but not required")
            return MetaResult()
        LOGGER.debug("Applying blueprint from meta model", blueprint=self.blueprint_instance)
        # pylint: disable=no-value-for-parameter
        apply_blueprint(str(self.blueprint_instance.pk))
        return MetaResult()


@registry.register("metaapplyblueprint")
class MetaApplyBlueprint(BaseMetaModel):
    """Meta model to apply another blueprint"""

    @staticmethod
    def serializer() -> ApplyBlueprintMetaSerializer:
        return ApplyBlueprintMetaSerializer

    class Meta:

        abstract = True
@ -1,61 +0,0 @@
"""Base models"""
from django.apps import apps
from django.db.models import Model
from rest_framework.serializers import Serializer


class BaseMetaModel(Model):
    """Base models"""

    @staticmethod
    def serializer() -> Serializer:
        """Serializer similar to SerializerModel, but as a static method since
        this is an abstract model"""
        raise NotImplementedError

    class Meta:

        abstract = True


class MetaResult:
    """Result returned by Meta Models' serializers. Empty class but we can't return none as
    the framework doesn't allow that"""


class MetaModelRegistry:
    """Registry for pseudo meta models"""

    models: dict[str, BaseMetaModel]
    virtual_prefix: str

    def __init__(self, prefix: str) -> None:
        self.models = {}
        self.virtual_prefix = prefix

    def register(self, model_id: str):
        """Register model class under `model_id`"""

        def inner_wrapper(cls):
            self.models[model_id] = cls
            return cls

        return inner_wrapper

    def get_models(self):
        """Wrapper for django's `get_models` to list all models"""
        models = apps.get_models()
        for _, value in self.models.items():
            models.append(value)
        return models

    def get_model(self, app_label: str, model_id: str) -> type[Model]:
        """Get model checks if any virtual models are registered, and falls back
        to actual django models"""
        if app_label.lower() == self.virtual_prefix:
            if model_id.lower() in self.models:
                return self.models[model_id]
        return apps.get_model(app_label, model_id)


registry = MetaModelRegistry("authentik_blueprints")
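
A brief sketch of how this registry resolves models; ExampleMeta is a hypothetical class, everything else is defined above:

    @registry.register("examplemeta")
    class ExampleMeta(BaseMetaModel):
        """Hypothetical meta model registered under the virtual app label"""

        @staticmethod
        def serializer() -> Serializer:
            raise NotImplementedError

        class Meta:

            abstract = True

    # "authentik_blueprints.examplemeta" resolves to the virtual model above,
    # any other app label falls through to Django's apps.get_model():
    assert registry.get_model("authentik_blueprints", "examplemeta") is ExampleMeta
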
@ -1,98 +0,0 @@
"""OCI Client"""
from typing import Any
from urllib.parse import ParseResult, urlparse

from opencontainers.distribution.reggie import (
    NewClient,
    WithDebug,
    WithDefaultName,
    WithDigest,
    WithReference,
    WithUserAgent,
    WithUsernamePassword,
)
from requests.exceptions import RequestException
from structlog import get_logger
from structlog.stdlib import BoundLogger

from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.http import authentik_user_agent

OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"


class OCIException(SentryIgnoredException):
    """OCI-related errors"""


class BlueprintOCIClient:
    """Blueprint OCI Client"""

    url: ParseResult
    sanitized_url: str
    logger: BoundLogger
    ref: str
    client: NewClient

    def __init__(self, url: str) -> None:
        self._parse_url(url)
        self.logger = get_logger().bind(url=self.sanitized_url)

        self.ref = "latest"
        path = self.url.path[1:]
        if ":" in self.url.path:
            path, _, self.ref = path.partition(":")
        self.client = NewClient(
            f"https://{self.url.hostname}",
            WithUserAgent(authentik_user_agent()),
            WithUsernamePassword(self.url.username, self.url.password),
            WithDefaultName(path),
            WithDebug(True),
        )

    def _parse_url(self, url: str):
        self.url = urlparse(url)
        netloc = self.url.netloc
        if "@" in netloc:
            netloc = netloc[netloc.index("@") + 1 :]
        self.sanitized_url = self.url._replace(netloc=netloc).geturl()

    def fetch_manifests(self) -> dict[str, Any]:
        """Fetch manifests for ref"""
        self.logger.info("Fetching OCI manifests for blueprint")
        manifest_request = self.client.NewRequest(
            "GET",
            "/v2/<name>/manifests/<reference>",
            WithReference(self.ref),
        ).SetHeader("Accept", "application/vnd.oci.image.manifest.v1+json")
        try:
            manifest_response = self.client.Do(manifest_request)
            manifest_response.raise_for_status()
        except RequestException as exc:
            raise OCIException(exc) from exc
        manifest = manifest_response.json()
        if "errors" in manifest:
            raise OCIException(manifest["errors"])
        return manifest

    def fetch_blobs(self, manifest: dict[str, Any]):
        """Fetch blob based on manifest info"""
        blob = None
        for layer in manifest.get("layers", []):
            if layer.get("mediaType", "") == OCI_MEDIA_TYPE:
                blob = layer.get("digest")
                self.logger.debug("Found layer with matching media type", blob=blob)
        if not blob:
            raise OCIException("Blob not found")

        blob_request = self.client.NewRequest(
            "GET",
            "/v2/<name>/blobs/<digest>",
            WithDigest(blob),
        )
        try:
            blob_response = self.client.Do(blob_request)
            blob_response.raise_for_status()
            return blob_response.text
        except RequestException as exc:
            raise OCIException(exc) from exc
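
A minimal usage sketch of this client; the registry host, repository path and credentials are placeholders:

    client = BlueprintOCIClient("https://user:token@ghcr.io/example-org/example-blueprint:latest")
    manifest = client.fetch_manifests()
    raw_yaml = client.fetch_blobs(manifest)
    # The returned text is the blueprint YAML, ready for the Importer above.
    Importer(raw_yaml).apply()
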
@ -1,221 +0,0 @@
"""v1 blueprints tasks"""
from dataclasses import asdict, dataclass, field
from hashlib import sha512
from pathlib import Path
from typing import Optional

from dacite.core import from_dict
from django.db import DatabaseError, InternalError, ProgrammingError
from django.utils.text import slugify
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from structlog.stdlib import get_logger
from watchdog.events import (
    FileCreatedEvent,
    FileModifiedEvent,
    FileSystemEvent,
    FileSystemEventHandler,
)
from watchdog.observers import Observer
from yaml import load
from yaml.error import YAMLError

from authentik.blueprints.models import (
    BlueprintInstance,
    BlueprintInstanceStatus,
    BlueprintRetrievalFailed,
)
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, EntryInvalidError
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
from authentik.events.monitored_tasks import (
    MonitoredTask,
    TaskResult,
    TaskResultStatus,
    prefill_task,
)
from authentik.events.utils import sanitize_dict
from authentik.lib.config import CONFIG
from authentik.root.celery import CELERY_APP

LOGGER = get_logger()
_file_watcher_started = False


@dataclass
class BlueprintFile:
    """Basic info about a blueprint file"""

    path: str
    version: int
    hash: str
    last_m: int
    meta: Optional[BlueprintMetadata] = field(default=None)


def start_blueprint_watcher():
    """Start blueprint watcher, if it's not running already."""
    # This function might be called twice since it's called on celery startup
    # pylint: disable=global-statement
    global _file_watcher_started
    if _file_watcher_started:
        return
    observer = Observer()
    observer.schedule(BlueprintEventHandler(), CONFIG.y("blueprints_dir"), recursive=True)
    observer.start()
    _file_watcher_started = True


class BlueprintEventHandler(FileSystemEventHandler):
    """Event handler for blueprint events"""

    def on_any_event(self, event: FileSystemEvent):
        if not isinstance(event, (FileCreatedEvent, FileModifiedEvent)):
            return
        if event.is_directory:
            return
        if isinstance(event, FileCreatedEvent):
            LOGGER.debug("new blueprint file created, starting discovery")
            blueprints_discover.delay()
        if isinstance(event, FileModifiedEvent):
            path = Path(event.src_path)
            root = Path(CONFIG.y("blueprints_dir")).absolute()
            rel_path = str(path.relative_to(root))
            for instance in BlueprintInstance.objects.filter(path=rel_path):
                LOGGER.debug("modified blueprint file, starting apply", instance=instance)
                apply_blueprint.delay(instance.pk.hex)


@CELERY_APP.task(
    throws=(DatabaseError, ProgrammingError, InternalError),
)
def blueprints_find_dict():
    """Find blueprints as `blueprints_find` does, but return a safe dict"""
    blueprints = []
    for blueprint in blueprints_find():
        blueprints.append(sanitize_dict(asdict(blueprint)))
    return blueprints


def blueprints_find():
    """Find blueprints and return valid ones"""
    blueprints = []
    root = Path(CONFIG.y("blueprints_dir"))
    for path in root.glob("**/*.yaml"):
        LOGGER.debug("found blueprint", path=str(path))
        with open(path, "r", encoding="utf-8") as blueprint_file:
            try:
                raw_blueprint = load(blueprint_file.read(), BlueprintLoader)
            except YAMLError as exc:
                raw_blueprint = None
                LOGGER.warning("failed to parse blueprint", exc=exc, path=str(path))
            if not raw_blueprint:
                continue
            metadata = raw_blueprint.get("metadata", None)
            version = raw_blueprint.get("version", 1)
            if version != 1:
                LOGGER.warning("invalid blueprint version", version=version, path=str(path))
                continue
        file_hash = sha512(path.read_bytes()).hexdigest()
        blueprint = BlueprintFile(
            str(path.relative_to(root)), version, file_hash, int(path.stat().st_mtime)
        )
        blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
        blueprints.append(blueprint)
        LOGGER.info(
            "parsed & loaded blueprint",
            hash=file_hash,
            path=str(path),
        )
    return blueprints


@CELERY_APP.task(
    throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
)
@prefill_task
def blueprints_discover(self: MonitoredTask):
    """Find blueprints and check if they need to be created in the database"""
    count = 0
    for blueprint in blueprints_find():
        check_blueprint_v1_file(blueprint)
        count += 1
    self.set_status(
        TaskResult(
            TaskResultStatus.SUCCESSFUL,
            messages=[_("Successfully imported %(count)d files." % {"count": count})],
        )
    )


def check_blueprint_v1_file(blueprint: BlueprintFile):
    """Check if blueprint should be imported"""
    instance: BlueprintInstance = BlueprintInstance.objects.filter(path=blueprint.path).first()
    if (
        blueprint.meta
        and blueprint.meta.labels.get(LABEL_AUTHENTIK_INSTANTIATE, "").lower() == "false"
    ):
        return
    if not instance:
        instance = BlueprintInstance(
            name=blueprint.meta.name if blueprint.meta else str(blueprint.path),
            path=blueprint.path,
            context={},
            status=BlueprintInstanceStatus.UNKNOWN,
            enabled=True,
            managed_models=[],
            metadata={},
        )
        instance.save()
    if instance.last_applied_hash != blueprint.hash:
        apply_blueprint.delay(str(instance.pk))


@CELERY_APP.task(
    bind=True,
    base=MonitoredTask,
)
def apply_blueprint(self: MonitoredTask, instance_pk: str):
    """Apply single blueprint"""
    self.save_on_success = False
    instance: Optional[BlueprintInstance] = None
    try:
        instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first()
        self.set_uid(slugify(instance.name))
        if not instance or not instance.enabled:
            return
        blueprint_content = instance.retrieve()
        file_hash = sha512(blueprint_content.encode()).hexdigest()
        importer = Importer(blueprint_content, instance.context)
        if importer.blueprint.metadata:
            instance.metadata = asdict(importer.blueprint.metadata)
        valid, logs = importer.validate()
        if not valid:
            instance.status = BlueprintInstanceStatus.ERROR
            instance.save()
            self.set_status(TaskResult(TaskResultStatus.ERROR, [x["event"] for x in logs]))
            return
        applied = importer.apply()
        if not applied:
            instance.status = BlueprintInstanceStatus.ERROR
            instance.save()
            self.set_status(TaskResult(TaskResultStatus.ERROR, "Failed to apply"))
            return
        instance.status = BlueprintInstanceStatus.SUCCESSFUL
        instance.last_applied_hash = file_hash
        instance.last_applied = now()
        self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL))
    except (
        DatabaseError,
        ProgrammingError,
        InternalError,
        IOError,
        BlueprintRetrievalFailed,
        EntryInvalidError,
    ) as exc:
        if instance:
            instance.status = BlueprintInstanceStatus.ERROR
        self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
    finally:
        if instance:
            instance.save()
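
A rough sketch of the discovery decision in isolation, i.e. how a changed file hash leads to a re-apply; the path below is a placeholder:

    from hashlib import sha512
    from pathlib import Path

    blueprint_path = Path("/blueprints/example/flows.yaml")
    current_hash = sha512(blueprint_path.read_bytes()).hexdigest()

    instance = BlueprintInstance.objects.filter(path="example/flows.yaml").first()
    if instance and instance.last_applied_hash != current_hash:
        # Same condition check_blueprint_v1_file uses before scheduling the Celery task
        apply_blueprint.delay(str(instance.pk))
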
@ -1,17 +1,12 @@
"""Application API Views"""
from datetime import timedelta
from typing import Optional

from django.core.cache import cache
from django.db.models import QuerySet
from django.db.models.functions import ExtractHour
from django.http.response import HttpResponseBadRequest
from django.shortcuts import get_object_or_404
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
from guardian.shortcuts import get_objects_for_user
from rest_framework.decorators import action
from rest_framework.fields import ReadOnlyField, SerializerMethodField
from rest_framework.fields import ReadOnlyField
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.response import Response
@ -19,21 +14,14 @@ from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter
from structlog.stdlib import get_logger
from structlog.testing import capture_logs

from authentik.admin.api.metrics import CoordinateSerializer
from authentik.admin.api.metrics import CoordinateSerializer, get_events_per_1h
from authentik.api.decorators import permission_required
from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import FilePathSerializer, FileUploadSerializer
from authentik.core.models import Application, User
from authentik.events.models import EventAction
from authentik.events.utils import sanitize_dict
from authentik.lib.utils.file import (
    FilePathSerializer,
    FileUploadSerializer,
    set_file,
    set_file_url,
)
from authentik.policies.api.exec import PolicyTestResultSerializer
from authentik.policies.engine import PolicyEngine
from authentik.policies.types import PolicyResult
@ -44,24 +32,17 @@ LOGGER = get_logger()

def user_app_cache_key(user_pk: str) -> str:
    """Cache key where application list for user is saved"""
    return f"goauthentik.io/core/app_access/{user_pk}"
    return f"user_app_cache_{user_pk}"


class ApplicationSerializer(ModelSerializer):
    """Application Serializer"""

    launch_url = SerializerMethodField()
    provider_obj = ProviderSerializer(source="get_provider", required=False, read_only=True)
    launch_url = ReadOnlyField(source="get_launch_url")
    provider_obj = ProviderSerializer(source="get_provider", required=False)

    meta_icon = ReadOnlyField(source="get_meta_icon")

    def get_launch_url(self, app: Application) -> Optional[str]:
        """Allow formatting of launch URL"""
        user = None
        if "request" in self.context:
            user = self.context["request"].user
        return app.get_launch_url(user)

    class Meta:

        model = Application
@ -72,13 +53,11 @@ class ApplicationSerializer(ModelSerializer):
            "provider",
            "provider_obj",
            "launch_url",
            "open_in_new_tab",
            "meta_launch_url",
            "meta_icon",
            "meta_description",
            "meta_publisher",
            "policy_engine_mode",
            "group",
        ]
        extra_kwargs = {
            "meta_icon": {"read_only": True},
@ -96,15 +75,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
        "meta_launch_url",
        "meta_description",
        "meta_publisher",
        "group",
    ]
    filterset_fields = [
        "name",
        "slug",
        "meta_launch_url",
        "meta_description",
        "meta_publisher",
        "group",
    ]
    lookup_field = "slug"
    ordering = ["name"]
@ -154,19 +124,12 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
            return HttpResponseBadRequest("for_user must be numerical")
        engine = PolicyEngine(application, for_user, request)
        engine.use_cache = False
        with capture_logs() as logs:
            engine.build()
            result = engine.result
        engine.build()
        result = engine.result
        response = PolicyTestResultSerializer(PolicyResult(False))
        if result.passing:
            response = PolicyTestResultSerializer(PolicyResult(True))
        if request.user.is_superuser:
            log_messages = []
            for log in logs:
                if log.get("process", "") == "PolicyProcess":
                    continue
                log_messages.append(sanitize_dict(log))
            result.log_messages = log_messages
            response = PolicyTestResultSerializer(result)
        return Response(response.data)

@ -198,9 +161,9 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
        if not should_cache:
            allowed_applications = self._get_allowed_applications(queryset)
        if should_cache:
            LOGGER.debug("Caching allowed application list")
            allowed_applications = cache.get(user_app_cache_key(self.request.user.pk))
            if not allowed_applications:
                LOGGER.debug("Caching allowed application list")
                allowed_applications = self._get_allowed_applications(queryset)
                cache.set(
                    user_app_cache_key(self.request.user.pk),
@ -227,10 +190,21 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
        methods=["POST"],
        parser_classes=(MultiPartParser,),
    )
    # pylint: disable=unused-argument
    def set_icon(self, request: Request, slug: str):
        """Set application icon"""
        app: Application = self.get_object()
        return set_file(request, app, "meta_icon")
        icon = request.FILES.get("file", None)
        clear = request.data.get("clear", "false").lower() == "true"
        if clear:
            # .delete() saves the model by default
            app.meta_icon.delete()
            return Response({})
        if icon:
            app.meta_icon = icon
            app.save()
            return Response({})
        return HttpResponseBadRequest()

    @permission_required("authentik_core.change_application")
    @extend_schema(
@ -246,22 +220,27 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
        filter_backends=[],
        methods=["POST"],
    )
    # pylint: disable=unused-argument
    def set_icon_url(self, request: Request, slug: str):
        """Set application icon (as URL)"""
        app: Application = self.get_object()
        return set_file_url(request, app, "meta_icon")
        url = request.data.get("url", None)
        if url is None:
            return HttpResponseBadRequest()
        app.meta_icon.name = url
        app.save()
        return Response({})

    @permission_required("authentik_core.view_application", ["authentik_events.view_event"])
    @extend_schema(responses={200: CoordinateSerializer(many=True)})
    @action(detail=True, pagination_class=None, filter_backends=[])
    # pylint: disable=unused-argument
    def metrics(self, request: Request, slug: str):
        """Metrics for application logins"""
        app = self.get_object()
        return Response(
            get_objects_for_user(request.user, "authentik_events.view_event").filter(
            get_events_per_1h(
                action=EventAction.AUTHORIZE_APPLICATION,
                context__authorized_application__pk=app.pk.hex,
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )

@ -11,7 +11,6 @@ from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import GenericViewSet
from ua_parser import user_agent_parser

from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.core.models import AuthenticatedSession
from authentik.events.geo import GEOIP_READER, GeoIPDict
@ -103,8 +102,11 @@ class AuthenticatedSessionViewSet(
    search_fields = ["user__username", "last_ip", "last_user_agent"]
    filterset_fields = ["user__username", "last_ip", "last_user_agent"]
    ordering = ["user__username"]
    permission_classes = [OwnerSuperuserPermissions]
    filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
    filter_backends = [
        DjangoFilterBackend,
        OrderingFilter,
        SearchFilter,
    ]

    def get_queryset(self):
        user = self.request.user if self.request else get_anonymous_user()

@ -1,68 +0,0 @@
"""Authenticator Devices API Views"""
from django_otp import device_classes, devices_for_user
from django_otp.models import Device
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema
from rest_framework.fields import BooleanField, CharField, IntegerField, SerializerMethodField
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet

from authentik.core.api.utils import MetaNameSerializer


class DeviceSerializer(MetaNameSerializer):
    """Serializer for Duo authenticator devices"""

    pk = IntegerField()
    name = CharField()
    type = SerializerMethodField()
    confirmed = BooleanField()

    def get_type(self, instance: Device) -> str:
        """Get type of device"""
        return instance._meta.label


class DeviceViewSet(ViewSet):
    """Viewset for authenticator devices"""

    serializer_class = DeviceSerializer
    permission_classes = [IsAuthenticated]

    @extend_schema(responses={200: DeviceSerializer(many=True)})
    def list(self, request: Request) -> Response:
        """Get all devices for current user"""
        devices = devices_for_user(request.user)
        return Response(DeviceSerializer(devices, many=True).data)


class AdminDeviceViewSet(ViewSet):
    """Viewset for authenticator devices"""

    serializer_class = DeviceSerializer
    permission_classes = [IsAdminUser]

    def get_devices(self, **kwargs):
        """Get all devices in all child classes"""
        for model in device_classes():
            device_set = model.objects.filter(**kwargs)
            yield from device_set

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name="user",
                location=OpenApiParameter.QUERY,
                type=OpenApiTypes.INT,
            )
        ],
        responses={200: DeviceSerializer(many=True)},
    )
    def list(self, request: Request) -> Response:
        """Get all devices for current user"""
        kwargs = {}
        if "user" in request.query_params:
            kwargs = {"user": request.query_params["user"]}
        return Response(DeviceSerializer(self.get_devices(**kwargs), many=True).data)
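
A small sketch of what these viewsets iterate over, using the django-otp helpers imported above; the username is a placeholder:

    from django.contrib.auth import get_user_model
    from django_otp import devices_for_user

    user = get_user_model().objects.get(username="example-admin")
    # Every confirmed OTP device (TOTP, static tokens, ...) registered for that user:
    for device in devices_for_user(user):
        print(device._meta.label, device.name, device.confirmed)
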
@ -1,29 +1,21 @@
"""Groups API Viewset"""
from json import loads

from django.db.models.query import QuerySet
from django.http import Http404
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
from django_filters.filters import ModelMultipleChoiceFilter
from django_filters.filterset import FilterSet
from drf_spectacular.utils import OpenApiResponse, extend_schema
from guardian.shortcuts import get_objects_for_user
from rest_framework.decorators import action
from rest_framework.fields import CharField, IntegerField, JSONField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
from rest_framework.fields import BooleanField, CharField, JSONField
from rest_framework.serializers import ListSerializer, ModelSerializer
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter

from authentik.api.decorators import permission_required
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer, is_dict
from authentik.core.api.utils import is_dict
from authentik.core.models import Group, User


class GroupMemberSerializer(ModelSerializer):
    """Stripped down user serializer to show relevant users for groups"""

    is_superuser = BooleanField(read_only=True)
    avatar = CharField(read_only=True)
    attributes = JSONField(validators=[is_dict], required=False)
    uid = CharField(read_only=True)
@ -37,6 +29,7 @@ class GroupMemberSerializer(ModelSerializer):
            "name",
            "is_active",
            "last_login",
            "is_superuser",
            "email",
            "avatar",
            "attributes",
@ -51,41 +44,24 @@ class GroupSerializer(ModelSerializer):
    users_obj = ListSerializer(
        child=GroupMemberSerializer(), read_only=True, source="users", required=False
    )
    parent_name = CharField(source="parent.name", read_only=True)

    num_pk = IntegerField(read_only=True)

    class Meta:

        model = Group
        fields = [
            "pk",
            "num_pk",
            "name",
            "is_superuser",
            "parent",
            "parent_name",
            "users",
            "attributes",
            "users_obj",
        ]
        extra_kwargs = {
            "users": {
                "default": list,
            }
        }


class GroupFilter(FilterSet):
    """Filter for groups"""

    attributes = CharFilter(
        field_name="attributes",
        lookup_expr="",
        label="Attributes",
        method="filter_attributes",
    )

    members_by_username = ModelMultipleChoiceFilter(
        field_name="users__username",
        to_field_name="username",
@ -96,33 +72,10 @@ class GroupFilter(FilterSet):
        queryset=User.objects.all(),
    )

    def filter_attributes(self, queryset, name, value):
        """Filter attributes by query args"""
        try:
            value = loads(value)
        except ValueError:
            raise ValidationError(detail="filter: failed to parse JSON")
        if not isinstance(value, dict):
            raise ValidationError(detail="filter: value must be key:value mapping")
        qs = {}
        for key, _value in value.items():
            qs[f"attributes__{key}"] = _value
        try:
            _ = len(queryset.filter(**qs))
            return queryset.filter(**qs)
        except ValueError:
            return queryset

    class Meta:

        model = Group
        fields = ["name", "is_superuser", "members_by_pk", "attributes", "members_by_username"]


class UserAccountSerializer(PassiveSerializer):
    """Account adding/removing operations"""

    pk = IntegerField(required=True)
        fields = ["name", "is_superuser", "members_by_pk", "members_by_username"]


class GroupViewSet(UsedByMixin, ModelViewSet):
@ -146,51 +99,3 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
        if self.request.user.has_perm("authentik_core.view_group"):
            return self._filter_queryset_for_list(queryset)
        return super().filter_queryset(queryset)

    @permission_required(None, ["authentik_core.add_user"])
    @extend_schema(
        request=UserAccountSerializer,
        responses={
            204: OpenApiResponse(description="User added"),
            404: OpenApiResponse(description="User not found"),
        },
    )
    @action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
    def add_user(self, request: Request, pk: str) -> Response:
        """Add user to group"""
        group: Group = self.get_object()
        user: User = (
            get_objects_for_user(request.user, "authentik_core.view_user")
            .filter(
                pk=request.data.get("pk"),
            )
            .first()
        )
        if not user:
            raise Http404
        group.users.add(user)
        return Response(status=204)

    @permission_required(None, ["authentik_core.add_user"])
    @extend_schema(
        request=UserAccountSerializer,
        responses={
            204: OpenApiResponse(description="User added"),
            404: OpenApiResponse(description="User not found"),
        },
    )
    @action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
    def remove_user(self, request: Request, pk: str) -> Response:
        """Add user to group"""
        group: Group = self.get_object()
        user: User = (
            get_objects_for_user(request.user, "authentik_core.view_user")
            .filter(
                pk=request.data.get("pk"),
            )
            .first()
        )
        if not user:
            raise Http404
        group.users.remove(user)
        return Response(status=204)
@ -14,13 +14,12 @@ from rest_framework.serializers import ModelSerializer, SerializerMethodField
from rest_framework.viewsets import GenericViewSet

from authentik.api.decorators import permission_required
from authentik.blueprints.api import ManagedSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import MetaNameSerializer, PassiveSerializer, TypeCreateSerializer
from authentik.core.expression.evaluator import PropertyMappingEvaluator
from authentik.core.expression import PropertyMappingEvaluator
from authentik.core.models import PropertyMapping
from authentik.events.utils import sanitize_item
from authentik.lib.utils.reflection import all_subclasses
from authentik.managed.api import ManagedSerializer
from authentik.policies.api.exec import PolicyTestSerializer


@ -42,9 +41,7 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri

    def validate_expression(self, expression: str) -> str:
        """Test Syntax"""
        evaluator = PropertyMappingEvaluator(
            self.instance,
        )
        evaluator = PropertyMappingEvaluator()
        evaluator.validate(expression)
        return expression

@ -59,7 +56,6 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri
            "component",
            "verbose_name",
            "verbose_name_plural",
            "meta_model_name",
        ]


@ -117,6 +113,7 @@ class PropertyMappingViewSet(
        ],
    )
    @action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
    # pylint: disable=unused-argument, invalid-name
    def test(self, request: Request, pk: str) -> Response:
        """Test Property Mapping"""
        mapping: PropertyMapping = self.get_object()
@ -140,9 +137,7 @@ class PropertyMappingViewSet(
                self.request,
                **test_params.validated_data.get("context", {}),
            )
            response_data["result"] = dumps(
                sanitize_item(result), indent=(4 if format_result else None)
            )
            response_data["result"] = dumps(result, indent=(4 if format_result else None))
        except Exception as exc:  # pylint: disable=broad-except
            response_data["result"] = str(exc)
            response_data["successful"] = False

@ -43,7 +43,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
            "assigned_application_name",
            "verbose_name",
            "verbose_name_plural",
            "meta_model_name",
        ]


@ -1,30 +1,19 @@
"""Source API Views"""
from typing import Iterable

from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import OpenApiResponse, extend_schema
from drf_spectacular.utils import extend_schema
from rest_framework import mixins
from rest_framework.decorators import action
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer, ReadOnlyField, SerializerMethodField
from rest_framework.serializers import ModelSerializer, SerializerMethodField
from rest_framework.viewsets import GenericViewSet
from structlog.stdlib import get_logger

from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
from authentik.api.decorators import permission_required
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
from authentik.core.models import Source, UserSourceConnection
from authentik.core.models import Source
from authentik.core.types import UserSettingSerializer
from authentik.lib.utils.file import (
    FilePathSerializer,
    FileUploadSerializer,
    set_file,
    set_file_url,
)
from authentik.lib.utils.reflection import all_subclasses
from authentik.policies.engine import PolicyEngine

@ -34,9 +23,7 @@ LOGGER = get_logger()
class SourceSerializer(ModelSerializer, MetaNameSerializer):
    """Source Serializer"""

    managed = ReadOnlyField()
    component = SerializerMethodField()
    icon = ReadOnlyField(source="get_icon")

    def get_component(self, obj: Source) -> str:
        """Get object component so that we know how to edit the object"""
@ -58,12 +45,8 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
            "component",
            "verbose_name",
            "verbose_name_plural",
            "meta_model_name",
            "policy_engine_mode",
            "user_matching_mode",
            "managed",
            "user_path_template",
            "icon",
        ]


@ -79,53 +62,10 @@ class SourceViewSet(
    queryset = Source.objects.none()
    serializer_class = SourceSerializer
    lookup_field = "slug"
    search_fields = ["slug", "name"]
    filterset_fields = ["slug", "name", "managed"]

    def get_queryset(self):  # pragma: no cover
        return Source.objects.select_subclasses()

    @permission_required("authentik_core.change_source")
    @extend_schema(
        request={
            "multipart/form-data": FileUploadSerializer,
        },
        responses={
            200: OpenApiResponse(description="Success"),
            400: OpenApiResponse(description="Bad request"),
        },
    )
    @action(
        detail=True,
        pagination_class=None,
        filter_backends=[],
        methods=["POST"],
        parser_classes=(MultiPartParser,),
    )
    def set_icon(self, request: Request, slug: str):
        """Set source icon"""
        source: Source = self.get_object()
        return set_file(request, source, "icon")

    @permission_required("authentik_core.change_source")
    @extend_schema(
        request=FilePathSerializer,
        responses={
            200: OpenApiResponse(description="Success"),
            400: OpenApiResponse(description="Bad request"),
        },
    )
    @action(
        detail=True,
        pagination_class=None,
        filter_backends=[],
        methods=["POST"],
    )
    def set_icon_url(self, request: Request, slug: str):
        """Set source icon (as URL)"""
        source: Source = self.get_object()
        return set_file_url(request, source, "icon")

    @extend_schema(responses={200: TypeCreateSerializer(many=True)})
    @action(detail=False, pagination_class=None, filter_backends=[])
    def types(self, request: Request) -> Response:
@ -155,57 +95,19 @@ class SourceViewSet(
    @action(detail=False, pagination_class=None, filter_backends=[])
    def user_settings(self, request: Request) -> Response:
        """Get all sources the user can configure"""
        _all_sources: Iterable[Source] = (
            Source.objects.filter(enabled=True).select_subclasses().order_by("name")
        )
        _all_sources: Iterable[Source] = Source.objects.filter(enabled=True).select_subclasses()
        matching_sources: list[UserSettingSerializer] = []
        for source in _all_sources:
            user_settings = source.ui_user_settings()
            user_settings = source.ui_user_settings
            if not user_settings:
                continue
            policy_engine = PolicyEngine(source, request.user, request)
            policy_engine.build()
            if not policy_engine.passing:
                continue
            source_settings = source.ui_user_settings()
            source_settings = source.ui_user_settings
            source_settings.initial_data["object_uid"] = source.slug
            if not source_settings.is_valid():
                LOGGER.warning(source_settings.errors)
            matching_sources.append(source_settings.validated_data)
        return Response(matching_sources)


class UserSourceConnectionSerializer(SourceSerializer):
    """OAuth Source Serializer"""

    source = SourceSerializer(read_only=True)

    class Meta:
        model = UserSourceConnection
        fields = [
            "pk",
            "user",
            "source",
            "created",
        ]
        extra_kwargs = {
            "user": {"read_only": True},
            "created": {"read_only": True},
        }


class UserSourceConnectionViewSet(
    mixins.RetrieveModelMixin,
    mixins.UpdateModelMixin,
    mixins.DestroyModelMixin,
    UsedByMixin,
    mixins.ListModelMixin,
    GenericViewSet,
):
    """User-source connection Viewset"""

    queryset = UserSourceConnection.objects.all()
    serializer_class = UserSourceConnectionSerializer
    permission_classes = [OwnerSuperuserPermissions]
    filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
    ordering = ["pk"]
Some files were not shown because too many files have changed in this diff.