Compare commits
59 commits (version/20 ... version-20)
| SHA1 |
| --- |
| e095e9f694 |
| 10e311534f |
| 46fdb45273 |
| 6d4125cb90 |
| bc83176962 |
| 0fa8432b72 |
| bb9a524b53 |
| d31c05625b |
| 399223b770 |
| 19197d3f9b |
| 1cd000dfe2 |
| 00ae97944a |
| 9f3ccfb7c7 |
| 9ed9c39ac8 |
| 30b6eeee9f |
| afe2621783 |
| 8b12c6a01a |
| f63adfed96 |
| 9c8fec21cf |
| 4776d2bcc5 |
| a15a040362 |
| fcd6dc1d60 |
| acc3b59869 |
| d9d5ac10e6 |
| 750669dcab |
| 88a3eed67e |
| 6c214fffc4 |
| 70100fc105 |
| 3c1163fabd |
| 539e8242ff |
| 2648333590 |
| fe828ef993 |
| 29a6530742 |
| a6b9274c4f |
| a2a67161ac |
| 2e8263a99b |
| 6b9afed21f |
| 1eb1f4e0b8 |
| 7c3d60ec3a |
| a494c6b6e8 |
| 6604d3577f |
| f8bfa7e16a |
| ea6cf6eabf |
| 769ce3ce7b |
| 3891fb3fa8 |
| 41eb965350 |
| 8d95612287 |
| 82b5274b15 |
| af56ce3d78 |
| f5c6e7aeb0 |
| 3809400e93 |
| 1def9865cf |
| 3716298639 |
| c16317d7cf |
| bbb8fa8269 |
| e4c251a178 |
| 0fefd5f522 |
| 88057db0b0 |
| 91cb6c9beb |
```diff
@@ -1,20 +1,12 @@
 [bumpversion]
-current_version = 2024.2.0-rc1
+current_version = 2023.10.7
 tag = True
 commit = True
-parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
-serialize =
-    {major}.{minor}.{patch}-{rc_t}{rc_n}
-    {major}.{minor}.{patch}
+parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
+serialize = {major}.{minor}.{patch}
 message = release: {new_version}
 tag_name = version/{new_version}
 
-[bumpversion:part:rc_t]
-values =
-    rc
-    final
-optional_value = final
-
 [bumpversion:file:pyproject.toml]
 
 [bumpversion:file:docker-compose.yml]
```
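The removed `parse`/`serialize` settings above extend the plain `major.minor.patch` scheme with an optional release-candidate suffix. A minimal Python sketch of how that regex splits a version string — the example inputs are invented, and the doubled backslash in the config is assumed to correspond to a single `\d` in the effective pattern:

```python
# Sketch only: applies the removed bumpversion parse regex to example strings.
import re

PARSE = (
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

print(re.fullmatch(PARSE, "2024.2.0-rc1").groupdict())
# {'major': '2024', 'minor': '2', 'patch': '0', 'rc_t': 'rc', 'rc_n': '1'}
print(re.fullmatch(PARSE, "2023.10.7").groupdict())
# {'major': '2023', 'minor': '10', 'patch': '7', 'rc_t': None, 'rc_n': None}
```

For a final release the `rc_t`/`rc_n` groups stay empty, which is why the shorter `{major}.{minor}.{patch}` serialization applies.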
```diff
@@ -9,4 +9,3 @@ blueprints/local
 .git
 !gen-ts-api/node_modules
 !gen-ts-api/dist/**
-!gen-go-api/
```
```diff
@@ -9,6 +9,9 @@ inputs:
 runs:
   using: "composite"
   steps:
+    - name: Generate config
+      id: ev
+      uses: ./.github/actions/docker-push-variables
     - name: Find Comment
       uses: peter-evans/find-comment@v2
       id: fc
```
.github/actions/docker-push-variables/action.yml (90 changes, vendored)

```diff
@@ -1,33 +1,31 @@
----
 name: "Prepare docker environment variables"
 description: "Prepare docker environment variables"
 
-inputs:
-  image-name:
-    required: true
-    description: "Docker image prefix"
-  image-arch:
-    required: false
-    description: "Docker image arch"
-
 outputs:
+  shouldBuild:
+    description: "Whether to build image or not"
+    value: ${{ steps.ev.outputs.shouldBuild }}
+  branchName:
+    description: "Branch name"
+    value: ${{ steps.ev.outputs.branchName }}
+  branchNameContainer:
+    description: "Branch name (for containers)"
+    value: ${{ steps.ev.outputs.branchNameContainer }}
+  timestamp:
+    description: "Timestamp"
+    value: ${{ steps.ev.outputs.timestamp }}
   sha:
     description: "sha"
     value: ${{ steps.ev.outputs.sha }}
+  shortHash:
+    description: "shortHash"
+    value: ${{ steps.ev.outputs.shortHash }}
   version:
-    description: "Version"
+    description: "version"
     value: ${{ steps.ev.outputs.version }}
-  prerelease:
-    description: "Prerelease"
-    value: ${{ steps.ev.outputs.prerelease }}
+  versionFamily:
+    description: "versionFamily"
+    value: ${{ steps.ev.outputs.versionFamily }}
 
-  imageTags:
-    description: "Docker image tags"
-    value: ${{ steps.ev.outputs.imageTags }}
-  imageMainTag:
-    description: "Docker image main tag"
-    value: ${{ steps.ev.outputs.imageMainTag }}
 
 runs:
   using: "composite"
@@ -47,48 +45,20 @@ runs:
         branch_name = os.environ["GITHUB_REF"]
         if os.environ.get("GITHUB_HEAD_REF", "") != "":
             branch_name = os.environ["GITHUB_HEAD_REF"]
 
+        should_build = str(os.environ.get("DOCKER_USERNAME", "") != "").lower()
+        version = parser.get("bumpversion", "current_version")
+        version_family = ".".join(version.split(".")[:-1])
         safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")
 
-        image_names = "${{ inputs.image-name }}".split(",")
-        image_arch = "${{ inputs.image-arch }}" or None
+        sha = os.environ["GITHUB_SHA"] if not "${{ github.event.pull_request.head.sha }}" else "${{ github.event.pull_request.head.sha }}"
 
-        is_pull_request = bool("${{ github.event.pull_request.head.sha }}")
-        is_release = "dev" not in image_names[0]
-
-        sha = os.environ["GITHUB_SHA"] if not is_pull_request else "${{ github.event.pull_request.head.sha }}"
-
-        # 2042.1.0 or 2042.1.0-rc1
-        version = parser.get("bumpversion", "current_version")
-        # 2042.1
-        version_family = ".".join(version.split("-", 1)[0].split(".")[:-1])
-        prerelease = "-" in version
-
-        image_tags = []
-        if is_release:
-            for name in image_names:
-                image_tags += [
-                    f"{name}:{version}",
-                    f"{name}:{version_family}",
-                ]
-                if not prerelease:
-                    image_tags += [f"{name}:latest"]
-        else:
-            suffix = ""
-            if image_arch and image_arch != "amd64":
-                suffix = f"-{image_arch}"
-            for name in image_names:
-                image_tags += [
-                    f"{name}:gh-{sha}{suffix}",  # Used for ArgoCD and PR comments
-                    f"{name}:gh-{safe_branch_name}{suffix}",  # For convenience
-                    f"{name}:gh-{safe_branch_name}-{int(time())}-{sha[:7]}{suffix}",  # Use by FluxCD
-                ]
-
-        image_main_tag = image_tags[0]
-        image_tags_rendered = ",".join(image_tags)
 
         with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
+            print("branchName=%s" % branch_name, file=_output)
+            print("branchNameContainer=%s" % safe_branch_name, file=_output)
+            print("timestamp=%s" % int(time()), file=_output)
             print("sha=%s" % sha, file=_output)
+            print("shortHash=%s" % sha[:7], file=_output)
+            print("shouldBuild=%s" % should_build, file=_output)
             print("version=%s" % version, file=_output)
-            print("prerelease=%s" % prerelease, file=_output)
+            print("versionFamily=%s" % version_family, file=_output)
-            print("imageTags=%s" % image_tags_rendered, file=_output)
-            print("imageMainTag=%s" % image_main_tag, file=_output)
```
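The removed (`-`) side of the second hunk above computes the Docker tag list inline. A self-contained sketch of that logic, with invented example values standing in for the `${{ ... }}` expressions and environment variables — an illustration of the removed script, not a replacement for it:

```python
# Sketch of the tag-computation logic from the removed side of the hunk above.
# All input values here are made-up examples; in the action they come from the
# action inputs, GITHUB_* environment variables and .bumpversion.cfg.
from time import time

version = "2024.2.0-rc1"  # parser.get("bumpversion", "current_version")
version_family = ".".join(version.split("-", 1)[0].split(".")[:-1])  # -> "2024.2"
prerelease = "-" in version

image_names = "ghcr.io/goauthentik/dev-server".split(",")
image_arch = "arm64"
safe_branch_name = "refs/heads/web/fix-thing".replace("refs/heads/", "").replace("/", "-")
sha = "e095e9f694e095e9f694e095e9f694e095e9f694"
is_release = "dev" not in image_names[0]

image_tags = []
if is_release:
    # Release images get version, version-family and (for final releases) latest tags.
    for name in image_names:
        image_tags += [f"{name}:{version}", f"{name}:{version_family}"]
        if not prerelease:
            image_tags += [f"{name}:latest"]
else:
    # Dev images get commit/branch based gh-* tags, suffixed for non-amd64 arches.
    suffix = f"-{image_arch}" if image_arch and image_arch != "amd64" else ""
    for name in image_names:
        image_tags += [
            f"{name}:gh-{sha}{suffix}",
            f"{name}:gh-{safe_branch_name}{suffix}",
            f"{name}:gh-{safe_branch_name}-{int(time())}-{sha[:7]}{suffix}",
        ]

print("imageMainTag=%s" % image_tags[0])
print("imageTags=%s" % ",".join(image_tags))
```

For a `dev-*` image name this yields the `gh-<sha>`, `gh-<branch>` and `gh-<branch>-<timestamp>-<shorthash>` tags that ci-main.yml later passes to `docker/build-push-action`.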
.github/actions/setup/action.yml (4 changes, vendored)

```diff
@@ -4,7 +4,7 @@ description: "Setup authentik testing environment"
 inputs:
   postgresql_version:
     description: "Optional postgresql image tag"
-    default: "16"
+    default: "12"
 
 runs:
   using: "composite"
@@ -18,7 +18,7 @@ runs:
     - name: Setup python and restore poetry
      uses: actions/setup-python@v4
      with:
-        python-version-file: "pyproject.toml"
+        python-version-file: 'pyproject.toml'
        cache: "poetry"
    - name: Setup node
      uses: actions/setup-node@v3
```
.github/actions/setup/docker-compose.yml (2 changes, vendored)

```diff
@@ -2,7 +2,7 @@ version: "3.7"
 
 services:
   postgresql:
-    image: docker.io/library/postgres:${PSQL_TAG:-16}
+    image: docker.io/library/postgres:${PSQL_TAG:-12}
     volumes:
       - db-data:/var/lib/postgresql/data
     environment:
```
.github/codespell-words.txt (2 changes, vendored)

```diff
@@ -2,5 +2,3 @@ keypair
 keypairs
 hass
 warmup
-ontext
-singed
```
.github/dependabot.yml (2 changes, vendored)

```diff
@@ -35,7 +35,6 @@ updates:
       sentry:
         patterns:
           - "@sentry/*"
-          - "@spotlightjs/*"
       babel:
         patterns:
           - "@babel/*"
@@ -67,7 +66,6 @@ updates:
       sentry:
         patterns:
           - "@sentry/*"
-          - "@spotlightjs/*"
       babel:
         patterns:
           - "@babel/*"
```
.github/pull_request_template.md (1 change, vendored)

```diff
@@ -27,6 +27,7 @@ If an API change has been made
 If changes to the frontend have been made
 
 - [ ] The code has been formatted (`make web`)
+- [ ] The translation files have been updated (`make i18n-extract`)
 
 If applicable
 
```
.github/workflows/ci-main.yml (111 changes, vendored)

```diff
@@ -1,4 +1,3 @@
----
 name: authentik-ci-main
 
 on:
@@ -8,7 +7,7 @@ on:
       - next
       - version-*
     paths-ignore:
-      - website/**
+      - website
   pull_request:
     branches:
       - main
@@ -30,7 +29,7 @@ jobs:
           - codespell
           - isort
           - pending-migrations
-          # - pylint
+          - pylint
           - pyright
           - ruff
     runs-on: ubuntu-latest
@@ -62,6 +61,10 @@ jobs:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
+      - name: Setup authentik env
+        uses: ./.github/actions/setup
+        with:
+          postgresql_version: ${{ matrix.psql }}
      - name: checkout stable
        run: |
          # Delete all poetry envs
@@ -73,7 +76,7 @@ jobs:
          git checkout version/$(python -c "from authentik import __version__; print(__version__)")
          rm -rf .github/ scripts/
          mv ../.github ../scripts .
-      - name: Setup authentik env (stable)
+      - name: Setup authentik env (ensure stable deps are installed)
        uses: ./.github/actions/setup
        with:
          postgresql_version: ${{ matrix.psql }}
@@ -87,20 +90,14 @@ jobs:
          git clean -d -fx .
          git checkout $GITHUB_SHA
          # Delete previous poetry env
-          rm -rf /home/runner/.cache/pypoetry/virtualenvs/*
+          rm -rf $(poetry env info --path)
+          poetry install
      - name: Setup authentik env (ensure latest deps are installed)
        uses: ./.github/actions/setup
        with:
          postgresql_version: ${{ matrix.psql }}
      - name: migrate to latest
-        run: |
-          poetry run python -m lifecycle.migrate
-      - name: run tests
-        env:
-          # Test in the main database that we just migrated from the previous stable version
-          AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
-        run: |
-          poetry run make test
+        run: poetry run python -m lifecycle.migrate
  test-unittest:
    name: test-unittest - PostgreSQL ${{ matrix.psql }}
    runs-on: ubuntu-latest
@@ -123,10 +120,9 @@ jobs:
          poetry run make test
          poetry run coverage xml
      - if: ${{ always() }}
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v3
        with:
          flags: unit
-          token: ${{ secrets.CODECOV_TOKEN }}
  test-integration:
    runs-on: ubuntu-latest
    timeout-minutes: 30
@@ -135,16 +131,15 @@ jobs:
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: Create k8s Kind Cluster
-        uses: helm/kind-action@v1.9.0
+        uses: helm/kind-action@v1.8.0
      - name: run integration
        run: |
          poetry run coverage run manage.py test tests/integration
          poetry run coverage xml
      - if: ${{ always() }}
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v3
        with:
          flags: integration
-          token: ${{ secrets.CODECOV_TOKEN }}
  test-e2e:
    name: test-e2e (${{ matrix.job.name }})
    runs-on: ubuntu-latest
@@ -175,7 +170,7 @@ jobs:
        run: |
          docker-compose -f tests/e2e/docker-compose.yml up -d
      - id: cache-web
-        uses: actions/cache@v4
+        uses: actions/cache@v3
        with:
          path: web/dist
          key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
@@ -191,10 +186,9 @@ jobs:
          poetry run coverage run manage.py test ${{ matrix.job.glob }}
          poetry run coverage xml
      - if: ${{ always() }}
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v3
        with:
          flags: e2e
-          token: ${{ secrets.CODECOV_TOKEN }}
  ci-core-mark:
    needs:
      - lint
@@ -207,19 +201,12 @@ jobs:
    steps:
      - run: echo mark
  build:
-    strategy:
-      fail-fast: false
-      matrix:
-        arch:
-          - amd64
-          - arm64
    needs: ci-core-mark
    runs-on: ubuntu-latest
    permissions:
      # Needed to upload contianer images to ghcr.io
      packages: write
    timeout-minutes: 120
-    if: "github.repository == 'goauthentik/authentik'"
    steps:
      - uses: actions/checkout@v4
        with:
@@ -231,11 +218,11 @@ jobs:
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
-        with:
-          image-name: ghcr.io/goauthentik/dev-server
-          image-arch: ${{ matrix.arch }}
+        env:
+          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      - name: Login to Container Registry
        uses: docker/login-action@v3
+        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -249,32 +236,68 @@ jobs:
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
-          tags: ${{ steps.ev.outputs.imageTags }}
-          push: true
+          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
+          tags: |
+            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}
+            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}
+            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
          build-args: |
            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
+            VERSION=${{ steps.ev.outputs.version }}
+            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
-          platforms: linux/${{ matrix.arch }}
-  pr-comment:
-    needs:
-      - build
+      - name: Comment on PR
+        if: github.event_name == 'pull_request'
+        continue-on-error: true
+        uses: ./.github/actions/comment-pr-instructions
+        with:
+          tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
+  build-arm64:
+    needs: ci-core-mark
    runs-on: ubuntu-latest
-    if: ${{ github.event_name == 'pull_request' }}
    permissions:
-      # Needed to write comments on PRs
-      pull-requests: write
+      # Needed to upload contianer images to ghcr.io
+      packages: write
    timeout-minutes: 120
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3.0.0
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
+        env:
+          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
+      - name: Login to Container Registry
+        uses: docker/login-action@v3
+        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
        with:
-          image-name: ghcr.io/goauthentik/dev-server
-      - name: Comment on PR
-        uses: ./.github/actions/comment-pr-instructions
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: generate ts client
+        run: make gen-client-ts
+      - name: Build Docker Image
+        uses: docker/build-push-action@v5
        with:
-          tag: gh-${{ steps.ev.outputs.imageMainTag }}
+          context: .
+          secrets: |
+            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
+            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
+          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
+          tags: |
+            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-arm64
+            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}-arm64
+            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64
+          build-args: |
+            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
+            VERSION=${{ steps.ev.outputs.version }}
+            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
+          platforms: linux/arm64
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
```
.github/workflows/ci-outpost.yml (25 changes, vendored)

```diff
@@ -1,4 +1,3 @@
----
 name: authentik-ci-outpost
 
 on:
@@ -17,7 +16,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
-      - uses: actions/setup-go@v5
+      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
      - name: Prepare and generate API
@@ -29,7 +28,7 @@ jobs:
      - name: Generate API
        run: make gen-client-go
      - name: golangci-lint
-        uses: golangci/golangci-lint-action@v4
+        uses: golangci/golangci-lint-action@v3
        with:
          version: v1.54.2
          args: --timeout 5000s --verbose
@@ -38,7 +37,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
-      - uses: actions/setup-go@v5
+      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
      - name: Setup authentik env
@@ -66,12 +65,10 @@ jobs:
          - proxy
          - ldap
          - radius
-          - rac
    runs-on: ubuntu-latest
    permissions:
      # Needed to upload contianer images to ghcr.io
      packages: write
-    if: "github.repository == 'goauthentik/authentik'"
    steps:
      - uses: actions/checkout@v4
        with:
@@ -83,10 +80,11 @@ jobs:
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
-        with:
-          image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
+        env:
+          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      - name: Login to Container Registry
        uses: docker/login-action@v3
+        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -96,11 +94,15 @@ jobs:
      - name: Build Docker Image
        uses: docker/build-push-action@v5
        with:
-          tags: ${{ steps.ev.outputs.imageTags }}
+          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
+          tags: |
+            ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}
+            ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
          file: ${{ matrix.type }}.Dockerfile
-          push: true
          build-args: |
            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
+            VERSION=${{ steps.ev.outputs.version }}
+            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
          platforms: linux/amd64,linux/arm64
          context: .
          cache-from: type=gha
@@ -117,14 +119,13 @@ jobs:
          - proxy
          - ldap
          - radius
-          - rac
        goos: [linux]
        goarch: [amd64, arm64]
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
-      - uses: actions/setup-go@v5
+      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
      - uses: actions/setup-node@v4
```
.github/workflows/codeql-analysis.yml (6 changes, vendored)

```diff
@@ -27,10 +27,10 @@ jobs:
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3
+        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
      - name: Autobuild
-        uses: github/codeql-action/autobuild@v3
+        uses: github/codeql-action/autobuild@v2
      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3
+        uses: github/codeql-action/analyze@v2
```
.github/workflows/gha-cache-cleanup.yml (4 changes, vendored)

```diff
@@ -6,10 +6,6 @@ on:
   types:
     - closed
 
-permissions:
-  # Permission to delete cache
-  actions: write
-
 jobs:
   cleanup:
     runs-on: ubuntu-latest
```
.github/workflows/image-compress.yml (2 changes, vendored)

```diff
@@ -42,7 +42,7 @@ jobs:
        with:
          githubToken: ${{ steps.generate_token.outputs.token }}
          compressOnly: ${{ github.event_name != 'pull_request' }}
-      - uses: peter-evans/create-pull-request@v6
+      - uses: peter-evans/create-pull-request@v5
        if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
        id: cpr
        with:
```
.github/workflows/release-publish.yml (43 changes, vendored)

```diff
@@ -1,4 +1,3 @@
----
 name: authentik-on-release
 
 on:
@@ -20,8 +19,6 @@ jobs:
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
-        with:
-          image-name: ghcr.io/goauthentik/server,beryju/authentik
      - name: Docker Login Registry
        uses: docker/login-action@v3
        with:
@@ -41,12 +38,21 @@ jobs:
        uses: docker/build-push-action@v5
        with:
          context: .
-          push: true
+          push: ${{ github.event_name == 'release' }}
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
-          tags: ${{ steps.ev.outputs.imageTags }}
+          tags: |
+            beryju/authentik:${{ steps.ev.outputs.version }},
+            beryju/authentik:${{ steps.ev.outputs.versionFamily }},
+            beryju/authentik:latest,
+            ghcr.io/goauthentik/server:${{ steps.ev.outputs.version }},
+            ghcr.io/goauthentik/server:${{ steps.ev.outputs.versionFamily }},
+            ghcr.io/goauthentik/server:latest
          platforms: linux/amd64,linux/arm64
+          build-args: |
+            VERSION=${{ steps.ev.outputs.version }}
+            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
  build-outpost:
    runs-on: ubuntu-latest
    permissions:
@@ -59,10 +65,9 @@ jobs:
          - proxy
          - ldap
          - radius
-          - rac
    steps:
      - uses: actions/checkout@v4
-      - uses: actions/setup-go@v5
+      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
      - name: Set up QEMU
@@ -72,8 +77,6 @@ jobs:
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
-        with:
-          image-name: ghcr.io/goauthentik/${{ matrix.type }},beryju/authentik-${{ matrix.type }}
      - name: make empty clients
        run: |
          mkdir -p ./gen-ts-api
@@ -92,11 +95,20 @@ jobs:
      - name: Build Docker Image
        uses: docker/build-push-action@v5
        with:
-          push: true
-          tags: ${{ steps.ev.outputs.imageTags }}
+          push: ${{ github.event_name == 'release' }}
+          tags: |
+            beryju/authentik-${{ matrix.type }}:${{ steps.ev.outputs.version }},
+            beryju/authentik-${{ matrix.type }}:${{ steps.ev.outputs.versionFamily }},
+            beryju/authentik-${{ matrix.type }}:latest,
+            ghcr.io/goauthentik/${{ matrix.type }}:${{ steps.ev.outputs.version }},
+            ghcr.io/goauthentik/${{ matrix.type }}:${{ steps.ev.outputs.versionFamily }},
+            ghcr.io/goauthentik/${{ matrix.type }}:latest
          file: ${{ matrix.type }}.Dockerfile
          platforms: linux/amd64,linux/arm64
          context: .
+          build-args: |
+            VERSION=${{ steps.ev.outputs.version }}
+            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
  build-outpost-binary:
    timeout-minutes: 120
    runs-on: ubuntu-latest
@@ -114,7 +126,7 @@ jobs:
        goarch: [amd64, arm64]
    steps:
      - uses: actions/checkout@v4
-      - uses: actions/setup-go@v5
+      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
      - uses: actions/setup-node@v4
@@ -168,16 +180,15 @@ jobs:
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
-        with:
-          image-name: ghcr.io/goauthentik/server
      - name: Get static files from docker image
        run: |
-          docker pull ghcr.io/goauthentik/server:${{ steps.ev.outputs.imageMainTag }}
-          container=$(docker container create ghcr.io/goauthentik/server:${{ steps.ev.outputs.imageMainTag }})
+          docker pull ghcr.io/goauthentik/server:latest
+          container=$(docker container create ghcr.io/goauthentik/server:latest)
          docker cp ${container}:web/ .
      - name: Create a Sentry.io release
        uses: getsentry/action-release@v1
        continue-on-error: true
+        if: ${{ github.event_name == 'release' }}
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          SENTRY_ORG: authentik-security-inc
```
.github/workflows/release-tag.yml (15 changes, vendored)

```diff
@@ -1,4 +1,3 @@
----
 name: authentik-on-tag
 
 on:
@@ -29,11 +28,13 @@ jobs:
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - name: prepare variables
-        uses: ./.github/actions/docker-push-variables
-        id: ev
+      - name: Extract version number
+        id: get_version
+        uses: actions/github-script@v6
        with:
-          image-name: ghcr.io/goauthentik/server
+          github-token: ${{ steps.generate_token.outputs.token }}
+          script: |
+            return context.payload.ref.replace(/\/refs\/tags\/version\//, '');
      - name: Create Release
        id: create_release
        uses: actions/create-release@v1.1.4
@@ -41,6 +42,6 @@ jobs:
          GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
        with:
          tag_name: ${{ github.ref }}
-          release_name: Release ${{ steps.ev.outputs.version }}
+          release_name: Release ${{ steps.get_version.outputs.result }}
          draft: true
-          prerelease: ${{ steps.ev.outputs.prerelease == 'true' }}
+          prerelease: false
```
.github/workflows/repo-stale.yml (2 changes, vendored)

```diff
@@ -18,7 +18,7 @@ jobs:
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/stale@v9
+      - uses: actions/stale@v8
        with:
          repo-token: ${{ steps.generate_token.outputs.token }}
          days-before-stale: 60
```
.github/workflows/translation-advice.yml (11 changes, vendored)

```diff
@@ -7,26 +7,21 @@ on:
    paths:
      - "!**"
      - "locale/**"
-      - "!locale/en/**"
+      - "web/src/locales/**"
-      - "web/xliff/**"
-
-permissions:
-  # Permission to write comment
-  pull-requests: write
 
 jobs:
  post-comment:
    runs-on: ubuntu-latest
    steps:
      - name: Find Comment
-        uses: peter-evans/find-comment@v3
+        uses: peter-evans/find-comment@v2
        id: fc
        with:
          issue-number: ${{ github.event.pull_request.number }}
          comment-author: "github-actions[bot]"
          body-includes: authentik translations instructions
      - name: Create or update comment
-        uses: peter-evans/create-or-update-comment@v4
+        uses: peter-evans/create-or-update-comment@v3
        with:
          comment-id: ${{ steps.fc.outputs.comment-id }}
          issue-number: ${{ github.event.pull_request.number }}
```
```diff
@@ -1,8 +1,9 @@
----
-name: authentik-backend-translate-extract-compile
+name: authentik-backend-translate-compile
 on:
-  schedule:
-    - cron: "0 0 * * *" # every day at midnight
+  push:
+    branches: [main]
+    paths:
+      - "locale/**"
  workflow_dispatch:
 
 env:
@@ -24,20 +25,16 @@ jobs:
          token: ${{ steps.generate_token.outputs.token }}
      - name: Setup authentik env
        uses: ./.github/actions/setup
-      - name: run extract
-        run: |
-          poetry run make i18n-extract
      - name: run compile
-        run: |
-          poetry run ak compilemessages
-          make web-check-compile
+        run: poetry run ak compilemessages
      - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v6
+        uses: peter-evans/create-pull-request@v5
+        id: cpr
        with:
          token: ${{ steps.generate_token.outputs.token }}
-          branch: extract-compile-backend-translation
-          commit-message: "core, web: update translations"
-          title: "core, web: update translations"
-          body: "core, web: update translations"
+          branch: compile-backend-translation
+          commit-message: "core: compile backend translations"
+          title: "core: compile backend translations"
+          body: "core: compile backend translations"
          delete-branch: true
          signoff: true
```
.github/workflows/translation-rename.yml (4 changes, vendored)

```diff
@@ -6,10 +6,6 @@ on:
   pull_request:
     types: [opened, reopened]
 
-permissions:
-  # Permission to rename PR
-  pull-requests: write
-
 jobs:
   rename_pr:
     runs-on: ubuntu-latest
```
.github/workflows/web-api-publish.yml (2 changes, vendored)

```diff
@@ -35,7 +35,7 @@ jobs:
        run: |
          export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
          npm i @goauthentik/api@$VERSION
-      - uses: peter-evans/create-pull-request@v6
+      - uses: peter-evans/create-pull-request@v5
        id: cpr
        with:
          token: ${{ steps.generate_token.outputs.token }}
```
.vscode/extensions.json (1 change, vendored)

```diff
@@ -14,7 +14,6 @@
     "ms-python.pylint",
     "ms-python.python",
     "ms-python.vscode-pylance",
-    "ms-python.black-formatter",
     "redhat.vscode-yaml",
     "Tobermory.es6-string-html",
     "unifiedjs.vscode-mdx",
```
.vscode/settings.json (2 changes, vendored)

```diff
@@ -19,8 +19,10 @@
     "slo",
     "scim",
   ],
+  "python.linting.pylintEnabled": true,
   "todo-tree.tree.showCountsInTree": true,
   "todo-tree.tree.showBadges": true,
+  "python.formatting.provider": "black",
   "yaml.customTags": [
     "!Find sequence",
     "!KeyOf scalar",
```
```diff
@@ -11,8 +11,6 @@ scripts/ @goauthentik/backend
 tests/ @goauthentik/backend
 pyproject.toml @goauthentik/backend
 poetry.lock @goauthentik/backend
-go.mod @goauthentik/backend
-go.sum @goauthentik/backend
 # Infrastructure
 .github/ @goauthentik/infrastructure
 Dockerfile @goauthentik/infrastructure
```
Dockerfile (12 changes)

```diff
@@ -37,7 +37,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
 RUN npm run build
 
 # Stage 3: Build go proxy
-FROM --platform=${BUILDPLATFORM} docker.io/golang:1.22.0-bookworm AS go-builder
+FROM --platform=${BUILDPLATFORM} docker.io/golang:1.21.4-bookworm AS go-builder
 
 ARG TARGETOS
 ARG TARGETARCH
@@ -69,9 +69,9 @@ RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
     GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server
 
 # Stage 4: MaxMind GeoIP
-FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v6.1 as geoip
+FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v6.0 as geoip
 
-ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
+ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
 ENV GEOIPUPDATE_VERBOSE="true"
 ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
 ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"
@@ -83,7 +83,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
 
 # Stage 5: Python dependencies
-FROM docker.io/python:3.12.2-slim-bookworm AS python-deps
+FROM docker.io/python:3.11.5-bookworm AS python-deps
 
 WORKDIR /ak-root/poetry
 
@@ -108,7 +108,7 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
     poetry install --only=main --no-ansi --no-interaction
 
 # Stage 6: Run
-FROM docker.io/python:3.12.2-slim-bookworm AS final-image
+FROM docker.io/python:3.11.5-slim-bookworm AS final-image
 
 ARG GIT_BUILD_HASH
 ARG VERSION
@@ -125,7 +125,7 @@ WORKDIR /
 # We cannot cache this layer otherwise we'll end up with a bigger image
 RUN apt-get update && \
     # Required for runtime
-    apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 ca-certificates && \
+    apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 && \
     # Required for bootstrap & healtcheck
     apt-get install -y --no-install-recommends runit && \
     apt-get clean && \
```
Makefile (80 changes)

```diff
@@ -8,9 +8,6 @@ NPM_VERSION = $(shell python -m scripts.npm_version)
 PY_SOURCES = authentik tests scripts lifecycle
 DOCKER_IMAGE ?= "authentik:test"
 
-GEN_API_TS = "gen-ts-api"
-GEN_API_GO = "gen-go-api"
-
 pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
 pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
 pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
@@ -61,7 +58,7 @@ test: ## Run the server tests and produce a coverage report (locally)
 lint-fix: ## Lint and automatically fix errors in the python source code. Reports spelling errors.
 	isort $(PY_SOURCES)
 	black $(PY_SOURCES)
-	ruff --fix $(PY_SOURCES)
+	ruff $(PY_SOURCES)
 	codespell -w $(CODESPELL_ARGS)
 
 lint: ## Lint the python and golang sources
@@ -70,26 +67,16 @@ lint: ## Lint the python and golang sources
 	pylint $(PY_SOURCES)
 	golangci-lint run -v
 
-core-install:
-	poetry install
-
 migrate: ## Run the Authentik Django server's migrations
 	python -m lifecycle.migrate
 
-i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service
+i18n-extract: i18n-extract-core web-i18n-extract ## Extract strings that require translation into files to send to a translation service
 
-core-i18n-extract:
-	ak makemessages \
-		--add-location file \
-		--no-obsolete \
-		--ignore web \
-		--ignore internal \
-		--ignore ${GEN_API_TS} \
-		--ignore ${GEN_API_GO} \
-		--ignore website \
-		-l en
+i18n-extract-core:
+	ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en
 
-install: web-install website-install core-install ## Install all requires dependencies for `web`, `website` and `core`
+install: web-install website-install ## Install all requires dependencies for `web`, `website` and `core`
+	poetry install
 
 dev-drop-db:
 	dropdb -U ${pg_user} -h ${pg_host} ${pg_name}
@@ -107,14 +94,8 @@ dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik
 #########################
 
 gen-build: ## Extract the schema from the database
-	AUTHENTIK_DEBUG=true \
-	AUTHENTIK_TENANTS__ENABLED=true \
-	AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
-	ak make_blueprint_schema > blueprints/schema.json
-	AUTHENTIK_DEBUG=true \
-	AUTHENTIK_TENANTS__ENABLED=true \
-	AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
-	ak spectacular --file schema.yml
+	AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
+	AUTHENTIK_DEBUG=true ak spectacular --file schema.yml
 
 gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
 	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
@@ -125,60 +106,53 @@ gen-diff: ## (Release) generate the changelog diff between the current schema a
 	docker run \
 		--rm -v ${PWD}:/local \
 		--user ${UID}:${GID} \
-		docker.io/openapitools/openapi-diff:2.1.0-beta.8 \
+		docker.io/openapitools/openapi-diff:2.1.0-beta.6 \
 		--markdown /local/diff.md \
 		/local/old_schema.yml /local/schema.yml
 	rm old_schema.yml
-	sed -i 's/{/{/g' diff.md
-	sed -i 's/}/}/g' diff.md
 	npx prettier --write diff.md
 
-gen-clean-ts: ## Remove generated API client for Typescript
-	rm -rf ./${GEN_API_TS}/
-	rm -rf ./web/node_modules/@goauthentik/api/
-
-gen-clean-go: ## Remove generated API client for Go
-	rm -rf ./${GEN_API_GO}/
-
-gen-clean: gen-clean-ts gen-clean-go ## Remove generated API clients
-
-gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescript into the authentik UI Application
+gen-clean:
+	rm -rf web/api/src/
+	rm -rf api/
+
+gen-client-ts: ## Build and install the authentik API for Typescript into the authentik UI Application
 	docker run \
 		--rm -v ${PWD}:/local \
 		--user ${UID}:${GID} \
 		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
 		-i /local/schema.yml \
 		-g typescript-fetch \
-		-o /local/${GEN_API_TS} \
+		-o /local/gen-ts-api \
 		-c /local/scripts/api-ts-config.yaml \
 		--additional-properties=npmVersion=${NPM_VERSION} \
 		--git-repo-id authentik \
 		--git-user-id goauthentik
 	mkdir -p web/node_modules/@goauthentik/api
-	cd ./${GEN_API_TS} && npm i
-	\cp -rf ./${GEN_API_TS}/* web/node_modules/@goauthentik/api
+	cd gen-ts-api && npm i
+	\cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api
 
-gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
-	mkdir -p ./${GEN_API_GO} ./${GEN_API_GO}/templates
-	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./${GEN_API_GO}/config.yaml
-	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./${GEN_API_GO}/templates/README.mustache
-	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./${GEN_API_GO}/templates/go.mod.mustache
-	cp schema.yml ./${GEN_API_GO}/
+gen-client-go: ## Build and install the authentik API for Golang
+	mkdir -p ./gen-go-api ./gen-go-api/templates
+	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./gen-go-api/config.yaml
+	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./gen-go-api/templates/README.mustache
+	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./gen-go-api/templates/go.mod.mustache
+	cp schema.yml ./gen-go-api/
 	docker run \
-		--rm -v ${PWD}/${GEN_API_GO}:/local \
+		--rm -v ${PWD}/gen-go-api:/local \
 		--user ${UID}:${GID} \
 		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
 		-i /local/schema.yml \
 		-g go \
 		-o /local/ \
 		-c /local/config.yaml
-	go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO}
-	rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/
+	go mod edit -replace goauthentik.io/api/v3=./gen-go-api
+	rm -rf ./gen-go-api/config.yaml ./gen-go-api/templates/
 
 gen-dev-config: ## Generate a local development config file
 	python -m scripts.generate_config
 
-gen: gen-build gen-client-ts
+gen: gen-build gen-clean gen-client-ts
 
 #########################
 ## Web
@@ -187,7 +161,7 @@ gen: gen-build gen-client-ts
 web-build: web-install ## Build the Authentik UI
 	cd web && npm run build
 
-web: web-lint-fix web-lint web-check-compile ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
+web: web-lint-fix web-lint web-check-compile web-i18n-extract ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
 
 web-install: ## Install the necessary libraries to build the Authentik UI
 	cd web && npm ci
```
|
|||||||
authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce it's existence and release the fixed version.
|
authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce it's existence and release the fixed version.
|
||||||
|
|
||||||
## Independent audits and pentests
|
|
||||||
|
|
||||||
In May/June of 2023 [Cure53](https://cure53.de) conducted an audit and pentest. The [results](https://cure53.de/pentest-report_authentik.pdf) are published on the [Cure53 website](https://cure53.de/#publications-2023). For more details about authentik's response to the findings of the audit refer to [2023-06 Cure53 Code audit](https://goauthentik.io/docs/security/2023-06-cure53).
|
|
||||||
|
|
||||||
## What authentik classifies as a CVE
|
## What authentik classifies as a CVE
|
||||||
|
|
||||||
CVE (Common Vulnerability and Exposure) is a system designed to aggregate all vulnerabilities. As such, a CVE will be issued when there is a either vulnerability or exposure. Per NIST, A vulnerability is:
|
CVE (Common Vulnerability and Exposure) is a system designed to aggregate all vulnerabilities. As such, a CVE will be issued when there is a either vulnerability or exposure. Per NIST, A vulnerability is:
|
||||||
|
@@ -1,9 +1,8 @@
 """authentik root module"""

 from os import environ
 from typing import Optional

-__version__ = "2024.2.0"
+__version__ = "2023.10.7"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@@ -1,5 +1,4 @@
 """Meta API"""

 from drf_spectacular.utils import extend_schema
 from rest_framework.fields import CharField
 from rest_framework.permissions import IsAuthenticated
@@ -1,5 +1,4 @@
 """authentik administration metrics"""

 from datetime import timedelta

 from django.db.models.functions import ExtractHour
@@ -1,5 +1,4 @@
 """authentik administration overview"""

 import platform
 from datetime import datetime
 from sys import version as python_version
@@ -14,7 +13,6 @@ from rest_framework.response import Response
 from rest_framework.views import APIView

 from authentik.core.api.utils import PassiveSerializer
-from authentik.lib.config import CONFIG
 from authentik.lib.utils.reflection import get_env
 from authentik.outposts.apps import MANAGED_OUTPOST
 from authentik.outposts.models import Outpost
@@ -32,16 +30,15 @@ class RuntimeDict(TypedDict):
     uname: str


-class SystemInfoSerializer(PassiveSerializer):
+class SystemSerializer(PassiveSerializer):
     """Get system information."""

     http_headers = SerializerMethodField()
     http_host = SerializerMethodField()
     http_is_secure = SerializerMethodField()
     runtime = SerializerMethodField()
-    brand = SerializerMethodField()
+    tenant = SerializerMethodField()
     server_time = SerializerMethodField()
-    embedded_outpost_disabled = SerializerMethodField()
     embedded_outpost_host = SerializerMethodField()

     def get_http_headers(self, request: Request) -> dict[str, str]:
@@ -72,18 +69,14 @@ class SystemInfoSerializer(PassiveSerializer):
             "uname": " ".join(platform.uname()),
         }

-    def get_brand(self, request: Request) -> str:
-        """Currently active brand"""
-        return str(request._request.brand)
+    def get_tenant(self, request: Request) -> str:
+        """Currently active tenant"""
+        return str(request._request.tenant)

     def get_server_time(self, request: Request) -> datetime:
         """Current server time"""
         return now()

-    def get_embedded_outpost_disabled(self, request: Request) -> bool:
-        """Whether the embedded outpost is disabled"""
-        return CONFIG.get_bool("outposts.disable_embedded_outpost", False)
-
     def get_embedded_outpost_host(self, request: Request) -> str:
         """Get the FQDN configured on the embedded outpost"""
         outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
@@ -98,14 +91,14 @@ class SystemView(APIView):
     permission_classes = [HasPermission("authentik_rbac.view_system_info")]
     pagination_class = None
     filter_backends = []
-    serializer_class = SystemInfoSerializer
+    serializer_class = SystemSerializer

-    @extend_schema(responses={200: SystemInfoSerializer(many=False)})
+    @extend_schema(responses={200: SystemSerializer(many=False)})
     def get(self, request: Request) -> Response:
         """Get system information."""
-        return Response(SystemInfoSerializer(request).data)
+        return Response(SystemSerializer(request).data)

-    @extend_schema(responses={200: SystemInfoSerializer(many=False)})
+    @extend_schema(responses={200: SystemSerializer(many=False)})
     def post(self, request: Request) -> Response:
         """Get system information."""
-        return Response(SystemInfoSerializer(request).data)
+        return Response(SystemSerializer(request).data)
authentik/admin/api/tasks.py (Normal file, 134 lines)
@@ -0,0 +1,134 @@
+"""Tasks API"""
+from importlib import import_module
+
+from django.contrib import messages
+from django.http.response import Http404
+from django.utils.translation import gettext_lazy as _
+from drf_spectacular.types import OpenApiTypes
+from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
+from rest_framework.decorators import action
+from rest_framework.fields import (
+    CharField,
+    ChoiceField,
+    DateTimeField,
+    ListField,
+    SerializerMethodField,
+)
+from rest_framework.request import Request
+from rest_framework.response import Response
+from rest_framework.viewsets import ViewSet
+from structlog.stdlib import get_logger
+
+from authentik.api.decorators import permission_required
+from authentik.core.api.utils import PassiveSerializer
+from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
+from authentik.rbac.permissions import HasPermission
+
+LOGGER = get_logger()
+
+
+class TaskSerializer(PassiveSerializer):
+    """Serialize TaskInfo and TaskResult"""
+
+    task_name = CharField()
+    task_description = CharField()
+    task_finish_timestamp = DateTimeField(source="finish_time")
+    task_duration = SerializerMethodField()
+
+    status = ChoiceField(
+        source="result.status.name",
+        choices=[(x.name, x.name) for x in TaskResultStatus],
+    )
+    messages = ListField(source="result.messages")
+
+    def get_task_duration(self, instance: TaskInfo) -> int:
+        """Get the duration a task took to run"""
+        return max(instance.finish_timestamp - instance.start_timestamp, 0)
+
+    def to_representation(self, instance: TaskInfo):
+        """When a new version of authentik adds fields to TaskInfo,
+        the API will fail with an AttributeError, as the classes
+        are pickled in cache. In that case, just delete the info"""
+        try:
+            return super().to_representation(instance)
+        # pylint: disable=broad-except
+        except Exception:  # pragma: no cover
+            if isinstance(self.instance, list):
+                for inst in self.instance:
+                    inst.delete()
+            else:
+                self.instance.delete()
+            return {}
+
+
+class TaskViewSet(ViewSet):
+    """Read-only view set that returns all background tasks"""
+
+    permission_classes = [HasPermission("authentik_rbac.view_system_tasks")]
+    serializer_class = TaskSerializer
+
+    @extend_schema(
+        responses={
+            200: TaskSerializer(many=False),
+            404: OpenApiResponse(description="Task not found"),
+        },
+        parameters=[
+            OpenApiParameter(
+                "id",
+                type=OpenApiTypes.STR,
+                location=OpenApiParameter.PATH,
+                required=True,
+            ),
+        ],
+    )
+    def retrieve(self, request: Request, pk=None) -> Response:
+        """Get a single system task"""
+        task = TaskInfo.by_name(pk)
+        if not task:
+            raise Http404
+        return Response(TaskSerializer(task, many=False).data)
+
+    @extend_schema(responses={200: TaskSerializer(many=True)})
+    def list(self, request: Request) -> Response:
+        """List system tasks"""
+        tasks = sorted(TaskInfo.all().values(), key=lambda task: task.task_name)
+        return Response(TaskSerializer(tasks, many=True).data)
+
+    @permission_required(None, ["authentik_rbac.run_system_tasks"])
+    @extend_schema(
+        request=OpenApiTypes.NONE,
+        responses={
+            204: OpenApiResponse(description="Task retried successfully"),
+            404: OpenApiResponse(description="Task not found"),
+            500: OpenApiResponse(description="Failed to retry task"),
+        },
+        parameters=[
+            OpenApiParameter(
+                "id",
+                type=OpenApiTypes.STR,
+                location=OpenApiParameter.PATH,
+                required=True,
+            ),
+        ],
+    )
+    @action(detail=True, methods=["post"])
+    def retry(self, request: Request, pk=None) -> Response:
+        """Retry task"""
+        task = TaskInfo.by_name(pk)
+        if not task:
+            raise Http404
+        try:
+            task_module = import_module(task.task_call_module)
+            task_func = getattr(task_module, task.task_call_func)
+            LOGGER.debug("Running task", task=task_func)
+            task_func.delay(*task.task_call_args, **task.task_call_kwargs)
+            messages.success(
+                self.request,
+                _("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}),
+            )
+            return Response(status=204)
+        except (ImportError, AttributeError):  # pragma: no cover
+            LOGGER.warning("Failed to run task, remove state", task=task)
+            # if we get an import error, the module path has probably changed
+            task.delete()
+            return Response(status=500)
@@ -1,5 +1,4 @@
 """authentik administration overview"""

 from django.core.cache import cache
 from drf_spectacular.utils import extend_schema
 from packaging.version import parse
@@ -1,5 +1,4 @@
 """authentik administration overview"""

 from django.conf import settings
 from drf_spectacular.utils import extend_schema, inline_serializer
 from rest_framework.fields import IntegerField
@@ -1,5 +1,4 @@
 """authentik admin app config"""

 from prometheus_client import Gauge, Info

 from authentik.blueprints.apps import ManagedAppConfig
@@ -15,3 +14,7 @@ class AuthentikAdminConfig(ManagedAppConfig):
     label = "authentik_admin"
     verbose_name = "authentik Admin"
     default = True
+
+    def reconcile_load_admin_signals(self):
+        """Load admin signals"""
+        self.import_module("authentik.admin.signals")
@@ -1,5 +1,4 @@
 """authentik admin settings"""

 from celery.schedules import crontab

 from authentik.lib.utils.time import fqdn_rand
@@ -1,7 +1,7 @@
 """admin signals"""

 from django.dispatch import receiver

+from authentik.admin.api.tasks import TaskInfo
 from authentik.admin.apps import GAUGE_WORKERS
 from authentik.root.celery import CELERY_APP
 from authentik.root.monitoring import monitoring_set
@@ -12,3 +12,10 @@ def monitoring_set_workers(sender, **kwargs):
     """Set worker gauge"""
     count = len(CELERY_APP.control.ping(timeout=0.5))
     GAUGE_WORKERS.set(count)
+
+
+@receiver(monitoring_set)
+def monitoring_set_tasks(sender, **kwargs):
+    """Set task gauges"""
+    for task in TaskInfo.all().values():
+        task.update_metrics()
@@ -1,5 +1,4 @@
 """authentik admin tasks"""

 import re

 from django.core.cache import cache
@@ -12,7 +11,12 @@ from structlog.stdlib import get_logger
 from authentik import __version__, get_build_hash
 from authentik.admin.apps import PROM_INFO
 from authentik.events.models import Event, EventAction, Notification
-from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
+from authentik.events.monitored_tasks import (
+    MonitoredTask,
+    TaskResult,
+    TaskResultStatus,
+    prefill_task,
+)
 from authentik.lib.config import CONFIG
 from authentik.lib.utils.http import get_http_session
 from authentik.root.celery import CELERY_APP
@@ -50,13 +54,13 @@ def clear_update_notifications():
             notification.delete()


-@CELERY_APP.task(bind=True, base=SystemTask)
+@CELERY_APP.task(bind=True, base=MonitoredTask)
 @prefill_task
-def update_latest_version(self: SystemTask):
+def update_latest_version(self: MonitoredTask):
     """Update latest version info"""
     if CONFIG.get_bool("disable_update_check"):
         cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
-        self.set_status(TaskStatus.WARNING, "Version check disabled.")
+        self.set_status(TaskResult(TaskResultStatus.WARNING, messages=["Version check disabled."]))
         return
     try:
         response = get_http_session().get(
@@ -66,7 +70,9 @@ def update_latest_version(self: SystemTask):
         data = response.json()
         upstream_version = data.get("stable", {}).get("version")
         cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT)
-        self.set_status(TaskStatus.SUCCESSFUL, "Successfully updated latest Version")
+        self.set_status(
+            TaskResult(TaskResultStatus.SUCCESSFUL, ["Successfully updated latest Version"])
+        )
         _set_prom_info()
         # Check if upstream version is newer than what we're running,
         # and if no event exists yet, create one.
@@ -83,7 +89,7 @@ def update_latest_version(self: SystemTask):
             Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
     except (RequestException, IndexError) as exc:
         cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
-        self.set_error(exc)
+        self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))


 _set_prom_info()
@@ -1,5 +1,4 @@
 """test admin api"""

 from json import loads

 from django.test import TestCase
@@ -8,6 +7,8 @@ from django.urls import reverse
 from authentik import __version__
 from authentik.blueprints.tests import reconcile_app
 from authentik.core.models import Group, User
+from authentik.core.tasks import clean_expired_models
+from authentik.events.monitored_tasks import TaskResultStatus
 from authentik.lib.generators import generate_id


@@ -22,6 +23,53 @@ class TestAdminAPI(TestCase):
         self.group.save()
         self.client.force_login(self.user)

+    def test_tasks(self):
+        """Test Task API"""
+        clean_expired_models.delay()
+        response = self.client.get(reverse("authentik_api:admin_system_tasks-list"))
+        self.assertEqual(response.status_code, 200)
+        body = loads(response.content)
+        self.assertTrue(any(task["task_name"] == "clean_expired_models" for task in body))
+
+    def test_tasks_single(self):
+        """Test Task API (read single)"""
+        clean_expired_models.delay()
+        response = self.client.get(
+            reverse(
+                "authentik_api:admin_system_tasks-detail",
+                kwargs={"pk": "clean_expired_models"},
+            )
+        )
+        self.assertEqual(response.status_code, 200)
+        body = loads(response.content)
+        self.assertEqual(body["status"], TaskResultStatus.SUCCESSFUL.name)
+        self.assertEqual(body["task_name"], "clean_expired_models")
+        response = self.client.get(
+            reverse("authentik_api:admin_system_tasks-detail", kwargs={"pk": "qwerqwer"})
+        )
+        self.assertEqual(response.status_code, 404)
+
+    def test_tasks_retry(self):
+        """Test Task API (retry)"""
+        clean_expired_models.delay()
+        response = self.client.post(
+            reverse(
+                "authentik_api:admin_system_tasks-retry",
+                kwargs={"pk": "clean_expired_models"},
+            )
+        )
+        self.assertEqual(response.status_code, 204)
+
+    def test_tasks_retry_404(self):
+        """Test Task API (retry, 404)"""
+        response = self.client.post(
+            reverse(
+                "authentik_api:admin_system_tasks-retry",
+                kwargs={"pk": "qwerqewrqrqewrqewr"},
+            )
+        )
+        self.assertEqual(response.status_code, 404)
+
     def test_version(self):
         """Test Version API"""
         response = self.client.get(reverse("authentik_api:admin_version"))
@@ -1,5 +1,4 @@
 """test admin tasks"""

 from django.core.cache import cache
 from django.test import TestCase
 from requests_mock import Mocker
@@ -1,14 +1,15 @@
 """API URLs"""

 from django.urls import path

 from authentik.admin.api.meta import AppsViewSet, ModelViewSet
 from authentik.admin.api.metrics import AdministrationMetricsViewSet
 from authentik.admin.api.system import SystemView
+from authentik.admin.api.tasks import TaskViewSet
 from authentik.admin.api.version import VersionView
 from authentik.admin.api.workers import WorkerView

 api_urlpatterns = [
+    ("admin/system_tasks", TaskViewSet, "admin_system_tasks"),
     ("admin/apps", AppsViewSet, "apps"),
     ("admin/models", ModelViewSet, "models"),
     path(
@@ -1,5 +1,4 @@
 """API Authentication"""

 from hmac import compare_digest
 from typing import Any, Optional

@@ -1,5 +1,4 @@
 """API Authorization"""

 from django.conf import settings
 from django.db.models import Model
 from django.db.models.query import QuerySet
@@ -1,5 +1,4 @@
 """API Decorators"""

 from functools import wraps
 from typing import Callable, Optional

@@ -1,5 +1,4 @@
 """Pagination which includes total pages and current page"""

 from rest_framework import pagination
 from rest_framework.response import Response

@@ -1,5 +1,4 @@
 """Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""

 from django.utils.translation import gettext_lazy as _
 from drf_spectacular.generators import SchemaGenerator
 from drf_spectacular.plumbing import (
@@ -3,7 +3,7 @@
 {% load static %}

 {% block title %}
-    API Browser - {{ brand.branding_title }}
+    API Browser - {{ tenant.branding_title }}
 {% endblock %}

 {% block head %}
@@ -1,5 +1,4 @@
 """Test API Authentication"""

 import json
 from base64 import b64encode

@@ -13,8 +12,6 @@ from authentik.blueprints.tests import reconcile_app
 from authentik.core.models import Token, TokenIntents, User, UserTypes
 from authentik.core.tests.utils import create_test_admin_user, create_test_flow
 from authentik.lib.generators import generate_id
-from authentik.outposts.apps import MANAGED_OUTPOST
-from authentik.outposts.models import Outpost
 from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
 from authentik.providers.oauth2.models import AccessToken, OAuth2Provider

@@ -52,12 +49,8 @@ class TestAPIAuth(TestCase):
         with self.assertRaises(AuthenticationFailed):
             bearer_auth(f"Bearer {token.key}".encode())

-    @reconcile_app("authentik_outposts")
-    def test_managed_outpost_fail(self):
+    def test_managed_outpost(self):
         """Test managed outpost"""
-        outpost = Outpost.objects.filter(managed=MANAGED_OUTPOST).first()
-        outpost.user.delete()
-        outpost.delete()
         with self.assertRaises(AuthenticationFailed):
             bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
@@ -1,5 +1,4 @@
 """Test config API"""

 from json import loads

 from django.urls import reverse
@@ -1,5 +1,4 @@
 """test decorators api"""

 from django.urls import reverse
 from guardian.shortcuts import assign_perm
 from rest_framework.test import APITestCase
@@ -1,5 +1,4 @@
 """Schema generation tests"""

 from django.urls import reverse
 from rest_framework.test import APITestCase
 from yaml import safe_load
@@ -1,5 +1,4 @@
 """authentik API Modelviewset tests"""

 from typing import Callable

 from django.test import TestCase
@@ -1,5 +1,4 @@
 """authentik api urls"""

 from django.urls import include, path

 from authentik.api.v3.urls import urlpatterns as v3_urls
@@ -1,5 +1,4 @@
"""core Configs API"""
|
"""core Configs API"""
|
||||||
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
@ -20,7 +19,7 @@ from rest_framework.response import Response
|
|||||||
from rest_framework.views import APIView
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
from authentik.core.api.utils import PassiveSerializer
|
from authentik.core.api.utils import PassiveSerializer
|
||||||
from authentik.events.context_processors.base import get_context_processors
|
from authentik.events.geo import GEOIP_READER
|
||||||
from authentik.lib.config import CONFIG
|
from authentik.lib.config import CONFIG
|
||||||
|
|
||||||
capabilities = Signal()
|
capabilities = Signal()
|
||||||
@ -31,7 +30,6 @@ class Capabilities(models.TextChoices):
|
|||||||
|
|
||||||
CAN_SAVE_MEDIA = "can_save_media"
|
CAN_SAVE_MEDIA = "can_save_media"
|
||||||
CAN_GEO_IP = "can_geo_ip"
|
CAN_GEO_IP = "can_geo_ip"
|
||||||
CAN_ASN = "can_asn"
|
|
||||||
CAN_IMPERSONATE = "can_impersonate"
|
CAN_IMPERSONATE = "can_impersonate"
|
||||||
CAN_DEBUG = "can_debug"
|
CAN_DEBUG = "can_debug"
|
||||||
IS_ENTERPRISE = "is_enterprise"
|
IS_ENTERPRISE = "is_enterprise"
|
||||||
@ -70,10 +68,9 @@ class ConfigView(APIView):
|
|||||||
deb_test = settings.DEBUG or settings.TEST
|
deb_test = settings.DEBUG or settings.TEST
|
||||||
if Path(settings.MEDIA_ROOT).is_mount() or deb_test:
|
if Path(settings.MEDIA_ROOT).is_mount() or deb_test:
|
||||||
caps.append(Capabilities.CAN_SAVE_MEDIA)
|
caps.append(Capabilities.CAN_SAVE_MEDIA)
|
||||||
for processor in get_context_processors():
|
if GEOIP_READER.enabled:
|
||||||
if cap := processor.capability():
|
caps.append(Capabilities.CAN_GEO_IP)
|
||||||
caps.append(cap)
|
if CONFIG.get_bool("impersonation"):
|
||||||
if self.request.tenant.impersonation:
|
|
||||||
caps.append(Capabilities.CAN_IMPERSONATE)
|
caps.append(Capabilities.CAN_IMPERSONATE)
|
||||||
if settings.DEBUG: # pragma: no cover
|
if settings.DEBUG: # pragma: no cover
|
||||||
caps.append(Capabilities.CAN_DEBUG)
|
caps.append(Capabilities.CAN_DEBUG)
|
||||||
@ -96,10 +93,10 @@ class ConfigView(APIView):
|
|||||||
"traces_sample_rate": float(CONFIG.get("error_reporting.sample_rate", 0.4)),
|
"traces_sample_rate": float(CONFIG.get("error_reporting.sample_rate", 0.4)),
|
||||||
},
|
},
|
||||||
"capabilities": self.get_capabilities(),
|
"capabilities": self.get_capabilities(),
|
||||||
"cache_timeout": CONFIG.get_int("cache.timeout"),
|
"cache_timeout": CONFIG.get_int("redis.cache_timeout"),
|
||||||
"cache_timeout_flows": CONFIG.get_int("cache.timeout_flows"),
|
"cache_timeout_flows": CONFIG.get_int("redis.cache_timeout_flows"),
|
||||||
"cache_timeout_policies": CONFIG.get_int("cache.timeout_policies"),
|
"cache_timeout_policies": CONFIG.get_int("redis.cache_timeout_policies"),
|
||||||
"cache_timeout_reputation": CONFIG.get_int("cache.timeout_reputation"),
|
"cache_timeout_reputation": CONFIG.get_int("redis.cache_timeout_reputation"),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@@ -1,5 +1,4 @@
 """api v3 urls"""

 from importlib import import_module

 from django.urls import path
@@ -1,5 +1,4 @@
 """General API Views"""

 from typing import Any

 from django.urls import reverse
@@ -1,10 +1,9 @@
 """Serializer mixin for managed models"""

 from django.utils.translation import gettext_lazy as _
 from drf_spectacular.utils import extend_schema, inline_serializer
 from rest_framework.decorators import action
 from rest_framework.exceptions import ValidationError
-from rest_framework.fields import CharField, DateTimeField
+from rest_framework.fields import CharField, DateTimeField, JSONField
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ListSerializer, ModelSerializer
@@ -16,7 +15,7 @@ from authentik.blueprints.v1.importer import Importer
 from authentik.blueprints.v1.oci import OCI_PREFIX
 from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
 from authentik.core.api.used_by import UsedByMixin
-from authentik.core.api.utils import JSONDictField, PassiveSerializer
+from authentik.core.api.utils import PassiveSerializer


 class ManagedSerializer:
@@ -29,7 +28,7 @@ class MetadataSerializer(PassiveSerializer):
     """Serializer for blueprint metadata"""

     name = CharField()
-    labels = JSONDictField()
+    labels = JSONField()


 class BlueprintInstanceSerializer(ModelSerializer):
@@ -11,42 +11,23 @@ from structlog.stdlib import BoundLogger, get_logger
 class ManagedAppConfig(AppConfig):
     """Basic reconciliation logic for apps"""

-    logger: BoundLogger
+    _logger: BoundLogger

-    RECONCILE_GLOBAL_PREFIX: str = "reconcile_global_"
-    RECONCILE_TENANT_PREFIX: str = "reconcile_tenant_"
-
     def __init__(self, app_name: str, *args, **kwargs) -> None:
         super().__init__(app_name, *args, **kwargs)
-        self.logger = get_logger().bind(app_name=app_name)
+        self._logger = get_logger().bind(app_name=app_name)

     def ready(self) -> None:
-        self.import_related()
-        self.reconcile_global()
-        self.reconcile_tenant()
+        self.reconcile()
         return super().ready()

-    def import_related(self):
-        """Automatically import related modules which rely on just being imported
-        to register themselves (mainly django signals and celery tasks)"""
-
-        def import_relative(rel_module: str):
-            try:
-                module_name = f"{self.name}.{rel_module}"
-                import_module(module_name)
-                self.logger.info("Imported related module", module=module_name)
-            except ModuleNotFoundError:
-                pass
-
-        import_relative("checks")
-        import_relative("tasks")
-        import_relative("signals")
-
     def import_module(self, path: str):
         """Load module"""
         import_module(path)

-    def _reconcile(self, prefix: str) -> None:
+    def reconcile(self) -> None:
+        """reconcile ourselves"""
+        prefix = "reconcile_"
         for meth_name in dir(self):
             meth = getattr(self, meth_name)
             if not ismethod(meth):
@@ -55,34 +36,11 @@ class ManagedAppConfig(AppConfig):
                 continue
             name = meth_name.replace(prefix, "")
             try:
-                self.logger.debug("Starting reconciler", name=name)
+                self._logger.debug("Starting reconciler", name=name)
                 meth()
-                self.logger.debug("Successfully reconciled", name=name)
+                self._logger.debug("Successfully reconciled", name=name)
             except (DatabaseError, ProgrammingError, InternalError) as exc:
-                self.logger.warning("Failed to run reconcile", name=name, exc=exc)
-
-    def reconcile_tenant(self) -> None:
-        """reconcile ourselves for tenanted methods"""
-        from authentik.tenants.models import Tenant
-
-        try:
-            tenants = list(Tenant.objects.filter(ready=True))
-        except (DatabaseError, ProgrammingError, InternalError) as exc:
-            self.logger.debug("Failed to get tenants to run reconcile", exc=exc)
-            return
-        for tenant in tenants:
-            with tenant:
-                self._reconcile(self.RECONCILE_TENANT_PREFIX)
-
-    def reconcile_global(self) -> None:
-        """
-        reconcile ourselves for global methods.
-        Used for signals, tasks, etc. Database queries should not be made in here.
-        """
-        from django_tenants.utils import get_public_schema_name, schema_context
-
-        with schema_context(get_public_schema_name()):
-            self._reconcile(self.RECONCILE_GLOBAL_PREFIX)
+                self._logger.warning("Failed to run reconcile", name=name, exc=exc)


 class AuthentikBlueprintsConfig(ManagedAppConfig):
@@ -93,11 +51,11 @@ class AuthentikBlueprintsConfig(ManagedAppConfig):
     verbose_name = "authentik Blueprints"
     default = True

-    def reconcile_global_load_blueprints_v1_tasks(self):
+    def reconcile_load_blueprints_v1_tasks(self):
         """Load v1 tasks"""
         self.import_module("authentik.blueprints.v1.tasks")

-    def reconcile_tenant_blueprints_discovery(self):
+    def reconcile_blueprints_discovery(self):
         """Run blueprint discovery"""
         from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints
@@ -1,5 +1,4 @@
 """Apply blueprint from commandline"""

 from sys import exit as sys_exit

 from django.core.management.base import BaseCommand, no_translations
@@ -7,7 +6,6 @@ from structlog.stdlib import get_logger

 from authentik.blueprints.models import BlueprintInstance
 from authentik.blueprints.v1.importer import Importer
-from authentik.tenants.models import Tenant

 LOGGER = get_logger()

@@ -18,8 +16,6 @@ class Command(BaseCommand):
     @no_translations
     def handle(self, *args, **options):
         """Apply all blueprints in order, abort when one fails to import"""
-        for tenant in Tenant.objects.filter(ready=True):
-            with tenant:
         for blueprint_path in options.get("blueprints", []):
             content = BlueprintInstance(path=blueprint_path).retrieve()
             importer = Importer.from_string(content)
@@ -1,19 +1,17 @@
 """Export blueprint of current authentik install"""

-from django.core.management.base import no_translations
+from django.core.management.base import BaseCommand, no_translations
 from structlog.stdlib import get_logger

 from authentik.blueprints.v1.exporter import Exporter
-from authentik.tenants.management import TenantCommand

 LOGGER = get_logger()


-class Command(TenantCommand):
+class Command(BaseCommand):
     """Export blueprint of current authentik install"""

     @no_translations
-    def handle_per_tenant(self, *args, **options):
+    def handle(self, *args, **options):
         """Export blueprint of current authentik install"""
         exporter = Exporter()
         self.stdout.write(exporter.export_to_string())
@@ -1,5 +1,4 @@
 """Generate JSON Schema for blueprints"""

 from json import dumps
 from typing import Any

@@ -14,7 +14,7 @@ from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM
 from authentik.lib.config import CONFIG


-def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
+def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
     """Check if blueprint should be imported"""
     from authentik.blueprints.models import BlueprintInstanceStatus
     from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
@@ -29,9 +29,7 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
     if version != 1:
         return
     blueprint_file.seek(0)
-    instance: BlueprintInstance = (
-        BlueprintInstance.objects.using(db_alias).filter(path=path).first()
-    )
+    instance: BlueprintInstance = BlueprintInstance.objects.filter(path=path).first()
     rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
     meta = None
     if metadata:
@@ -39,7 +37,7 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
         if meta.labels.get(LABEL_AUTHENTIK_INSTANTIATE, "").lower() == "false":
             return
     if not instance:
-        BlueprintInstance.objects.using(db_alias).create(
+        instance = BlueprintInstance(
             name=meta.name if meta else str(rel_path),
             path=str(rel_path),
             context={},
@@ -49,6 +47,7 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
             last_applied_hash="",
             metadata=metadata or {},
         )
+    instance.save()


 def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
@@ -57,7 +56,7 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEdit

     db_alias = schema_editor.connection.alias
     for file in glob(f"{CONFIG.get('blueprints_dir')}/**/*.yaml", recursive=True):
-        check_blueprint_v1_file(BlueprintInstance, db_alias, Path(file))
+        check_blueprint_v1_file(BlueprintInstance, Path(file))

     for blueprint in BlueprintInstance.objects.using(db_alias).all():
         # If we already have flows (and we should always run before flow migrations)
@@ -1,5 +1,4 @@
 """blueprint models"""

 from pathlib import Path
 from uuid import uuid4

@@ -1,5 +1,4 @@
 """blueprint Settings"""

 from celery.schedules import crontab

 from authentik.lib.utils.time import fqdn_rand
@@ -1,5 +1,4 @@
 """Blueprint helpers"""

 from functools import wraps
 from typing import Callable

@@ -39,7 +38,7 @@ def reconcile_app(app_name: str):
     def wrapper(*args, **kwargs):
         config = apps.get_app_config(app_name)
         if isinstance(config, ManagedAppConfig):
-            config.ready()
+            config.reconcile()
         return func(*args, **kwargs)

     return wrapper
@@ -1,5 +1,4 @@
 """authentik managed models tests"""

 from django.test import TestCase

 from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
@@ -1,5 +1,4 @@
 """Test blueprints OCI"""

 from django.test import TransactionTestCase
 from requests_mock import Mocker

@@ -1,5 +1,4 @@
 """test packaged blueprints"""

 from pathlib import Path
 from typing import Callable

@@ -8,16 +7,16 @@ from django.test import TransactionTestCase
 from authentik.blueprints.models import BlueprintInstance
 from authentik.blueprints.tests import apply_blueprint
 from authentik.blueprints.v1.importer import Importer
-from authentik.brands.models import Brand
+from authentik.tenants.models import Tenant


 class TestPackaged(TransactionTestCase):
     """Empty class, test methods are added dynamically"""

-    @apply_blueprint("default/default-brand.yaml")
+    @apply_blueprint("default/default-tenant.yaml")
     def test_decorator_static(self):
         """Test @apply_blueprint decorator"""
-        self.assertTrue(Brand.objects.filter(domain="authentik-default").exists())
+        self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())


 def blueprint_tester(file_name: Path) -> Callable:
@@ -1,5 +1,4 @@
 """authentik managed models tests"""

 from typing import Callable, Type

 from django.apps import apps
@@ -1,5 +1,4 @@
 """Test blueprints v1"""

 from os import environ

 from django.test import TransactionTestCase
@@ -1,5 +1,4 @@
 """Test blueprints v1 api"""

 from json import loads
 from tempfile import NamedTemporaryFile, mkdtemp

@@ -1,5 +1,4 @@
 """Test blueprints v1"""

 from django.test import TransactionTestCase

 from authentik.blueprints.v1.importer import Importer
@@ -1,5 +1,4 @@
 """Test blueprints v1"""

 from django.test import TransactionTestCase

 from authentik.blueprints.v1.importer import Importer
@@ -1,5 +1,4 @@
 """Test blueprints v1"""

 from django.test import TransactionTestCase

 from authentik.blueprints.v1.importer import Importer
@@ -1,5 +1,4 @@
 """Test blueprints v1 tasks"""

 from hashlib import sha512
 from tempfile import NamedTemporaryFile, mkdtemp

@@ -1,5 +1,4 @@
 """API URLs"""

 from authentik.blueprints.api import BlueprintInstanceViewSet

 api_urlpatterns = [
@@ -1,5 +1,4 @@
 """transfer common classes"""

 from collections import OrderedDict
 from copy import copy
 from dataclasses import asdict, dataclass, field, is_dataclass
@@ -1,5 +1,4 @@
 """Blueprint exporter"""

 from typing import Iterable
 from uuid import UUID

@@ -8,6 +7,7 @@ from django.contrib.auth import get_user_model
 from django.db.models import Model, Q, QuerySet
 from django.utils.timezone import now
 from django.utils.translation import gettext as _
+from guardian.shortcuts import get_anonymous_user
 from yaml import dump

 from authentik.blueprints.v1.common import (
@@ -48,7 +48,7 @@ class Exporter:
         """Return a queryset for `model`. Can be used to filter some
         objects on some models"""
         if model == get_user_model():
-            return model.objects.exclude_anonymous()
+            return model.objects.exclude(pk=get_anonymous_user().pk)
         return model.objects.all()

     def _pre_export(self, blueprint: Blueprint):
@@ -1,5 +1,4 @@
 """Blueprint importer"""
-
 from contextlib import contextmanager
 from copy import deepcopy
 from typing import Any, Optional
@@ -8,14 +7,11 @@ from dacite.config import Config
 from dacite.core import from_dict
 from dacite.exceptions import DaciteError
 from deepmerge import always_merger
-from django.contrib.auth.models import Permission
-from django.contrib.contenttypes.models import ContentType
 from django.core.exceptions import FieldError
 from django.db.models import Model
 from django.db.models.query_utils import Q
 from django.db.transaction import atomic
 from django.db.utils import IntegrityError
-from guardian.models import UserObjectPermission
 from rest_framework.exceptions import ValidationError
 from rest_framework.serializers import BaseSerializer, Serializer
 from structlog.stdlib import BoundLogger, get_logger
@@ -39,20 +35,14 @@ from authentik.core.models import (
     Source,
     UserSourceConnection,
 )
-from authentik.enterprise.license import LicenseKey
 from authentik.enterprise.models import LicenseUsage
-from authentik.enterprise.providers.rac.models import ConnectionToken
-from authentik.events.models import SystemTask
 from authentik.events.utils import cleanse_dict
 from authentik.flows.models import FlowToken, Stage
 from authentik.lib.models import SerializerModel
 from authentik.lib.sentry import SentryIgnoredException
 from authentik.outposts.models import OutpostServiceConnection
 from authentik.policies.models import Policy, PolicyBindingModel
-from authentik.policies.reputation.models import Reputation
-from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
 from authentik.providers.scim.models import SCIMGroup, SCIMUser
-from authentik.tenants.models import Tenant
 
 # Context set when the serializer is created in a blueprint context
 # Update website/developer-docs/blueprints/v1/models.md when used
@@ -67,12 +57,8 @@ def excluded_models() -> list[type[Model]]:
     from django.contrib.auth.models import User as DjangoUser
 
     return (
-        # Django only classes
         DjangoUser,
         DjangoGroup,
-        ContentType,
-        Permission,
-        UserObjectPermission,
         # Base classes
         Provider,
         Source,
@@ -89,13 +75,6 @@ def excluded_models() -> list[type[Model]]:
         LicenseUsage,
         SCIMGroup,
         SCIMUser,
-        Tenant,
-        SystemTask,
-        ConnectionToken,
-        AuthorizationCode,
-        AccessToken,
-        RefreshToken,
-        Reputation,
     )
 
 
@@ -129,16 +108,12 @@ class Importer:
         self.__pk_map: dict[Any, Model] = {}
         self._import = blueprint
         self.logger = get_logger()
-        ctx = self.default_context()
+        ctx = {}
         always_merger.merge(ctx, self._import.context)
         if context:
             always_merger.merge(ctx, context)
         self._import.context = ctx
 
-    def default_context(self):
-        """Default context"""
-        return {"goauthentik.io/enterprise/licensed": LicenseKey.get_total().is_valid()}
-
     @staticmethod
     def from_string(yaml_input: str, context: dict | None = None) -> "Importer":
         """Parse YAML string and create blueprint importer from it"""
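The `Importer` hunk above swaps the default import context: the `-` side seeds it with `goauthentik.io/enterprise/licensed` via `default_context()`, the `+` side starts from an empty dict and only merges the blueprint's own context plus whatever the caller passes in. A short sketch of driving the importer with an explicit context, using only the `from_string()` and `apply()` calls visible in these hunks; the YAML body and the context key are illustrative:

```python
from authentik.blueprints.v1.importer import Importer

# Illustrative blueprint; a real blueprint needs valid entries for its models.
BLUEPRINT_YAML = """
version: 1
metadata:
  name: example-blueprint
entries: []
"""

# from_string() merges the blueprint's own context with the caller-supplied one;
# on the `-` side it additionally contains "goauthentik.io/enterprise/licensed".
importer = Importer.from_string(BLUEPRINT_YAML, context={"example-key": "example-value"})
importer.apply()
```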
@@ -1,13 +1,12 @@
 """Apply Blueprint meta model"""
-
 from typing import TYPE_CHECKING
 
 from rest_framework.exceptions import ValidationError
-from rest_framework.fields import BooleanField
+from rest_framework.fields import BooleanField, JSONField
 from structlog.stdlib import get_logger
 
 from authentik.blueprints.v1.meta.registry import BaseMetaModel, MetaResult, registry
-from authentik.core.api.utils import JSONDictField, PassiveSerializer
+from authentik.core.api.utils import PassiveSerializer, is_dict
 
 if TYPE_CHECKING:
     from authentik.blueprints.models import BlueprintInstance
@@ -18,7 +17,7 @@ LOGGER = get_logger()
 class ApplyBlueprintMetaSerializer(PassiveSerializer):
     """Serializer for meta apply blueprint model"""
 
-    identifiers = JSONDictField()
+    identifiers = JSONField(validators=[is_dict])
     required = BooleanField(default=True)
 
     # We cannot override `instance` as that will confuse rest_framework
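Both sides of this hunk enforce the same constraint, that `identifiers` must be a JSON object rather than a list or scalar; the `-` side packages the check into a `JSONDictField`, the `+` side attaches an `is_dict` validator to a plain `JSONField`. A rough sketch of what those two helpers in `authentik.core.api.utils` plausibly look like; these are assumed implementations, not copied from the source:

```python
from rest_framework.fields import JSONField
from rest_framework.serializers import ValidationError


def is_dict(value):
    """Validator: reject JSON values that are not objects (dicts)."""
    if not isinstance(value, dict):
        raise ValidationError("Value must be a dictionary")


class JSONDictField(JSONField):
    """JSONField that only accepts JSON objects (assumed sketch of the newer helper)."""

    def to_internal_value(self, data):
        data = super().to_internal_value(data)
        is_dict(data)
        return data
```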
@@ -1,5 +1,4 @@
 """Base models"""
-
 from django.apps import apps
 from django.db.models import Model
 from rest_framework.serializers import Serializer
@@ -1,5 +1,4 @@
 """OCI Client"""
-
 from typing import Any
 from urllib.parse import ParseResult, urlparse
 
@@ -1,9 +1,7 @@
 """v1 blueprints tasks"""
-
 from dataclasses import asdict, dataclass, field
 from hashlib import sha512
 from pathlib import Path
-from sys import platform
 from typing import Optional
 
 from dacite.core import from_dict
@@ -31,12 +29,15 @@ from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, E
 from authentik.blueprints.v1.importer import Importer
 from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
 from authentik.blueprints.v1.oci import OCI_PREFIX
-from authentik.events.models import TaskStatus
-from authentik.events.system_tasks import SystemTask, prefill_task
+from authentik.events.monitored_tasks import (
+    MonitoredTask,
+    TaskResult,
+    TaskResultStatus,
+    prefill_task,
+)
 from authentik.events.utils import sanitize_dict
 from authentik.lib.config import CONFIG
 from authentik.root.celery import CELERY_APP
-from authentik.tenants.models import Tenant
 
 LOGGER = get_logger()
 _file_watcher_started = False
@@ -61,12 +62,7 @@ def start_blueprint_watcher():
     if _file_watcher_started:
         return
     observer = Observer()
-    kwargs = {}
-    if platform.startswith("linux"):
-        kwargs["event_filter"] = (FileCreatedEvent, FileModifiedEvent)
-    observer.schedule(
-        BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True, **kwargs
-    )
+    observer.schedule(BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True)
     observer.start()
     _file_watcher_started = True
 
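The watcher setup differs mainly in where event filtering happens: the `-` side passes an `event_filter` to `observer.schedule()` on Linux and lets watchdog drop everything else, while the `+` side schedules the handler unconditionally and filters inside the handler (see the `on_any_event` lines in the next hunk). A self-contained sketch of the filtered approach, assuming a watchdog release that supports the `event_filter` keyword; the path is illustrative:

```python
from sys import platform

from watchdog.events import FileCreatedEvent, FileModifiedEvent, FileSystemEventHandler
from watchdog.observers import Observer


class PrintingHandler(FileSystemEventHandler):
    """Toy stand-in for BlueprintEventHandler."""

    def on_any_event(self, event):
        print(type(event).__name__, event.src_path)


observer = Observer()
kwargs = {}
if platform.startswith("linux"):
    # Only newer watchdog versions accept event_filter; on other platforms the
    # handler has to ignore unwanted event types itself.
    kwargs["event_filter"] = (FileCreatedEvent, FileModifiedEvent)
observer.schedule(PrintingHandler(), "/tmp/blueprints", recursive=True, **kwargs)
observer.start()
```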
@@ -74,33 +70,18 @@ def start_blueprint_watcher():
 class BlueprintEventHandler(FileSystemEventHandler):
     """Event handler for blueprint events"""
 
-    # We only ever get creation and modification events.
-    # See the creation of the Observer instance above for the event filtering.
-    # Even though we filter to only get file events, we might still get
-    # directory events as some implementations such as inotify do not support
-    # filtering on file/directory.
-
-    def dispatch(self, event: FileSystemEvent) -> None:
-        """Call specific event handler method. Ignores directory changes."""
+    def on_any_event(self, event: FileSystemEvent):
+        if not isinstance(event, (FileCreatedEvent, FileModifiedEvent)):
+            return
         if event.is_directory:
-            return None
-        return super().dispatch(event)
-
-    def on_created(self, event: FileSystemEvent):
-        """Process file creation"""
-        LOGGER.debug("new blueprint file created, starting discovery")
-        for tenant in Tenant.objects.filter(ready=True):
-            with tenant:
-                blueprints_discovery.delay()
-
-    def on_modified(self, event: FileSystemEvent):
-        """Process file modification"""
-        path = Path(event.src_path)
+            return
         root = Path(CONFIG.get("blueprints_dir")).absolute()
+        path = Path(event.src_path).absolute()
         rel_path = str(path.relative_to(root))
-        for tenant in Tenant.objects.filter(ready=True):
-            with tenant:
+        if isinstance(event, FileCreatedEvent):
+            LOGGER.debug("new blueprint file created, starting discovery", path=rel_path)
+            blueprints_discovery.delay(rel_path)
+        if isinstance(event, FileModifiedEvent):
             for instance in BlueprintInstance.objects.filter(path=rel_path, enabled=True):
                 LOGGER.debug("modified blueprint file, starting apply", instance=instance)
                 apply_blueprint.delay(instance.pk.hex)
@@ -147,10 +128,10 @@ def blueprints_find() -> list[BlueprintFile]:
 
 
 @CELERY_APP.task(
-    throws=(DatabaseError, ProgrammingError, InternalError), base=SystemTask, bind=True
+    throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
 )
 @prefill_task
-def blueprints_discovery(self: SystemTask, path: Optional[str] = None):
+def blueprints_discovery(self: MonitoredTask, path: Optional[str] = None):
     """Find blueprints and check if they need to be created in the database"""
     count = 0
     for blueprint in blueprints_find():
@@ -159,7 +140,10 @@ def blueprints_discovery(self: SystemTask, path: Optional[str] = None):
         check_blueprint_v1_file(blueprint)
         count += 1
     self.set_status(
-        TaskStatus.SUCCESSFUL, _("Successfully imported %(count)d files." % {"count": count})
+        TaskResult(
+            TaskResultStatus.SUCCESSFUL,
+            messages=[_("Successfully imported %(count)d files." % {"count": count})],
+        )
     )
 
 
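The remaining churn in this file is the status-reporting API: on the `-` side `SystemTask.set_status()` takes a `TaskStatus` plus message arguments, on the `+` side `MonitoredTask.set_status()` takes a single `TaskResult`. A hypothetical task using the first form, built only from imports and decorators visible in these hunks and assuming a working authentik/Celery environment:

```python
from django.utils.translation import gettext_lazy as _

from authentik.events.models import TaskStatus
from authentik.events.system_tasks import SystemTask, prefill_task
from authentik.root.celery import CELERY_APP


@CELERY_APP.task(bind=True, base=SystemTask)
@prefill_task
def demo_import(self: SystemTask):
    """Hypothetical task reporting success via the SystemTask API."""
    count = 3
    self.set_status(
        TaskStatus.SUCCESSFUL,
        _("Successfully imported %(count)d files." % {"count": count}),
    )
```

On the `+` side the same report becomes `self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, messages=[...]))`, as the hunk above shows.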
@@ -192,9 +176,9 @@ def check_blueprint_v1_file(blueprint: BlueprintFile):
 
 @CELERY_APP.task(
     bind=True,
-    base=SystemTask,
+    base=MonitoredTask,
 )
-def apply_blueprint(self: SystemTask, instance_pk: str):
+def apply_blueprint(self: MonitoredTask, instance_pk: str):
     """Apply single blueprint"""
     self.save_on_success = False
     instance: Optional[BlueprintInstance] = None
@@ -212,18 +196,18 @@ def apply_blueprint(self: SystemTask, instance_pk: str):
         if not valid:
             instance.status = BlueprintInstanceStatus.ERROR
             instance.save()
-            self.set_status(TaskStatus.ERROR, *[x["event"] for x in logs])
+            self.set_status(TaskResult(TaskResultStatus.ERROR, [x["event"] for x in logs]))
             return
         applied = importer.apply()
         if not applied:
             instance.status = BlueprintInstanceStatus.ERROR
             instance.save()
-            self.set_status(TaskStatus.ERROR, "Failed to apply")
+            self.set_status(TaskResult(TaskResultStatus.ERROR, "Failed to apply"))
             return
         instance.status = BlueprintInstanceStatus.SUCCESSFUL
         instance.last_applied_hash = file_hash
         instance.last_applied = now()
-        self.set_status(TaskStatus.SUCCESSFUL)
+        self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL))
     except (
         DatabaseError,
         ProgrammingError,
@@ -234,7 +218,7 @@ def apply_blueprint(self: SystemTask, instance_pk: str):
     ) as exc:
         if instance:
             instance.status = BlueprintInstanceStatus.ERROR
-        self.set_error(exc)
+        self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
     finally:
         if instance:
             instance.save()
@@ -1,11 +0,0 @@
-"""authentik brands app"""
-
-from django.apps import AppConfig
-
-
-class AuthentikBrandsConfig(AppConfig):
-    """authentik Brand app"""
-
-    name = "authentik.brands"
-    label = "authentik_brands"
-    verbose_name = "authentik Brands"
@@ -1,27 +0,0 @@
-"""Inject brand into current request"""
-
-from typing import Callable
-
-from django.http.request import HttpRequest
-from django.http.response import HttpResponse
-from django.utils.translation import activate
-
-from authentik.brands.utils import get_brand_for_request
-
-
-class BrandMiddleware:
-    """Add current brand to http request"""
-
-    get_response: Callable[[HttpRequest], HttpResponse]
-
-    def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]):
-        self.get_response = get_response
-
-    def __call__(self, request: HttpRequest) -> HttpResponse:
-        if not hasattr(request, "brand"):
-            brand = get_brand_for_request(request)
-            setattr(request, "brand", brand)
-            locale = brand.default_locale
-            if locale != "":
-                activate(locale)
-        return self.get_response(request)
@@ -1,21 +0,0 @@
-# Generated by Django 4.2.7 on 2023-12-12 06:41
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("authentik_brands", "0004_tenant_flow_device_code"),
-    ]
-
-    operations = [
-        migrations.RenameField(
-            model_name="brand",
-            old_name="tenant_uuid",
-            new_name="brand_uuid",
-        ),
-        migrations.RemoveField(
-            model_name="brand",
-            name="event_retention",
-        ),
-    ]
@@ -1,86 +0,0 @@
-"""brand models"""
-
-from uuid import uuid4
-
-from django.db import models
-from django.utils.translation import gettext_lazy as _
-from rest_framework.serializers import Serializer
-from structlog.stdlib import get_logger
-
-from authentik.crypto.models import CertificateKeyPair
-from authentik.flows.models import Flow
-from authentik.lib.models import SerializerModel
-
-LOGGER = get_logger()
-
-
-class Brand(SerializerModel):
-    """Single brand"""
-
-    brand_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
-    domain = models.TextField(
-        help_text=_(
-            "Domain that activates this brand. Can be a superset, i.e. `a.b` for `aa.b` and `ba.b`"
-        )
-    )
-    default = models.BooleanField(
-        default=False,
-    )
-
-    branding_title = models.TextField(default="authentik")
-
-    branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg")
-    branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png")
-
-    flow_authentication = models.ForeignKey(
-        Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication"
-    )
-    flow_invalidation = models.ForeignKey(
-        Flow, null=True, on_delete=models.SET_NULL, related_name="brand_invalidation"
-    )
-    flow_recovery = models.ForeignKey(
-        Flow, null=True, on_delete=models.SET_NULL, related_name="brand_recovery"
-    )
-    flow_unenrollment = models.ForeignKey(
-        Flow, null=True, on_delete=models.SET_NULL, related_name="brand_unenrollment"
-    )
-    flow_user_settings = models.ForeignKey(
-        Flow, null=True, on_delete=models.SET_NULL, related_name="brand_user_settings"
-    )
-    flow_device_code = models.ForeignKey(
-        Flow, null=True, on_delete=models.SET_NULL, related_name="brand_device_code"
-    )
-
-    web_certificate = models.ForeignKey(
-        CertificateKeyPair,
-        null=True,
-        default=None,
-        on_delete=models.SET_DEFAULT,
-        help_text=_("Web Certificate used by the authentik Core webserver."),
-    )
-    attributes = models.JSONField(default=dict, blank=True)
-
-    @property
-    def serializer(self) -> Serializer:
-        from authentik.brands.api import BrandSerializer
-
-        return BrandSerializer
-
-    @property
-    def default_locale(self) -> str:
-        """Get default locale"""
-        try:
-            return self.attributes.get("settings", {}).get("locale", "")
-        # pylint: disable=broad-except
-        except Exception as exc:
-            LOGGER.warning("Failed to get default locale", exc=exc)
-            return ""
-
-    def __str__(self) -> str:
-        if self.default:
-            return "Default brand"
-        return f"Brand {self.domain}"
-
-    class Meta:
-        verbose_name = _("Brand")
-        verbose_name_plural = _("Brands")
@@ -1,77 +0,0 @@
-"""Test brands"""
-
-from django.urls import reverse
-from rest_framework.test import APITestCase
-
-from authentik.brands.api import Themes
-from authentik.brands.models import Brand
-from authentik.core.tests.utils import create_test_admin_user, create_test_brand
-
-
-class TestBrands(APITestCase):
-    """Test brands"""
-
-    def test_current_brand(self):
-        """Test Current brand API"""
-        brand = create_test_brand()
-        self.assertJSONEqual(
-            self.client.get(reverse("authentik_api:brand-current")).content.decode(),
-            {
-                "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
-                "branding_favicon": "/static/dist/assets/icons/icon.png",
-                "branding_title": "authentik",
-                "matched_domain": brand.domain,
-                "ui_footer_links": [],
-                "ui_theme": Themes.AUTOMATIC,
-                "default_locale": "",
-            },
-        )
-
-    def test_brand_subdomain(self):
-        """Test Current brand API"""
-        Brand.objects.all().delete()
-        Brand.objects.create(domain="bar.baz", branding_title="custom")
-        self.assertJSONEqual(
-            self.client.get(
-                reverse("authentik_api:brand-current"), HTTP_HOST="foo.bar.baz"
-            ).content.decode(),
-            {
-                "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
-                "branding_favicon": "/static/dist/assets/icons/icon.png",
-                "branding_title": "custom",
-                "matched_domain": "bar.baz",
-                "ui_footer_links": [],
-                "ui_theme": Themes.AUTOMATIC,
-                "default_locale": "",
-            },
-        )
-
-    def test_fallback(self):
-        """Test fallback brand"""
-        Brand.objects.all().delete()
-        self.assertJSONEqual(
-            self.client.get(reverse("authentik_api:brand-current")).content.decode(),
-            {
-                "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
-                "branding_favicon": "/static/dist/assets/icons/icon.png",
-                "branding_title": "authentik",
-                "matched_domain": "fallback",
-                "ui_footer_links": [],
-                "ui_theme": Themes.AUTOMATIC,
-                "default_locale": "",
-            },
-        )
-
-    def test_create_default_multiple(self):
-        """Test attempted creation of multiple default brands"""
-        Brand.objects.create(
-            domain="foo",
-            default=True,
-            branding_title="custom",
-        )
-        user = create_test_admin_user()
-        self.client.force_login(user)
-        response = self.client.post(
-            reverse("authentik_api:brand-list"), data={"domain": "bar", "default": True}
-        )
-        self.assertEqual(response.status_code, 400)
@@ -1,7 +0,0 @@
-"""API URLs"""
-
-from authentik.brands.api import BrandViewSet
-
-api_urlpatterns = [
-    ("core/brands", BrandViewSet),
-]
@@ -1,42 +0,0 @@
-"""Brand utilities"""
-
-from typing import Any
-
-from django.db.models import F, Q
-from django.db.models import Value as V
-from django.http.request import HttpRequest
-from sentry_sdk.hub import Hub
-
-from authentik import get_full_version
-from authentik.brands.models import Brand
-
-_q_default = Q(default=True)
-DEFAULT_BRAND = Brand(domain="fallback")
-
-
-def get_brand_for_request(request: HttpRequest) -> Brand:
-    """Get brand object for current request"""
-    db_brands = (
-        Brand.objects.annotate(host_domain=V(request.get_host()))
-        .filter(Q(host_domain__iendswith=F("domain")) | _q_default)
-        .order_by("default")
-    )
-    brands = list(db_brands.all())
-    if len(brands) < 1:
-        return DEFAULT_BRAND
-    return brands[0]
-
-
-def context_processor(request: HttpRequest) -> dict[str, Any]:
-    """Context Processor that injects brand object into every template"""
-    brand = getattr(request, "brand", DEFAULT_BRAND)
-    trace = ""
-    span = Hub.current.scope.span
-    if span:
-        trace = span.to_traceparent()
-    return {
-        "brand": brand,
-        "footer_links": request.tenant.footer_links,
-        "sentry_trace": trace,
-        "version": get_full_version(),
-    }
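Taken together, the brands files removed on the `+` side implement per-domain branding: `get_brand_for_request()` annotates every `Brand` with the request's host and keeps rows whose `domain` is a suffix of it, so a brand for `bar.baz` also serves `foo.bar.baz` (exactly what the deleted `test_brand_subdomain` above asserts), falling back to the in-memory `DEFAULT_BRAND` when nothing matches. A usage sketch against the branch where these modules exist, using Django's `RequestFactory`; the domains are illustrative and the sketch assumes a clean database and an `ALLOWED_HOSTS` setting that permits these hosts:

```python
from django.test import RequestFactory

from authentik.brands.models import Brand
from authentik.brands.utils import DEFAULT_BRAND, get_brand_for_request

Brand.objects.create(domain="bar.baz", branding_title="custom")

request = RequestFactory().get("/", HTTP_HOST="foo.bar.baz")
brand = get_brand_for_request(request)
print(brand.branding_title)  # prints "custom", suffix match on the Host header

orphan = RequestFactory().get("/", HTTP_HOST="unknown.example")
# With no matching or default brand in the database, the in-memory fallback is used.
print(get_brand_for_request(orphan) is DEFAULT_BRAND)  # True
```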
@@ -1,18 +1,16 @@
 """Application API Views"""
-
-from copy import copy
 from datetime import timedelta
-from typing import Iterator, Optional
+from typing import Optional
 
 from django.core.cache import cache
 from django.db.models import QuerySet
 from django.db.models.functions import ExtractHour
+from django.http.response import HttpResponseBadRequest
 from django.shortcuts import get_object_or_404
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
 from guardian.shortcuts import get_objects_for_user
 from rest_framework.decorators import action
-from rest_framework.exceptions import ValidationError
 from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField
 from rest_framework.parsers import MultiPartParser
 from rest_framework.request import Request
@@ -130,16 +128,10 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
             queryset = backend().filter_queryset(self.request, queryset, self)
         return queryset
 
-    def _get_allowed_applications(
-        self, pagined_apps: Iterator[Application], user: Optional[User] = None
-    ) -> list[Application]:
+    def _get_allowed_applications(self, queryset: QuerySet) -> list[Application]:
         applications = []
-        request = self.request._request
-        if user:
-            request = copy(request)
-            request.user = user
-        for application in pagined_apps:
-            engine = PolicyEngine(application, request.user, request)
+        for application in queryset:
+            engine = PolicyEngine(application, self.request.user, self.request)
             engine.build()
             if engine.passing:
                 applications.append(application)
@@ -155,6 +147,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
         ],
         responses={
             200: PolicyTestResultSerializer(),
+            404: OpenApiResponse(description="for_user user not found"),
         },
     )
     @action(detail=True, methods=["GET"])
@@ -167,11 +160,9 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
         for_user = request.user
         if request.user.is_superuser and "for_user" in request.query_params:
             try:
-                for_user = User.objects.filter(pk=request.query_params.get("for_user")).first()
+                for_user = get_object_or_404(User, pk=request.query_params.get("for_user"))
             except ValueError:
-                raise ValidationError({"for_user": "for_user must be numerical"})
-        if not for_user:
-            raise ValidationError({"for_user": "User not found"})
+                return HttpResponseBadRequest("for_user must be numerical")
         engine = PolicyEngine(application, for_user, request)
         engine.use_cache = False
         with capture_logs() as logs:
@@ -196,51 +187,28 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
                 name="superuser_full_list",
                 location=OpenApiParameter.QUERY,
                 type=OpenApiTypes.BOOL,
-            ),
-            OpenApiParameter(
-                name="for_user",
-                location=OpenApiParameter.QUERY,
-                type=OpenApiTypes.INT,
-            ),
+            )
         ]
     )
     def list(self, request: Request) -> Response:
         """Custom list method that checks Policy based access instead of guardian"""
-        should_cache = request.query_params.get("search", "") == ""
+        should_cache = request.GET.get("search", "") == ""
 
-        superuser_full_list = (
-            str(request.query_params.get("superuser_full_list", "false")).lower() == "true"
-        )
+        superuser_full_list = str(request.GET.get("superuser_full_list", "false")).lower() == "true"
         if superuser_full_list and request.user.is_superuser:
             return super().list(request)
 
         queryset = self._filter_queryset_for_list(self.get_queryset())
-        pagined_apps = self.paginate_queryset(queryset)
-
-        if "for_user" in request.query_params:
-            try:
-                for_user: int = int(request.query_params.get("for_user", 0))
-                for_user = (
-                    get_objects_for_user(request.user, "authentik_core.view_user_applications")
-                    .filter(pk=for_user)
-                    .first()
-                )
-                if not for_user:
-                    raise ValidationError({"for_user": "User not found"})
-            except ValueError as exc:
-                raise ValidationError from exc
-            allowed_applications = self._get_allowed_applications(pagined_apps, user=for_user)
-            serializer = self.get_serializer(allowed_applications, many=True)
-            return self.get_paginated_response(serializer.data)
+        self.paginate_queryset(queryset)
 
         allowed_applications = []
         if not should_cache:
-            allowed_applications = self._get_allowed_applications(pagined_apps)
+            allowed_applications = self._get_allowed_applications(queryset)
         if should_cache:
             allowed_applications = cache.get(user_app_cache_key(self.request.user.pk))
             if not allowed_applications:
                 LOGGER.debug("Caching allowed application list")
-                allowed_applications = self._get_allowed_applications(pagined_apps)
+                allowed_applications = self._get_allowed_applications(queryset)
                 cache.set(
                     user_app_cache_key(self.request.user.pk),
                     allowed_applications,
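On the `-` side of these hunks, the application `list` endpoint itself accepts a `for_user` query parameter: the target user is resolved through `get_objects_for_user(request.user, "authentik_core.view_user_applications")`, and the policy engine is then evaluated as that user. On the `+` side only the `check_access` action takes `for_user`. A hypothetical test sketching the list parameter; the route name follows the `authentik_api:<model>-list` pattern seen in the deleted brand tests and is assumed here, not taken from the diff:

```python
from django.urls import reverse
from rest_framework.test import APITestCase

from authentik.core.tests.utils import create_test_admin_user


class TestApplicationsForUser(APITestCase):
    """Hypothetical test for the for_user list parameter."""

    def test_list_for_other_user(self):
        admin = create_test_admin_user()
        self.client.force_login(admin)
        # Listing applications as another user requires the
        # view_user_applications permission on that user.
        response = self.client.get(
            reverse("authentik_api:application-list"), {"for_user": admin.pk}
        )
        self.assertEqual(response.status_code, 200)
```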
@@ -1,5 +1,4 @@
 """AuthenticatedSessions API Viewset"""
-
 from typing import Optional, TypedDict
 
 from django_filters.rest_framework import DjangoFilterBackend
@@ -15,8 +14,7 @@ from ua_parser import user_agent_parser
 from authentik.api.authorization import OwnerSuperuserPermissions
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.models import AuthenticatedSession
-from authentik.events.context_processors.asn import ASN_CONTEXT_PROCESSOR, ASNDict
-from authentik.events.context_processors.geoip import GEOIP_CONTEXT_PROCESSOR, GeoIPDict
+from authentik.events.geo import GEOIP_READER, GeoIPDict
 
 
 class UserAgentDeviceDict(TypedDict):
@@ -61,7 +59,6 @@ class AuthenticatedSessionSerializer(ModelSerializer):
     current = SerializerMethodField()
     user_agent = SerializerMethodField()
     geo_ip = SerializerMethodField()
-    asn = SerializerMethodField()
 
     def get_current(self, instance: AuthenticatedSession) -> bool:
         """Check if session is currently active session"""
@@ -73,12 +70,8 @@ class AuthenticatedSessionSerializer(ModelSerializer):
         return user_agent_parser.Parse(instance.last_user_agent)
 
     def get_geo_ip(self, instance: AuthenticatedSession) -> Optional[GeoIPDict]:  # pragma: no cover
-        """Get GeoIP Data"""
-        return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.last_ip)
+        """Get parsed user agent"""
+        return GEOIP_READER.city_dict(instance.last_ip)
 
-    def get_asn(self, instance: AuthenticatedSession) -> Optional[ASNDict]:  # pragma: no cover
-        """Get ASN Data"""
-        return ASN_CONTEXT_PROCESSOR.asn_dict(instance.last_ip)
-
     class Meta:
         model = AuthenticatedSession
@@ -87,7 +80,6 @@ class AuthenticatedSessionSerializer(ModelSerializer):
             "current",
             "user_agent",
             "geo_ip",
-            "asn",
             "user",
             "last_ip",
             "last_user_agent",
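The only addition on the `-` side here is an `asn` field next to `geo_ip`, resolved through an ASN context processor instead of the older `GEOIP_READER` module-level reader. A minimal serializer sketch using just the imports and calls from these hunks, with the field list trimmed for brevity:

```python
from typing import Optional

from rest_framework.fields import SerializerMethodField
from rest_framework.serializers import ModelSerializer

from authentik.core.models import AuthenticatedSession
from authentik.events.context_processors.asn import ASN_CONTEXT_PROCESSOR, ASNDict


class SessionASNSerializer(ModelSerializer):
    """Sketch of the extra asn field added on the `-` side."""

    asn = SerializerMethodField()

    def get_asn(self, instance: AuthenticatedSession) -> Optional[ASNDict]:
        # Same lookup as in the hunk above, keyed on the session's last IP.
        return ASN_CONTEXT_PROCESSOR.asn_dict(instance.last_ip)

    class Meta:
        model = AuthenticatedSession
        fields = ["asn", "last_ip"]
```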
@@ -1,5 +1,4 @@
 """Authenticator Devices API Views"""
-
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import OpenApiParameter, extend_schema
 from rest_framework.fields import BooleanField, CharField, IntegerField, SerializerMethodField
@@ -1,5 +1,4 @@
 """Groups API Viewset"""
-
 from json import loads
 from typing import Optional
 
@@ -9,7 +8,7 @@ from django_filters.filterset import FilterSet
 from drf_spectacular.utils import OpenApiResponse, extend_schema
 from guardian.shortcuts import get_objects_for_user
 from rest_framework.decorators import action
-from rest_framework.fields import CharField, IntegerField
+from rest_framework.fields import CharField, IntegerField, JSONField
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
@@ -17,7 +16,7 @@ from rest_framework.viewsets import ModelViewSet
 
 from authentik.api.decorators import permission_required
 from authentik.core.api.used_by import UsedByMixin
-from authentik.core.api.utils import JSONDictField, PassiveSerializer
+from authentik.core.api.utils import PassiveSerializer, is_dict
 from authentik.core.models import Group, User
 from authentik.rbac.api.roles import RoleSerializer
 
@@ -25,7 +24,7 @@ from authentik.rbac.api.roles import RoleSerializer
 class GroupMemberSerializer(ModelSerializer):
     """Stripped down user serializer to show relevant users for groups"""
 
-    attributes = JSONDictField(required=False)
+    attributes = JSONField(validators=[is_dict], required=False)
     uid = CharField(read_only=True)
 
     class Meta:
@@ -45,7 +44,7 @@ class GroupMemberSerializer(ModelSerializer):
 class GroupSerializer(ModelSerializer):
     """Group Serializer"""
 
-    attributes = JSONDictField(required=False)
+    attributes = JSONField(validators=[is_dict], required=False)
     users_obj = ListSerializer(
         child=GroupMemberSerializer(), read_only=True, source="users", required=False
    )
@@ -1,5 +1,4 @@
 """PropertyMapping API Views"""
-
 from json import dumps
 
 from drf_spectacular.types import OpenApiTypes
@@ -118,11 +117,7 @@ class PropertyMappingViewSet(
     @action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
     def test(self, request: Request, pk: str) -> Response:
         """Test Property Mapping"""
-        _mapping: PropertyMapping = self.get_object()
-        # Use `get_subclass` to get correct class and correct `.evaluate` implementation
-        mapping = PropertyMapping.objects.get_subclass(pk=_mapping.pk)
-        # FIXME: when we separate policy mappings between ones for sources
-        # and ones for providers, we need to make the user field optional for the source mapping
+        mapping: PropertyMapping = self.get_object()
         test_params = PolicyTestSerializer(data=request.data)
         if not test_params.is_valid():
             return Response(test_params.errors, status=400)
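The extra indirection on the `-` side exists because `self.get_object()` on a base-model queryset returns a bare `PropertyMapping`, whose `evaluate()` is only the stub; `get_subclass()` (an InheritanceManager method from django-model-utils) re-fetches the row as its concrete subclass. A small sketch of that distinction, assuming an authentik environment and an existing mapping primary key:

```python
from authentik.core.models import PropertyMapping


def resolve_mapping(pk: str) -> PropertyMapping:
    """Return the concrete subclass (e.g. a scope or LDAP mapping) for a mapping pk."""
    # A plain .get() would return the base PropertyMapping and therefore the
    # base evaluate(); get_subclass() picks the most specific model instead.
    return PropertyMapping.objects.get_subclass(pk=pk)
```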
@@ -1,5 +1,4 @@
 """Provider API Views"""
-
 from django.db.models import QuerySet
 from django.db.models.query import Q
 from django.utils.translation import gettext_lazy as _
@@ -17,7 +16,6 @@ from rest_framework.viewsets import GenericViewSet
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
 from authentik.core.models import Provider
-from authentik.enterprise.apps import EnterpriseConfig
 from authentik.lib.utils.reflection import all_subclasses
 
 
@@ -115,7 +113,6 @@ class ProviderViewSet(
                     "description": subclass.__doc__,
                     "component": subclass().component,
                     "model_name": subclass._meta.model_name,
-                    "requires_enterprise": isinstance(subclass._meta.app_config, EnterpriseConfig),
                 }
             )
         data.append(
@@ -1,5 +1,4 @@
 """Source API Views"""
-
 from typing import Iterable
 
 from django_filters.rest_framework import DjangoFilterBackend
@@ -1,5 +1,4 @@
 """Tokens API Viewset"""
-
 from typing import Any
 
 from django_filters.rest_framework import DjangoFilterBackend
Some files were not shown because too many files have changed in this diff