Compare commits

1 commit: version-20...version-20

Author | SHA1 | Date
---|---|---
| fd7a7a6e64 |
.bumpversion.cfg

@@ -1,5 +1,5 @@
[bumpversion]
current_version = 2023.2.3
current_version = 2022.8.2
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)

@@ -17,4 +17,4 @@ tag_name = version/{new_version}

[bumpversion:file:internal/constants/constants.go]

[bumpversion:file:web/src/common/constants.ts]
[bumpversion:file:web/src/constants.ts]
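For context on the keys above: `current_version` is the single source of truth that the CI helper scripts further down in this diff read back out, and `parse` is the regex bumpversion uses to split it. A minimal sketch of that consumption logic (the `re` call is illustrative; the configparser lines mirror the scripts shown below):

```python
# Sketch: how .bumpversion.cfg is consumed by the CI scripts in this diff.
import configparser
import re

parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
version = parser.get("bumpversion", "current_version")  # e.g. "2022.8.2"

# The "parse" regex from the config splits the version into its parts.
match = re.match(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)", version)
print(match.group("major"), match.group("minor"), match.group("patch"))

# The workflows below also derive a "version family" by dropping the patch:
version_family = ".".join(version.split(".")[:-1])  # e.g. "2022.8"
```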
.dockerignore

@@ -1,8 +1,8 @@
env
static
htmlcov
*.env.yml
**/node_modules
dist/**
build/**
build_docs/**
Dockerfile
.github/actions/comment-pr-instructions/action.yml

@@ -11,7 +11,38 @@ runs:
steps:
- name: Generate config
id: ev
uses: ./.github/actions/docker-push-variables
shell: python
run: |
"""Helper script to get the actual branch name, docker safe"""
import os
from time import time

env_pr_branch = "GITHUB_HEAD_REF"
default_branch = "GITHUB_REF"
sha = "GITHUB_SHA"

branch_name = os.environ[default_branch]
if os.environ.get(env_pr_branch, "") != "":
    branch_name = os.environ[env_pr_branch]

should_build = str(os.environ.get("DOCKER_USERNAME", "") != "").lower()

print("##[set-output name=branchName]%s" % branch_name)
print(
    "##[set-output name=branchNameContainer]%s"
    % branch_name.replace("refs/heads/", "").replace("/", "-")
)
print("##[set-output name=timestamp]%s" % int(time()))
print("##[set-output name=sha]%s" % os.environ[sha])
print("##[set-output name=shouldBuild]%s" % should_build)

import configparser
parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
version = parser.get("bumpversion", "current_version")
version_family = ".".join(version.split(".")[:-1])
print("##[set-output name=version]%s" % version)
print("##[set-output name=versionFamily]%s" % version_family)
- name: Find Comment
uses: peter-evans/find-comment@v2
id: fc

@@ -38,14 +69,6 @@ runs:
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```

For arm64, use these values:

```shell
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
AUTHENTIK_TAG=${{ inputs.tag }}-arm64
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```

Afterwards, run the upgrade commands from the latest release notes.
</details>
<details>

@@ -60,17 +83,8 @@ runs:
image:
  repository: ghcr.io/goauthentik/dev-server
  tag: ${{ inputs.tag }}
```

For arm64, use these values:

```yaml
authentik:
  outposts:
    container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
image:
  repository: ghcr.io/goauthentik/dev-server
  tag: ${{ inputs.tag }}-arm64
  # pullPolicy: Always to ensure you always get the latest version
  pullPolicy: Always
```

Afterwards, run the upgrade commands from the latest release notes.
.github/actions/docker-push-variables/action.yml (41 changes, vendored)
@@ -17,9 +17,6 @@ outputs:
sha:
description: "sha"
value: ${{ steps.ev.outputs.sha }}
shortHash:
description: "shortHash"
value: ${{ steps.ev.outputs.shortHash }}
version:
description: "version"
value: ${{ steps.ev.outputs.version }}

@@ -35,28 +32,32 @@ runs:
shell: python
run: |
"""Helper script to get the actual branch name, docker safe"""
import configparser
import os
from time import time

parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
env_pr_branch = "GITHUB_HEAD_REF"
default_branch = "GITHUB_REF"
sha = "GITHUB_SHA"

branch_name = os.environ["GITHUB_REF"]
if os.environ.get("GITHUB_HEAD_REF", "") != "":
    branch_name = os.environ["GITHUB_HEAD_REF"]
branch_name = os.environ[default_branch]
if os.environ.get(env_pr_branch, "") != "":
    branch_name = os.environ[env_pr_branch]

should_build = str(os.environ.get("DOCKER_USERNAME", "") != "").lower()

print("##[set-output name=branchName]%s" % branch_name)
print(
    "##[set-output name=branchNameContainer]%s"
    % branch_name.replace("refs/heads/", "").replace("/", "-")
)
print("##[set-output name=timestamp]%s" % int(time()))
print("##[set-output name=sha]%s" % os.environ[sha])
print("##[set-output name=shouldBuild]%s" % should_build)

import configparser
parser = configparser.ConfigParser()
parser.read(".bumpversion.cfg")
version = parser.get("bumpversion", "current_version")
version_family = ".".join(version.split(".")[:-1])
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")

with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
    print("branchName=%s" % branch_name, file=_output)
    print("branchNameContainer=%s" % safe_branch_name, file=_output)
    print("timestamp=%s" % int(time()), file=_output)
    print("sha=%s" % os.environ["GITHUB_SHA"], file=_output)
    print("shortHash=%s" % os.environ["GITHUB_SHA"][:7], file=_output)
    print("shouldBuild=%s" % should_build, file=_output)
    print("version=%s" % version, file=_output)
    print("versionFamily=%s" % version_family, file=_output)
print("##[set-output name=version]%s" % version)
print("##[set-output name=versionFamily]%s" % version_family)
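The two script variants interleaved in this hunk differ mainly in how they return values to the workflow: one prints `##[set-output name=...]` workflow commands to stdout, the other appends `key=value` lines to the file named by the `GITHUB_OUTPUT` environment variable, which is the mechanism GitHub introduced when it deprecated `set-output`. A minimal sketch of the two mechanisms side by side (the value is illustrative):

```python
# Sketch: the two output mechanisms used by the script versions above.
import os

branch_name = "refs/heads/main"  # illustrative value

# Older mechanism: print a workflow command to stdout (deprecated by GitHub).
print("##[set-output name=branchName]%s" % branch_name)

# Current mechanism: append key=value lines to the $GITHUB_OUTPUT file.
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
    print("branchName=%s" % branch_name, file=_output)
```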
.github/actions/setup/action.yml (12 changes, vendored)
@@ -1,5 +1,5 @@
name: 'Setup authentik testing environment'
description: 'Setup authentik testing environment'
name: 'Setup authentik testing environemnt'
description: 'Setup authentik testing environemnt'

runs:
using: "composite"

@@ -13,21 +13,21 @@ runs:
- name: Setup python and restore poetry
uses: actions/setup-python@v3
with:
python-version: '3.11'
python-version: '3.10'
cache: 'poetry'
- name: Setup node
uses: actions/setup-node@v3.1.0
with:
node-version: '18'
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Setup dependencies
shell: bash
run: |
docker-compose -f .github/actions/setup/docker-compose.yml up -d
poetry env use python3.11
poetry env use python3.10
poetry install
cd web && npm ci
npm install -g pyright@1.1.136
- name: Generate config
shell: poetry run python {0}
run: |
.github/codespell-words.txt (1 change, vendored)
@@ -1,4 +1,3 @@
keypair
keypairs
hass
warmup
.github/transifex.yml (16 changes, vendored)
@@ -1,16 +0,0 @@
git:
filters:
- filter_type: file
# all supported i18n types: https://docs.transifex.com/formats
file_format: PO
source_language: en
source_file: web/src/locales/en.po
# path expression to translation files, must contain <lang> placeholder
translation_files_expression: 'web/src/locales/<lang>.po'
- filter_type: file
# all supported i18n types: https://docs.transifex.com/formats
file_format: PO
source_language: en
source_file: locale/en/LC_MESSAGES/django.po
# path expression to translation files, must contain <lang> placeholder
translation_files_expression: 'locale/<lang>/LC_MESSAGES/django.po'
.github/workflows/ci-main.yml (148 changes, vendored)
@@ -46,7 +46,6 @@ jobs:
run: poetry run python -m lifecycle.migrate
test-migrations-from-stable:
runs-on: ubuntu-latest
continue-on-error: true
steps:
- uses: actions/checkout@v3
with:

@@ -80,61 +79,54 @@ jobs:
run: poetry run python -m lifecycle.migrate
test-unittest:
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: testspace-com/setup-testspace@v1
with:
domain: ${{github.repository_owner}}
- name: run unittest
run: |
poetry run make test
poetry run coverage xml
- name: run testspace
if: ${{ always() }}
run: |
testspace [unittest]unittest.xml --link=codecov
- if: ${{ always() }}
uses: codecov/codecov-action@v3
with:
flags: unit
test-integration:
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: testspace-com/setup-testspace@v1
with:
domain: ${{github.repository_owner}}
- name: Create k8s Kind Cluster
uses: helm/kind-action@v1.5.0
uses: helm/kind-action@v1.3.0
- name: run integration
run: |
poetry run coverage run manage.py test tests/integration
poetry run make test-integration
poetry run coverage xml
- name: run testspace
if: ${{ always() }}
run: |
testspace [integration]unittest.xml --link=codecov
- if: ${{ always() }}
uses: codecov/codecov-action@v3
with:
flags: integration
test-e2e:
name: test-e2e (${{ matrix.job.name }})
test-e2e-provider:
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
job:
- name: proxy
glob: tests/e2e/test_provider_proxy*
- name: oauth
glob: tests/e2e/test_provider_oauth2* tests/e2e/test_source_oauth*
- name: oauth-oidc
glob: tests/e2e/test_provider_oidc*
- name: saml
glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml*
- name: ldap
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
- name: flows
glob: tests/e2e/test_flows*
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
- uses: testspace-com/setup-testspace@v1
with:
domain: ${{github.repository_owner}}
- name: Setup authentik env
run: |
docker-compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web

@@ -151,12 +143,48 @@ jobs:
npm run build
- name: run e2e
run: |
poetry run coverage run manage.py test ${{ matrix.job.glob }}
poetry run make test-e2e-provider
poetry run coverage xml
- name: run testspace
if: ${{ always() }}
run: |
testspace [e2e-provider]unittest.xml --link=codecov
- if: ${{ always() }}
uses: codecov/codecov-action@v3
test-e2e-rest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: testspace-com/setup-testspace@v1
with:
flags: e2e
domain: ${{github.repository_owner}}
- name: Setup authentik env
run: |
docker-compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v3
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web/
run: |
npm ci
make -C .. gen-client-ts
npm run build
- name: run e2e
run: |
poetry run make test-e2e-rest
poetry run coverage xml
- name: run testspace
if: ${{ always() }}
run: |
testspace [e2e-rest]unittest.xml --link=codecov
- if: ${{ always() }}
uses: codecov/codecov-action@v3
ci-core-mark:
needs:
- lint

@@ -164,7 +192,8 @@ jobs:
- test-migrations-from-stable
- test-unittest
- test-integration
- test-e2e
- test-e2e-rest
- test-e2e-provider
runs-on: ubuntu-latest
steps:
- run: echo mark

@@ -172,10 +201,15 @@ jobs:
needs: ci-core-mark
runs-on: ubuntu-latest
timeout-minutes: 120
strategy:
fail-fast: false
matrix:
arch:
- 'linux/amd64'
steps:
- uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
uses: docker/setup-qemu-action@v2.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables

@@ -190,58 +224,20 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v4
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: ${{ matrix.arch }}
- name: Comment on PR
if: github.event_name == 'pull_request'
continue-on-error: true
uses: ./.github/actions/comment-pr-instructions
with:
tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
build-arm64:
needs: ci-core-mark
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
- name: Login to Container Registry
uses: docker/login-action@v2
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v4
with:
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-arm64
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: linux/arm64
tag: gh-${{ steps.ev.outputs.branchNameContainer }}
.github/workflows/ci-outpost.yml (18 changes, vendored)
@@ -28,8 +28,6 @@ jobs:
run: make gen-client-go
- name: golangci-lint
uses: golangci/golangci-lint-action@v3
with:
args: --timeout 5000s
test-unittest:
runs-on: ubuntu-latest
steps:

@@ -49,7 +47,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- run: echo mark
build-container:
build:
timeout-minutes: 120
needs:
- ci-outpost-mark

@@ -65,7 +63,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
uses: docker/setup-qemu-action@v2.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables

@@ -82,20 +80,20 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate API
run: make gen-client-go
- name: Build Docker Image
uses: docker/build-push-action@v4
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
file: ${{ matrix.type }}.Dockerfile
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: ${{ matrix.arch }}
context: .
build-binary:
build-outpost-binary:
timeout-minutes: 120
needs:
- ci-outpost-mark

@@ -113,9 +111,9 @@ jobs:
- uses: actions/setup-go@v3
with:
go-version: "^1.17"
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.4.1
with:
node-version: '18'
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Generate API
.github/workflows/ci-web.yml (33 changes, vendored)
@@ -15,9 +15,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.4.1
with:
node-version: '18'
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/

@@ -27,29 +27,13 @@ jobs:
- name: Eslint
working-directory: web/
run: npm run lint
lint-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: TSC
working-directory: web/
run: npm run tsc
lint-prettier:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.4.1
with:
node-version: '18'
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/

@@ -63,9 +47,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.4.1
with:
node-version: '18'
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/

@@ -85,7 +69,6 @@ jobs:
- lint-eslint
- lint-prettier
- lint-lit-analyse
- lint-build
runs-on: ubuntu-latest
steps:
- run: echo mark

@@ -95,9 +78,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.4.1
with:
node-version: '18'
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
.github/workflows/ci-website.yml (19 changes, vendored)
@@ -15,9 +15,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.4.1
with:
node-version: '18'
node-version: '16'
cache: 'npm'
cache-dependency-path: website/package-lock.json
- working-directory: website/

@@ -25,24 +25,9 @@ jobs:
- name: prettier
working-directory: website/
run: npm run prettier-check
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: test
working-directory: website/
run: npm test
ci-website-mark:
needs:
- lint-prettier
- test
runs-on: ubuntu-latest
steps:
- run: echo mark
.github/workflows/ghcr-retention.yml (4 changes, vendored)
@@ -11,12 +11,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Delete 'dev' containers older than a week
uses: snok/container-retention-policy@v1
uses: sondrelg/container-retention-policy@v1
with:
image-names: dev-server,dev-ldap,dev-proxy
cut-off: One week ago UTC
account-type: org
org-name: goauthentik
untagged-only: false
token: ${{ secrets.BOT_GITHUB_TOKEN }}
token: ${{ secrets.GHCR_CLEANUP_TOKEN }}
skip-tags: gh-next,gh-main
.github/workflows/release-publish.yml (28 changes, vendored)
@@ -10,7 +10,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
uses: docker/setup-qemu-action@v2.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables

@@ -27,13 +27,10 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v4
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
push: ${{ github.event_name == 'release' }}
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
tags: |
beryju/authentik:${{ steps.ev.outputs.version }},
beryju/authentik:${{ steps.ev.outputs.versionFamily }},

@@ -42,8 +39,7 @@ jobs:
ghcr.io/goauthentik/server:${{ steps.ev.outputs.versionFamily }},
ghcr.io/goauthentik/server:latest
platforms: linux/amd64,linux/arm64
build-args: |
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
context: .
build-outpost:
runs-on: ubuntu-latest
strategy:

@@ -58,7 +54,7 @@ jobs:
with:
go-version: "^1.17"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
uses: docker/setup-qemu-action@v2.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables

@@ -75,8 +71,8 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v4
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
push: ${{ github.event_name == 'release' }}
tags: |

@@ -88,8 +84,6 @@ jobs:
ghcr.io/goauthentik/${{ matrix.type }}:latest
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
build-args: |
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
build-outpost-binary:
timeout-minutes: 120
runs-on: ubuntu-latest

@@ -106,9 +100,9 @@ jobs:
- uses: actions/setup-go@v3
with:
go-version: "^1.17"
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.4.1
with:
node-version: '18'
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Build web

@@ -167,9 +161,11 @@ jobs:
if: ${{ github.event_name == 'release' }}
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: authentik-security-inc
SENTRY_ORG: beryjuorg
SENTRY_PROJECT: authentik
SENTRY_URL: https://sentry.beryju.org
with:
version: authentik@${{ steps.ev.outputs.version }}
environment: beryjuorg-prod
sourcemaps: './web/dist'
url_prefix: '~/static/dist'
.github/workflows/release-tag.yml (4 changes, vendored)
@@ -26,14 +26,14 @@ jobs:
id: get_version
uses: actions/github-script@v6
with:
github-token: ${{ secrets.BOT_GITHUB_TOKEN }}
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
return context.payload.ref.replace(/\/refs\/tags\/version\//, '');
- name: Create Release
id: create_release
uses: actions/create-release@v1.1.4
env:
GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ github.ref }}
release_name: Release ${{ steps.get_version.outputs.result }}
.github/workflows/translation-compile.yml (4 changes, vendored)
@@ -19,8 +19,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run compile

@@ -29,7 +27,7 @@ jobs:
uses: peter-evans/create-pull-request@v4
id: cpr
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
token: ${{ secrets.GITHUB_TOKEN }}
branch: compile-backend-translation
commit-message: "core: compile backend translations"
title: "core: compile backend translations"
.github/workflows/web-api-publish.yml (19 changes, vendored)
@@ -10,11 +10,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@v3.4.1
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
- uses: actions/setup-node@v3.6.0
with:
node-version: '18'
node-version: '16'
registry-url: 'https://registry.npmjs.org'
- name: Generate API Client
run: make gen-client-ts

@@ -30,20 +29,14 @@ jobs:
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- uses: peter-evans/create-pull-request@v4
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
id: cpr
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
token: ${{ secrets.GITHUB_TOKEN }}
branch: update-web-api-client
commit-message: "web: bump API Client version"
title: "web: bump API Client version"
body: "web: bump API Client version"
delete-branch: true
signoff: true
team-reviewers: "@goauthentik/core"
author: authentik bot <github-bot@goauthentik.io>
- uses: peter-evans/enable-pull-request-automerge@v2
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash
.gitignore (4 changes, vendored)
@@ -194,9 +194,11 @@ pip-selfcheck.json
/static/
local.env.yml

# Selenium Screenshots
selenium_screenshots/
backups/
media/
*mmdb

.idea/
/gen-*/
data/
.vscode/settings.json (23 changes, vendored)
@@ -14,9 +14,7 @@
"webauthn",
"traefik",
"passwordless",
"kubernetes",
"sso",
"slo"
"kubernetes"
],
"python.linting.pylintEnabled": true,
"todo-tree.tree.showCountsInTree": true,

@@ -24,13 +22,7 @@
"python.formatting.provider": "black",
"yaml.customTags": [
"!Find sequence",
"!KeyOf scalar",
"!Context scalar",
"!Context sequence",
"!Format sequence",
"!Condition sequence",
"!Env sequence",
"!Env scalar"
"!KeyOf scalar"
],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.importModuleSpecifierEnding": "index",

@@ -38,14 +30,5 @@
"typescript.enablePromptUseWorkspaceTsdk": true,
"yaml.schemas": {
"./blueprints/schema.json": "blueprints/**/*.yaml"
},
"gitlens.autolinks": [
{
"alphanumeric": true,
"prefix": "#<num>",
"url": "https://github.com/goauthentik/authentik/issues/<num>",
"ignoreCase": false
}
],
"go.testFlags": ["-count=1"]
}
}
CODE_OF_CONDUCT.md

@@ -17,24 +17,24 @@ diverse, inclusive, and healthy community.
Examples of behavior that contributes to a positive environment for our
community include:

- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
- Focusing on what is best not just for us as individuals, but for the
overall community
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community

Examples of unacceptable behavior include:

- The use of sexualized language or imagery, and sexual attention or
advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email
address, without their explicit permission
- Other conduct which could reasonably be considered inappropriate in a
professional setting
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting

## Enforcement Responsibilities

@@ -106,7 +106,7 @@ Violating these terms may lead to a permanent ban.
### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within
CONTRIBUTING.md

@@ -11,22 +11,19 @@ The following is a set of guidelines for contributing to authentik and its compo
[I don't want to read this whole thing, I just have a question!!!](#i-dont-want-to-read-this-whole-thing-i-just-have-a-question)

[What should I know before I get started?](#what-should-i-know-before-i-get-started)

- [The components](#the-components)
- [authentik's structure](#authentiks-structure)
* [The components](#the-components)
* [authentik's structure](#authentiks-structure)

[How Can I Contribute?](#how-can-i-contribute)

- [Reporting Bugs](#reporting-bugs)
- [Suggesting Enhancements](#suggesting-enhancements)
- [Your First Code Contribution](#your-first-code-contribution)
- [Pull Requests](#pull-requests)
* [Reporting Bugs](#reporting-bugs)
* [Suggesting Enhancements](#suggesting-enhancements)
* [Your First Code Contribution](#your-first-code-contribution)
* [Pull Requests](#pull-requests)

[Styleguides](#styleguides)

- [Git Commit Messages](#git-commit-messages)
- [Python Styleguide](#python-styleguide)
- [Documentation Styleguide](#documentation-styleguide)
* [Git Commit Messages](#git-commit-messages)
* [Python Styleguide](#python-styleguide)
* [Documentation Styleguide](#documentation-styleguide)

## Code of Conduct

@@ -42,11 +39,11 @@ Either [create a question on GitHub](https://github.com/goauthentik/authentik/is

authentik consists of a few larger components:

- _authentik_ the actual application server, is described below.
- _outpost-proxy_ is a Go application based on a forked version of oauth2_proxy, which does identity-aware reverse proxying.
- _outpost-ldap_ is a Go LDAP server that uses the _authentik_ application server as its backend
- _web_ is the web frontend, both for administrating and using authentik. It is written in TypeScript using lit-html and the PatternFly CSS Library.
- _website_ is the Website/documentation, which uses docusaurus.
- *authentik* the actual application server, is described below.
- *outpost-proxy* is a Go application based on a forked version of oauth2_proxy, which does identity-aware reverse proxying.
- *outpost-ldap* is a Go LDAP server that uses the *authentik* application server as its backend
- *web* is the web frontend, both for administrating and using authentik. It is written in TypeScript using lit-html and the PatternFly CSS Library.
- *website* is the Website/documentation, which uses docusaurus.

### authentik's structure

@@ -59,18 +56,19 @@ These are the current packages:
authentik
├── admin - Administrative tasks and APIs, no models (Version updates, Metrics, system tasks)
├── api - General API Configuration (Routes, Schema and general API utilities)
├── blueprints - Handle managed models and their state.
├── core - Core authentik functionality, central routes, core Models
├── crypto - Cryptography, currently used to generate and hold Certificates and Private Keys
├── events - Event Log, middleware and signals to generate signals
├── flows - Flows, the FlowPlanner and the FlowExecutor, used for all flows for authentication, authorization, etc
├── lib - Generic library of functions, few dependencies on other packages.
├── managed - Handle managed models and their state.
├── outposts - Configure and deploy outposts on kubernetes and docker.
├── policies - General PolicyEngine
│ ├── dummy - A Dummy policy used for testing
│ ├── event_matcher - Match events based on different criteria
│ ├── expiry - Check when a user's password was last set
│ ├── expression - Execute any arbitrary python code
│ ├── hibp - Check a password against HaveIBeenPwned
│ ├── password - Check a password against several rules
│ └── reputation - Check the user's/client's reputation
├── providers

@@ -139,10 +137,10 @@ This is documented in the [developer docs](https://goauthentik.io/developer-docs

The process described here has several goals:

- Maintain authentik's quality
- Fix problems that are important to users
- Engage the community in working toward the best possible authentik
- Enable a sustainable system for authentik's maintainers to review contributions
- Maintain authentik's quality
- Fix problems that are important to users
- Engage the community in working toward the best possible authentik
- Enable a sustainable system for authentik's maintainers to review contributions

Please follow these steps to have your contribution considered by the maintainers:

@@ -156,10 +154,10 @@ While the prerequisites above must be satisfied prior to having your pull reques

### Git Commit Messages

- Use the format of `<package>: <verb> <description>`
- See [here](#authentik-packages) for `package`
- Example: `providers/saml2: fix parsing of requests`
- Reference issues and pull requests liberally after the first line
* Use the format of `<package>: <verb> <description>`
- See [here](#authentik-packages) for `package`
- Example: `providers/saml2: fix parsing of requests`
* Reference issues and pull requests liberally after the first line

### Python Styleguide

@@ -167,11 +165,11 @@ All Python code is linted with [black](https://black.readthedocs.io/en/stable/),

authentik runs on Python 3.9 at the time of writing this.

- Use native type-annotations wherever possible.
- Add meaningful docstrings when possible.
- Ensure any database migrations work properly from the last stable version (this is checked via CI)
- If your code changes central functions, make sure nothing else is broken.
* Use native type-annotations wherever possible.
* Add meaningful docstrings when possible.
* Ensure any database migrations work properly from the last stable version (this is checked via CI)
* If your code changes central functions, make sure nothing else is broken.

### Documentation Styleguide

- Use [MDX](https://mdxjs.com/) whenever appropriate.
* Use [MDX](https://mdxjs.com/) whenever appropriate.
Dockerfile (30 changes)
@@ -3,7 +3,6 @@ FROM --platform=${BUILDPLATFORM} docker.io/node:18 as website-builder

COPY ./website /work/website/
COPY ./blueprints /work/blueprints/
COPY ./SECURITY.md /work/

ENV NODE_ENV=production
WORKDIR /work/website

@@ -20,7 +19,7 @@ WORKDIR /work/web
RUN npm ci && npm run build

# Stage 3: Poetry to requirements.txt export
FROM docker.io/python:3.11.2-slim-bullseye AS poetry-locker
FROM docker.io/python:3.10.6-slim-bullseye AS poetry-locker

WORKDIR /work
COPY ./pyproject.toml /work

@@ -31,7 +30,7 @@ RUN pip install --no-cache-dir poetry && \
    poetry export -f requirements.txt --dev --output requirements-dev.txt

# Stage 4: Build go proxy
FROM docker.io/golang:1.20.1-bullseye AS go-builder
FROM docker.io/golang:1.19.0-bullseye AS go-builder

WORKDIR /work

@@ -44,25 +43,10 @@ COPY ./internal /work/internal
COPY ./go.mod /work/go.mod
COPY ./go.sum /work/go.sum

RUN go build -o /work/authentik ./cmd/server/
RUN go build -o /work/authentik ./cmd/server/main.go

# Stage 5: MaxMind GeoIP
FROM docker.io/maxmindinc/geoipupdate:v4.10 as geoip

ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
ENV GEOIPUPDATE_VERBOSE="true"

RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
    --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
    mkdir -p /usr/share/GeoIP && \
    /bin/sh -c "\
    export GEOIPUPDATE_ACCOUNT_ID=$(cat /run/secrets/GEOIPUPDATE_ACCOUNT_ID); \
    export GEOIPUPDATE_LICENSE_KEY=$(cat /run/secrets/GEOIPUPDATE_LICENSE_KEY); \
    /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0 \
    "

# Stage 6: Run
FROM docker.io/python:3.11.2-slim-bullseye AS final-image
# Stage 5: Run
FROM docker.io/python:3.10.6-slim-bullseye AS final-image

LABEL org.opencontainers.image.url https://goauthentik.io
LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.

@@ -75,11 +59,10 @@ ENV GIT_BUILD_HASH=$GIT_BUILD_HASH

COPY --from=poetry-locker /work/requirements.txt /
COPY --from=poetry-locker /work/requirements-dev.txt /
COPY --from=geoip /usr/share/GeoIP /geoip

RUN apt-get update && \
    # Required for installing pip packages
    apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev && \
    apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev && \
    # Required for runtime
    apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
    # Required for bootstrap & healtcheck

@@ -97,7 +80,6 @@ RUN apt-get update && \
COPY ./authentik/ /authentik
COPY ./pyproject.toml /
COPY ./xml /xml
COPY ./locale /locale
COPY ./tests /tests
COPY ./manage.py /
COPY ./blueprints /blueprints
LICENSE (687 changes)
@@ -1,21 +1,674 @@
MIT License
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (c) 2022 Jens Langhammer
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
Preamble

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The GNU General Public License is a free, copyleft license for
software and other kinds of works.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.

When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.

To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.

For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.

Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.

For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.

Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.

Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.

The precise terms and conditions for copying, distribution and
modification follow.

TERMS AND CONDITIONS

0. Definitions.

"This License" refers to version 3 of the GNU General Public License.

"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.

"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.

To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.

A "covered work" means either the unmodified Program or a work based
on the Program.

To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.

To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.

An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.

1. Source Code.

The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.

A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.

The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.

The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.

The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.

The Corresponding Source for a work in source code form is that
same work.

2. Basic Permissions.

All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.

You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.

Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.

3. Protecting Users' Legal Rights From Anti-Circumvention Law.

No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.

When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.

4. Conveying Verbatim Copies.

You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.

You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.

5. Conveying Modified Source Versions.

You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:

a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.

b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".

c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.

d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.

A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.

6. Conveying Non-Source Forms.

You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:

a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.

b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.

c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.

d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.

e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.

A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||
|
48 Makefile

@@ -6,6 +6,15 @@ NPM_VERSION = $(shell python -m scripts.npm_version)

all: lint-fix lint test gen web

test-integration:
	coverage run manage.py test tests/integration

test-e2e-provider:
	coverage run manage.py test tests/e2e/test_provider*

test-e2e-rest:
	coverage run manage.py test tests/e2e/test_flows* tests/e2e/test_source*

test-go:
	go test -timeout 0 -v -race -cover ./...

@@ -19,7 +28,7 @@ test-docker:
	rm -f .env

test:
	coverage run manage.py test --keepdb authentik
	coverage run manage.py test authentik
	coverage html
	coverage report

@@ -44,7 +53,7 @@ migrate:
	python -m lifecycle.migrate

run:
	go run -v ./cmd/server/
	go run -v cmd/server/main.go

i18n-extract: i18n-extract-core web-extract

@@ -60,11 +69,11 @@ gen-build:
	AUTHENTIK_DEBUG=true ak spectacular --file schema.yml

gen-diff:
	git show $(shell git describe --abbrev=0):schema.yml > old_schema.yml
	git show $(shell git tag -l | tail -n 1):schema.yml > old_schema.yml
	docker run \
		--rm -v ${PWD}:/local \
		--user ${UID}:${GID} \
		docker.io/openapitools/openapi-diff:2.1.0-beta.3 \
		docker.io/openapitools/openapi-diff:2.0.1 \
		--markdown /local/diff.md \
		/local/old_schema.yml /local/schema.yml
	rm old_schema.yml
@@ -81,11 +90,9 @@ gen-client-ts:
		-i /local/schema.yml \
		-g typescript-fetch \
		-o /local/gen-ts-api \
		-c /local/scripts/api-ts-config.yaml \
		--additional-properties=npmVersion=${NPM_VERSION} \
		--git-repo-id authentik \
		--git-user-id goauthentik
		--additional-properties=typescriptThreePlus=true,supportsES6=true,npmName=@goauthentik/api,npmVersion=${NPM_VERSION}
	mkdir -p web/node_modules/@goauthentik/api
	\cp -fv scripts/web_api_readme.md gen-ts-api/README.md
	cd gen-ts-api && npm i
	\cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api

@@ -117,7 +124,7 @@ gen: gen-build gen-clean gen-client-ts
web-build: web-install
	cd web && npm run build

web: web-lint-fix web-lint web-check-compile
web: web-lint-fix web-lint

web-install:
	cd web && npm ci
@@ -135,9 +142,6 @@ web-lint:
	cd web && npm run lint
	cd web && npm run lit-analyse

web-check-compile:
	cd web && npm run tsc

web-extract:
	cd web && npm run extract

@@ -145,7 +149,7 @@ web-extract:
## Website
#########################

website: website-lint-fix website-build
website: website-lint-fix

website-install:
	cd website && npm ci
@@ -153,33 +157,30 @@ website-install:
website-lint-fix:
	cd website && npm run prettier

website-build:
	cd website && npm run build

website-watch:
	cd website && npm run watch

# These targets are used by GitHub actions to allow usage of matrix
# which makes the YAML file a lot smaller
PY_SOURCES=authentik tests lifecycle

ci--meta-debug:
	python -V
	node --version

ci-pylint: ci--meta-debug
	pylint $(PY_SOURCES)
	pylint authentik tests lifecycle

ci-black: ci--meta-debug
	black --check $(PY_SOURCES)
	black --check authentik tests lifecycle

ci-isort: ci--meta-debug
	isort --check $(PY_SOURCES)
	isort --check authentik tests lifecycle

ci-bandit: ci--meta-debug
	bandit -r $(PY_SOURCES)
	bandit -r authentik tests lifecycle

ci-pyright: ci--meta-debug
	./web/node_modules/.bin/pyright $(PY_SOURCES)
	pyright e2e lifecycle

ci-pending-migrations: ci--meta-debug
	ak makemigrations --check
@@ -191,4 +192,7 @@ dev-reset:
	dropdb -U postgres -h localhost authentik
	createdb -U postgres -h localhost authentik
	redis-cli -n 0 flushall
	redis-cli -n 1 flushall
	redis-cli -n 2 flushall
	redis-cli -n 3 flushall
	make migrate
29 README.md

@@ -5,13 +5,14 @@

---

[](https://goauthentik.io/discord)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-main.yml)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-outpost.yml)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-web.yml)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-main.yml)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-outpost.yml)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-web.yml)
[](https://codecov.io/gh/goauthentik/authentik)
[](https://goauthentik.testspace.com/)
[](https://www.transifex.com/authentik/authentik/)
[](https://www.transifex.com/beryjuorg/authentik/)

## What is authentik?

@@ -25,10 +26,10 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h

## Screenshots

| Light | Dark |
| ----- | ---- |
|       |      |
|       |      |
Light | Dark
--- | ---
 |
 |

## Development

@@ -38,10 +39,6 @@ See [Development Documentation](https://goauthentik.io/developer-docs/?utm_sourc

See [SECURITY.md](SECURITY.md)

## Support

Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR!

## Sponsors

This project is proudly sponsored by:

@@ -53,3 +50,11 @@ This project is proudly sponsored by:
</p>

DigitalOcean provides development and testing resources for authentik.

<p>
    <a href="https://www.netlify.com">
        <img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" />
    </a>
</p>

Netlify hosts the [goauthentik.io](https://goauthentik.io) site.
43 SECURITY.md

@@ -1,44 +1,15 @@
Authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce its existence and release the fixed version.
# Security Policy

## Supported Versions

(.x being the latest patch release for each version)

| Version   | Supported          |
| --------- | ------------------ |
| 2022.12.x | :white_check_mark: |
| 2023.1.x  | :white_check_mark: |
| Version  | Supported          |
| -------- | ------------------ |
| 2022.6.x | :white_check_mark: |
| 2022.7.x | :white_check_mark: |
| 2022.8.x | :white_check_mark: |

## Reporting a Vulnerability

To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io). Be sure to include relevant information like which version you've found the issue in, instructions on how to reproduce the issue, and anything else that might make it easier for us to find the bug.

## Criticality levels

### High

- Authorization bypass
- Circumvention of policies

### Moderate

- Denial-of-Service attacks

### Low

- Unvalidated redirects
- Issues requiring uncommon setups

## Disclosure process

1. Issue is reported via email as listed above.
2. The authentik Security team will try to reproduce the issue and ask for more information if required.
3. A criticality level is assigned.
4. A fix is created, and if possible tested by the issue reporter.
5. The fix is backported to other supported versions, and if possible a workaround for other versions is created.
6. An announcement is sent out with a fixed release date and criticality level of the issue. The announcement will be sent at least 24 hours before the release of the fix.
7. The fixed version is released for the supported versions.

## Getting security notifications

To get security notifications, subscribe to the mailing list [here](https://groups.google.com/g/authentik-security-announcements) or join the [discord](https://goauthentik.io/discord) server.
To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io)
@@ -2,14 +2,16 @@
from os import environ
from typing import Optional

__version__ = "2023.2.3"
__version__ = "2022.8.2"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"


def get_build_hash(fallback: Optional[str] = None) -> str:
    """Get build hash"""
    build_hash = environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "")
    return fallback if build_hash == "" and fallback else build_hash
    if build_hash == "" and fallback:
        return fallback
    return build_hash


def get_full_version() -> str:
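The `get_build_hash` change above replaces a hard-to-read one-line conditional expression with explicit branches. A minimal, self-contained sketch of that fallback behavior (the assertion values are illustrative, not from the repository):

```python
import os
from typing import Optional

ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"


def get_build_hash(fallback: Optional[str] = None) -> str:
    """Get the git build hash from the environment, falling back if unset or empty."""
    build_hash = os.environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "")
    if build_hash == "" and fallback:
        return fallback
    return build_hash


os.environ.pop(ENV_GIT_HASH_KEY, None)
assert get_build_hash("dev") == "dev"       # unset variable -> fallback
os.environ[ENV_GIT_HASH_KEY] = "abc123"
assert get_build_hash("dev") == "abc123"    # set variable wins
```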
@@ -1,7 +1,4 @@
"""authentik administration metrics"""
from datetime import timedelta

from django.db.models.functions import ExtractHour
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import IntegerField, SerializerMethodField
@@ -24,44 +21,27 @@ class CoordinateSerializer(PassiveSerializer):
class LoginMetricsSerializer(PassiveSerializer):
    """Login Metrics per 1h"""

    logins = SerializerMethodField()
    logins_failed = SerializerMethodField()
    authorizations = SerializerMethodField()
    logins_per_1h = SerializerMethodField()
    logins_failed_per_1h = SerializerMethodField()

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_logins(self, _):
        """Get successful logins per 8 hours for the last 7 days"""
    def get_logins_per_1h(self, _):
        """Get successful logins per hour for the last 24 hours"""
        user = self.context["user"]
        return (
            get_objects_for_user(user, "authentik_events.view_event").filter(
                action=EventAction.LOGIN
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
            get_objects_for_user(user, "authentik_events.view_event")
            .filter(action=EventAction.LOGIN)
            .get_events_per_hour()
        )

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_logins_failed(self, _):
        """Get failed logins per 8 hours for the last 7 days"""
    def get_logins_failed_per_1h(self, _):
        """Get failed logins per hour for the last 24 hours"""
        user = self.context["user"]
        return (
            get_objects_for_user(user, "authentik_events.view_event").filter(
                action=EventAction.LOGIN_FAILED
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
        )

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_authorizations(self, _):
        """Get successful authorizations per 8 hours for the last 7 days"""
        user = self.context["user"]
        return (
            get_objects_for_user(user, "authentik_events.view_event").filter(
                action=EventAction.AUTHORIZE_APPLICATION
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
            get_objects_for_user(user, "authentik_events.view_event")
            .filter(action=EventAction.LOGIN_FAILED)
            .get_events_per_hour()
        )
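The metrics hunk trades three 7-day series (8-hour buckets) for two 24-hour series (1-hour buckets); `get_events_per_hour()` is a queryset helper internal to this codebase. As a rough plain-Python sketch of hourly bucketing under those assumptions (the `x_cord`/`y_cord` keys mirror the coordinate shape the serializer above implies):

```python
from collections import Counter
from datetime import datetime, timedelta


def events_per_hour(timestamps: list[datetime], now: datetime) -> list[dict]:
    """Bucket event timestamps into 24 hourly {x_cord, y_cord} coordinates."""
    ages = Counter(
        int((now - ts) // timedelta(hours=1))  # whole hours ago, 0..23
        for ts in timestamps
        if timedelta(0) <= now - ts < timedelta(hours=24)
    )
    # Oldest bucket first, the current hour (age 0) last.
    return [{"x_cord": -age, "y_cord": ages.get(age, 0)} for age in range(23, -1, -1)]


now = datetime(2022, 8, 1, 12, 0)
sample = [now - timedelta(minutes=m) for m in (5, 30, 90)]
print(events_per_hour(sample, now)[-2:])  # the two most recent hourly buckets
```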
@@ -8,6 +8,7 @@ from typing import TypedDict
from django.utils.timezone import now
from drf_spectacular.utils import extend_schema
from gunicorn import version_info as gunicorn_version
from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
from rest_framework.fields import SerializerMethodField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
@@ -15,7 +16,6 @@ from rest_framework.response import Response
from rest_framework.views import APIView

from authentik.core.api.utils import PassiveSerializer
from authentik.lib.utils.reflection import get_env
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost

@@ -69,7 +69,7 @@ class SystemSerializer(PassiveSerializer):
        return {
            "python_version": python_version,
            "gunicorn_version": ".".join(str(x) for x in gunicorn_version),
            "environment": get_env(),
            "environment": "kubernetes" if SERVICE_HOST_ENV_NAME in os.environ else "compose",
            "architecture": platform.machine(),
            "platform": platform.platform(),
            "uname": " ".join(platform.uname()),
@@ -97,14 +97,8 @@ class SystemView(APIView):
    permission_classes = [IsAdminUser]
    pagination_class = None
    filter_backends = []
    serializer_class = SystemSerializer

    @extend_schema(responses={200: SystemSerializer(many=False)})
    def get(self, request: Request) -> Response:
        """Get system information."""
        return Response(SystemSerializer(request).data)

    @extend_schema(responses={200: SystemSerializer(many=False)})
    def post(self, request: Request) -> Response:
        """Get system information."""
        return Response(SystemSerializer(request).data)
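The `environment` field now checks for the in-cluster service-host variable instead of calling a reflection helper. A standalone sketch of that detection; `SERVICE_HOST_ENV_NAME` is the constant the hunk imports from the kubernetes client, which resolves to `KUBERNETES_SERVICE_HOST`:

```python
import os

# Inlined here so the sketch runs without the kubernetes package installed.
SERVICE_HOST_ENV_NAME = "KUBERNETES_SERVICE_HOST"


def detect_environment() -> str:
    """Report "kubernetes" when running in-cluster, otherwise "compose"."""
    return "kubernetes" if SERVICE_HOST_ENV_NAME in os.environ else "compose"


print(detect_environment())  # "compose" outside a cluster
```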
@@ -5,15 +5,9 @@ from django.contrib import messages
from django.http.response import Http404
from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework.decorators import action
from rest_framework.fields import (
    CharField,
    ChoiceField,
    DateTimeField,
    ListField,
    SerializerMethodField,
)
from rest_framework.fields import CharField, ChoiceField, DateTimeField, ListField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
@@ -32,7 +26,6 @@ class TaskSerializer(PassiveSerializer):
    task_name = CharField()
    task_description = CharField()
    task_finish_timestamp = DateTimeField(source="finish_time")
    task_duration = SerializerMethodField()

    status = ChoiceField(
        source="result.status.name",
@@ -40,18 +33,13 @@
    )
    messages = ListField(source="result.messages")

    def get_task_duration(self, instance: TaskInfo) -> int:
        """Get the duration a task took to run"""
        return max(instance.finish_timestamp - instance.start_timestamp, 0)

    def to_representation(self, instance: TaskInfo):
    def to_representation(self, instance):
        """When a new version of authentik adds fields to TaskInfo,
        the API will fail with an AttributeError, as the classes
        are pickled in cache. In that case, just delete the info"""
        try:
            return super().to_representation(instance)
        # pylint: disable=broad-except
        except Exception:  # pragma: no cover
        except AttributeError:  # pragma: no cover
            if isinstance(self.instance, list):
                for inst in self.instance:
                    inst.delete()
@@ -70,16 +58,9 @@ class TaskViewSet(ViewSet):
        responses={
            200: TaskSerializer(many=False),
            404: OpenApiResponse(description="Task not found"),
        },
        parameters=[
            OpenApiParameter(
                "id",
                type=OpenApiTypes.STR,
                location=OpenApiParameter.PATH,
                required=True,
            ),
        ],
        }
    )
    # pylint: disable=invalid-name
    def retrieve(self, request: Request, pk=None) -> Response:
        """Get a single system task"""
        task = TaskInfo.by_name(pk)
@@ -100,16 +81,9 @@ class TaskViewSet(ViewSet):
            404: OpenApiResponse(description="Task not found"),
            500: OpenApiResponse(description="Failed to retry task"),
        },
        parameters=[
            OpenApiParameter(
                "id",
                type=OpenApiTypes.STR,
                location=OpenApiParameter.PATH,
                required=True,
            ),
        ],
    )
    @action(detail=True, methods=["post"])
    # pylint: disable=invalid-name
    def retry(self, request: Request, pk=None) -> Response:
        """Retry task"""
        task = TaskInfo.by_name(pk)
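The newer side of this hunk introduces a `task_duration` method field whose value is clamped at zero, so a finish timestamp that lands before the start (clock skew, restarts) never yields a negative duration. A tiny sketch, assuming epoch-second floats as `TaskInfo` appears to store:

```python
def task_duration(start_timestamp: float, finish_timestamp: float) -> int:
    """Duration in whole seconds, clamped so skewed clocks never go negative."""
    return max(int(finish_timestamp - start_timestamp), 0)


assert task_duration(100.0, 103.5) == 3
assert task_duration(100.0, 99.0) == 0  # skew -> 0 instead of -1
```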
@@ -1,7 +1,7 @@
"""authentik admin app config"""
from prometheus_client import Gauge, Info

from authentik.blueprints.apps import ManagedAppConfig
from authentik.blueprints.manager import ManagedAppConfig

PROM_INFO = Info("authentik_version", "Currently running authentik version")
GAUGE_WORKERS = Gauge("authentik_admin_workers", "Currently connected workers")
@@ -8,6 +8,7 @@ from authentik.root.monitoring import monitoring_set


@receiver(monitoring_set)
# pylint: disable=unused-argument
def monitoring_set_workers(sender, **kwargs):
    """Set worker gauge"""
    count = len(CELERY_APP.control.ping(timeout=0.5))
@@ -15,7 +16,8 @@


@receiver(monitoring_set)
# pylint: disable=unused-argument
def monitoring_set_tasks(sender, **kwargs):
    """Set task gauges"""
    for task in TaskInfo.all().values():
        task.update_metrics()
        task.set_prom_metrics()
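These two hunks hang Prometheus metric updates off a custom `monitoring_set` signal so gauges are refreshed on every scrape. A self-contained sketch of the same pattern with illustrative names (the worker count is hard-coded here where authentik pings Celery):

```python
from django.dispatch import Signal, receiver
from prometheus_client import Gauge

monitoring_set = Signal()  # fired by the monitoring view before rendering /metrics
GAUGE_WORKERS = Gauge("example_workers", "Currently connected workers")


@receiver(monitoring_set)
def monitoring_set_workers(sender, **kwargs):
    """Refresh the worker gauge on each scrape."""
    GAUGE_WORKERS.set(3)  # authentik derives this from CELERY_APP.control.ping()


monitoring_set.send(sender=None)  # what the monitoring endpoint would trigger
```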
@@ -31,5 +31,4 @@ class AuthentikAPIConfig(AppConfig):
            "type": "apiKey",
            "in": "header",
            "name": "Authorization",
            "scheme": "bearer",
        }
@@ -11,11 +11,12 @@ from authentik.core.middleware import CTX_AUTH_VIA
from authentik.core.models import Token, TokenIntents, User
from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
from authentik.providers.oauth2.models import RefreshToken

LOGGER = get_logger()


def validate_auth(header: bytes) -> Optional[str]:
def validate_auth(header: bytes) -> str:
    """Validate that the header is in a correct format,
    returns type and credentials"""
    auth_credentials = header.decode().strip()
@@ -32,18 +33,6 @@ def validate_auth(header: bytes) -> Optional[str]:

def bearer_auth(raw_header: bytes) -> Optional[User]:
    """raw_header in the Format of `Bearer ....`"""
    user = auth_user_lookup(raw_header)
    if not user:
        return None
    if not user.is_active:
        raise AuthenticationFailed("Token invalid/expired")
    return user


def auth_user_lookup(raw_header: bytes) -> Optional[User]:
    """raw_header in the Format of `Bearer ....`"""
    from authentik.providers.oauth2.models import AccessToken

    auth_credentials = validate_auth(raw_header)
    if not auth_credentials:
        return None
@@ -55,8 +44,8 @@ def auth_user_lookup(raw_header: bytes) -> Optional[User]:
        CTX_AUTH_VIA.set("api_token")
        return key_token.user
    # then try to auth via JWT
    jwt_token = AccessToken.filter_not_expired(
        token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
    jwt_token = RefreshToken.filter_not_expired(
        refresh_token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
    ).first()
    if jwt_token:
        # Double-check scopes, since they are saved in a single string
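The newer side splits bearer handling into a lookup step plus an explicit `is_active` gate, so a valid token belonging to a deactivated user is rejected rather than silently accepted. A minimal sketch of that control flow (the dict-based token store stands in for the ORM models):

```python
from typing import Optional


class AuthenticationFailed(Exception):
    pass


# Toy token -> (username, is_active) store standing in for Token/AccessToken.
TOKENS = {"abc": ("admin", True), "old": ("leaver", False)}


def auth_user_lookup(raw_header: bytes) -> Optional[tuple]:
    """Resolve `Bearer <token>` to a user record, or None."""
    credentials = raw_header.decode().strip()
    if not credentials.startswith("Bearer "):
        return None
    return TOKENS.get(credentials.removeprefix("Bearer "))


def bearer_auth(raw_header: bytes) -> Optional[tuple]:
    """Look the user up, then reject deactivated accounts explicitly."""
    user = auth_user_lookup(raw_header)
    if not user:
        return None
    if not user[1]:  # the is_active check added in this hunk
        raise AuthenticationFailed("Token invalid/expired")
    return user


assert bearer_auth(b"Bearer abc") == ("admin", True)
try:
    bearer_auth(b"Bearer old")
except AuthenticationFailed:
    print("deactivated user rejected")
```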
@@ -1,15 +1,9 @@
"""API Authorization"""
from django.conf import settings
from django.db.models import Model
from django.db.models.query import QuerySet
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.authentication import get_authorization_header
from rest_framework.filters import BaseFilterBackend
from rest_framework.permissions import BasePermission
from rest_framework.request import Request
from rest_framework_guardian.filters import ObjectPermissionsFilter

from authentik.api.authentication import validate_auth


class OwnerFilter(BaseFilterBackend):
@@ -23,20 +17,6 @@ class OwnerFilter(BaseFilterBackend):
        return queryset.filter(**{self.owner_key: request.user})


class SecretKeyFilter(DjangoFilterBackend):
    """Allow access to all objects when authenticated with secret key as token.

    Replaces both DjangoFilterBackend and ObjectPermissionsFilter"""

    def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
        auth_header = get_authorization_header(request)
        token = validate_auth(auth_header)
        if token and token == settings.SECRET_KEY:
            return queryset
        queryset = ObjectPermissionsFilter().filter_queryset(request, queryset, view)
        return super().filter_queryset(request, queryset, view)


class OwnerPermissions(BasePermission):
    """Authorize requests by an object's owner matching the requesting user"""
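The removed `SecretKeyFilter` short-circuits object-level filtering when the request authenticates with the Django `SECRET_KEY` itself. A rough sketch of that short-circuit, with plain functions standing in for the DRF filter-backend classes:

```python
SECRET_KEY = "django-insecure-example"  # stand-in for settings.SECRET_KEY


def filter_queryset(token: str, queryset: list, permitted: list) -> list:
    """Return everything for the secret key, else only permitted objects."""
    if token == SECRET_KEY:
        return queryset  # full access, skipping per-object permissions
    return [obj for obj in queryset if obj in permitted]


everything = ["a", "b", "c"]
assert filter_queryset(SECRET_KEY, everything, []) == everything
assert filter_queryset("user-token", everything, ["b"]) == ["b"]
```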
@@ -62,7 +62,7 @@ window.addEventListener('DOMContentLoaded', (event) => {
    allow-spec-url-load="false"
    allow-spec-file-load="false">
    <div slot="nav-logo">
        <img alt="authentik Logo" class="logo" src="{% static 'dist/assets/icons/icon_left_brand.png' %}" />
        <img class="logo" src="{% static 'dist/assets/icons/icon_left_brand.png' %}" />
    </div>
</rapi-doc>
<script>
@@ -1,18 +1,18 @@
"""Test API Authentication"""
import json
from base64 import b64encode

from django.conf import settings
from django.test import TestCase
from guardian.shortcuts import get_anonymous_user
from rest_framework.exceptions import AuthenticationFailed

from authentik.api.authentication import bearer_auth
from authentik.blueprints.tests import reconcile_app
from authentik.core.models import USER_ATTRIBUTE_SA, Token, TokenIntents
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.core.tests.utils import create_test_flow
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
from authentik.providers.oauth2.models import OAuth2Provider, RefreshToken


class TestAPIAuth(TestCase):
@@ -36,18 +36,9 @@ class TestAPIAuth(TestCase):

    def test_bearer_valid(self):
        """Test valid token"""
        token = Token.objects.create(intent=TokenIntents.INTENT_API, user=create_test_admin_user())
        token = Token.objects.create(intent=TokenIntents.INTENT_API, user=get_anonymous_user())
        self.assertEqual(bearer_auth(f"Bearer {token.key}".encode()), token.user)

    def test_bearer_valid_deactivated(self):
        """Test valid token"""
        user = create_test_admin_user()
        user.is_active = False
        user.save()
        token = Token.objects.create(intent=TokenIntents.INTENT_API, user=user)
        with self.assertRaises(AuthenticationFailed):
            bearer_auth(f"Bearer {token.key}".encode())

    def test_managed_outpost(self):
        """Test managed outpost"""
        with self.assertRaises(AuthenticationFailed):
@@ -64,26 +55,24 @@ class TestAPIAuth(TestCase):
        provider = OAuth2Provider.objects.create(
            name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
        )
        refresh = AccessToken.objects.create(
            user=create_test_admin_user(),
        refresh = RefreshToken.objects.create(
            user=get_anonymous_user(),
            provider=provider,
            token=generate_id(),
            refresh_token=generate_id(),
            _scope=SCOPE_AUTHENTIK_API,
            _id_token=json.dumps({}),
        )
        self.assertEqual(bearer_auth(f"Bearer {refresh.token}".encode()), refresh.user)
        self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)

    def test_jwt_missing_scope(self):
        """Test valid JWT"""
        provider = OAuth2Provider.objects.create(
            name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
        )
        refresh = AccessToken.objects.create(
            user=create_test_admin_user(),
        refresh = RefreshToken.objects.create(
            user=get_anonymous_user(),
            provider=provider,
            token=generate_id(),
            refresh_token=generate_id(),
            _scope="",
            _id_token=json.dumps({}),
        )
        with self.assertRaises(AuthenticationFailed):
            self.assertEqual(bearer_auth(f"Bearer {refresh.token}".encode()), refresh.user)
            self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)
@@ -28,14 +28,12 @@ class Capabilities(models.TextChoices):
    CAN_SAVE_MEDIA = "can_save_media"
    CAN_GEO_IP = "can_geo_ip"
    CAN_IMPERSONATE = "can_impersonate"
    CAN_DEBUG = "can_debug"


class ErrorReportingConfigSerializer(PassiveSerializer):
    """Config for error reporting"""

    enabled = BooleanField(read_only=True)
    sentry_dsn = CharField(read_only=True)
    environment = CharField(read_only=True)
    send_pii = BooleanField(read_only=True)
    traces_sample_rate = FloatField(read_only=True)
@@ -68,8 +66,6 @@ class ConfigView(APIView):
            caps.append(Capabilities.CAN_GEO_IP)
        if CONFIG.y_bool("impersonation"):
            caps.append(Capabilities.CAN_IMPERSONATE)
        if settings.DEBUG:  # pragma: no cover
            caps.append(Capabilities.CAN_DEBUG)
        return caps

    def get_config(self) -> ConfigSerializer:
@@ -78,7 +74,6 @@
            {
                "error_reporting": {
                    "enabled": CONFIG.y("error_reporting.enabled"),
                    "sentry_dsn": CONFIG.y("error_reporting.sentry_dsn"),
                    "environment": CONFIG.y("error_reporting.environment"),
                    "send_pii": CONFIG.y("error_reporting.send_pii"),
                    "traces_sample_rate": float(CONFIG.y("error_reporting.sample_rate", 0.4)),
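This hunk adds a `CAN_DEBUG` capability that is only advertised when `settings.DEBUG` is on, letting clients discover it at runtime. A compact sketch of that flag-list pattern, using a plain `Enum` in place of Django's `TextChoices` and booleans in place of the config lookups:

```python
from enum import Enum


class Capabilities(str, Enum):
    CAN_IMPERSONATE = "can_impersonate"
    CAN_DEBUG = "can_debug"


def get_capabilities(impersonation: bool, debug: bool) -> list[Capabilities]:
    """Collect only the capabilities this deployment actually offers."""
    caps = []
    if impersonation:
        caps.append(Capabilities.CAN_IMPERSONATE)
    if debug:  # advertised only for DEBUG deployments
        caps.append(Capabilities.CAN_DEBUG)
    return caps


assert get_capabilities(True, False) == [Capabilities.CAN_IMPERSONATE]
```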
@ -45,26 +45,21 @@ from authentik.policies.dummy.api import DummyPolicyViewSet
from authentik.policies.event_matcher.api import EventMatcherPolicyViewSet
from authentik.policies.expiry.api import PasswordExpiryPolicyViewSet
from authentik.policies.expression.api import ExpressionPolicyViewSet
from authentik.policies.hibp.api import HaveIBeenPwendPolicyViewSet
from authentik.policies.password.api import PasswordPolicyViewSet
from authentik.policies.reputation.api import ReputationPolicyViewSet, ReputationViewSet
from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet
from authentik.providers.oauth2.api.providers import OAuth2ProviderViewSet
from authentik.providers.oauth2.api.scopes import ScopeMappingViewSet
from authentik.providers.oauth2.api.tokens import (
    AccessTokenViewSet,
    AuthorizationCodeViewSet,
    RefreshTokenViewSet,
)
from authentik.providers.oauth2.api.provider import OAuth2ProviderViewSet
from authentik.providers.oauth2.api.scope import ScopeMappingViewSet
from authentik.providers.oauth2.api.tokens import AuthorizationCodeViewSet, RefreshTokenViewSet
from authentik.providers.proxy.api import ProxyOutpostConfigViewSet, ProxyProviderViewSet
from authentik.providers.saml.api.property_mapping import SAMLPropertyMappingViewSet
from authentik.providers.saml.api.providers import SAMLProviderViewSet
from authentik.providers.saml.api import SAMLPropertyMappingViewSet, SAMLProviderViewSet
from authentik.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
from authentik.sources.oauth.api.source import OAuthSourceViewSet
from authentik.sources.oauth.api.source_connection import UserOAuthSourceConnectionViewSet
from authentik.sources.plex.api.source import PlexSourceViewSet
from authentik.sources.plex.api.source_connection import PlexSourceConnectionViewSet
from authentik.sources.saml.api.source import SAMLSourceViewSet
from authentik.sources.saml.api.source_connection import UserSAMLSourceConnectionViewSet
from authentik.sources.saml.api import SAMLSourceViewSet
from authentik.stages.authenticator_duo.api import (
    AuthenticatorDuoStageViewSet,
    DuoAdminDeviceViewSet,
@ -143,7 +138,6 @@ router.register("sources/all", SourceViewSet)
router.register("sources/user_connections/all", UserSourceConnectionViewSet)
router.register("sources/user_connections/oauth", UserOAuthSourceConnectionViewSet)
router.register("sources/user_connections/plex", PlexSourceConnectionViewSet)
router.register("sources/user_connections/saml", UserSAMLSourceConnectionViewSet)
router.register("sources/ldap", LDAPSourceViewSet)
router.register("sources/saml", SAMLSourceViewSet)
router.register("sources/oauth", OAuthSourceViewSet)
@ -153,6 +147,7 @@ router.register("policies/all", PolicyViewSet)
router.register("policies/bindings", PolicyBindingViewSet)
router.register("policies/expression", ExpressionPolicyViewSet)
router.register("policies/event_matcher", EventMatcherPolicyViewSet)
router.register("policies/haveibeenpwned", HaveIBeenPwendPolicyViewSet)
router.register("policies/password_expiry", PasswordExpiryPolicyViewSet)
router.register("policies/password", PasswordPolicyViewSet)
router.register("policies/reputation/scores", ReputationViewSet)
@ -166,7 +161,6 @@ router.register("providers/saml", SAMLProviderViewSet)

router.register("oauth2/authorization_codes", AuthorizationCodeViewSet)
router.register("oauth2/refresh_tokens", RefreshTokenViewSet)
router.register("oauth2/access_tokens", AccessTokenViewSet)

router.register("propertymappings/all", PropertyMappingViewSet)
router.register("propertymappings/ldap", LDAPPropertyMappingViewSet)
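Side note on the hunks above: the API surface is assembled with a DRF router, where each `router.register(prefix, ViewSet)` call derives list and detail routes from the viewset's actions. A minimal sketch of the pattern — the `WidgetViewSet` below is hypothetical, not part of this diff:

```python
# Minimal sketch of the registration pattern above; WidgetViewSet is hypothetical.
from rest_framework.response import Response
from rest_framework.routers import DefaultRouter
from rest_framework.viewsets import ViewSet


class WidgetViewSet(ViewSet):
    """Hypothetical viewset used only to illustrate router.register()."""

    def list(self, request):
        return Response([])


router = DefaultRouter()
router.register("widgets", WidgetViewSet, basename="widget")
# router.urls now contains /widgets/ (list); detail routes appear once
# retrieve()/update()/destroy() actions are defined on the viewset.
urlpatterns = router.urls
```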
@ -1,5 +1,4 @@
"""Serializer mixin for managed models"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
@ -12,7 +11,6 @@ from rest_framework.viewsets import ModelViewSet

from authentik.api.decorators import permission_required
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer
@ -42,22 +40,8 @@ class BlueprintInstanceSerializer(ModelSerializer):
            raise ValidationError(exc) from exc
        return path

    def validate_content(self, content: str) -> str:
        """Ensure content (if set) is a valid blueprint"""
        if content == "":
            return content
        context = self.instance.context if self.instance else {}
        valid, logs = Importer(content, context).validate()
        if not valid:
            raise ValidationError(_("Failed to validate blueprint"), *[x["msg"] for x in logs])
        return content

    def validate(self, attrs: dict) -> dict:
        if attrs.get("path", "") == "" and attrs.get("content", "") == "":
            raise ValidationError(_("Either path or content must be set."))
        return super().validate(attrs)

    class Meta:

        model = BlueprintInstance
        fields = [
            "pk",
@ -70,7 +54,6 @@ class BlueprintInstanceSerializer(ModelSerializer):
            "enabled",
            "managed_models",
            "metadata",
            "content",
        ]
        extra_kwargs = {
            "status": {"read_only": True},
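The removed `validate()` hunk above is the standard DRF spot for cross-field rules; a standalone sketch of the same "either path or content" check (this simplified serializer is illustrative, not the one in the diff):

```python
# Standalone sketch of the cross-field rule above: field-level validators
# can't see both fields at once, so the check lives in validate().
from rest_framework import serializers


class InstanceSketchSerializer(serializers.Serializer):
    """Illustrative stand-in for BlueprintInstanceSerializer."""

    path = serializers.CharField(allow_blank=True, default="")
    content = serializers.CharField(allow_blank=True, default="")

    def validate(self, attrs: dict) -> dict:
        if attrs.get("path", "") == "" and attrs.get("content", "") == "":
            raise serializers.ValidationError("Either path or content must be set.")
        return attrs


# Errors raised in validate() land under non_field_errors, which matches the
# API test removed further down that asserts {"non_field_errors": [...]}.
```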
@ -1,46 +1,6 @@
"""authentik Blueprints app"""
from importlib import import_module
from inspect import ismethod

from django.apps import AppConfig
from django.db import DatabaseError, InternalError, ProgrammingError
from structlog.stdlib import BoundLogger, get_logger


class ManagedAppConfig(AppConfig):
    """Basic reconciliation logic for apps"""

    _logger: BoundLogger

    def __init__(self, app_name: str, *args, **kwargs) -> None:
        super().__init__(app_name, *args, **kwargs)
        self._logger = get_logger().bind(app_name=app_name)

    def ready(self) -> None:
        self.reconcile()
        return super().ready()

    def import_module(self, path: str):
        """Load module"""
        import_module(path)

    def reconcile(self) -> None:
        """reconcile ourselves"""
        prefix = "reconcile_"
        for meth_name in dir(self):
            meth = getattr(self, meth_name)
            if not ismethod(meth):
                continue
            if not meth_name.startswith(prefix):
                continue
            name = meth_name.replace(prefix, "")
            try:
                self._logger.debug("Starting reconciler", name=name)
                meth()
                self._logger.debug("Successfully reconciled", name=name)
            except (DatabaseError, ProgrammingError, InternalError) as exc:
                self._logger.debug("Failed to run reconcile", name=name, exc=exc)
from authentik.blueprints.manager import ManagedAppConfig


class AuthentikBlueprintsConfig(ManagedAppConfig):
@ -57,11 +17,6 @@ class AuthentikBlueprintsConfig(ManagedAppConfig):

    def reconcile_blueprints_discover(self):
        """Run blueprint discovery"""
        from authentik.blueprints.v1.tasks import blueprints_discover, clear_failed_blueprints
        from authentik.blueprints.v1.tasks import blueprints_discover

        blueprints_discover.delay()
        clear_failed_blueprints.delay()

    def import_models(self):
        super().import_models()
        self.import_module("authentik.blueprints.v1.meta.apply_blueprint")
@ -1,6 +1,4 @@
"""Apply blueprint from commandline"""
from sys import exit as sys_exit

from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger

@ -22,9 +20,8 @@ class Command(BaseCommand):
        valid, logs = importer.validate()
        if not valid:
            for log in logs:
                getattr(LOGGER, log.pop("log_level"))(**log)
            self.stderr.write("blueprint invalid")
            sys_exit(1)
                LOGGER.debug(**log)
            raise ValueError("blueprint invalid")
        importer.apply()

    def add_arguments(self, parser):
@ -2,11 +2,12 @@
from json import dumps, loads
from pathlib import Path

from django.apps import apps
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger

from authentik.blueprints.v1.importer import is_model_allowed
from authentik.blueprints.v1.meta.registry import registry
from authentik.lib.models import SerializerModel

LOGGER = get_logger()

@ -28,9 +29,10 @@ class Command(BaseCommand):
    def set_model_allowed(self):
        """Set model enum"""
        model_names = []
        for model in registry.get_models():
        for model in apps.get_models():
            if not is_model_allowed(model):
                continue
            if SerializerModel not in model.__mro__:
                continue
            model_names.append(f"{model._meta.app_label}.{model._meta.model_name}")
        model_names.sort()
        self.schema["properties"]["entries"]["items"]["properties"]["model"]["enum"] = model_names
@ -41,7 +41,8 @@
        "$id": "#entry",
        "type": "object",
        "required": [
            "model"
            "model",
            "identifiers"
        ],
        "properties": {
            "model": {
@ -53,21 +54,6 @@
            "id": {
                "type": "string"
            },
            "state": {
                "type": "string",
                "enum": [
                    "absent",
                    "present",
                    "created"
                ],
                "default": "present"
            },
            "conditions": {
                "type": "array",
                "items": {
                    "type": "boolean"
                }
            },
            "attrs": {
                "type": "object",
                "properties": {
@ -81,7 +67,6 @@
            },
            "identifiers": {
                "type": "object",
                "default": {},
                "properties": {
                    "pk": {
                        "description": "Commonly available field, may not exist on all models",
44
authentik/blueprints/manager.py
Normal file
@ -0,0 +1,44 @@
"""Managed objects manager"""
from importlib import import_module
from inspect import ismethod

from django.apps import AppConfig
from django.db import DatabaseError, InternalError, ProgrammingError
from structlog.stdlib import BoundLogger, get_logger

LOGGER = get_logger()


class ManagedAppConfig(AppConfig):
    """Basic reconciliation logic for apps"""

    _logger: BoundLogger

    def __init__(self, app_name: str, *args, **kwargs) -> None:
        super().__init__(app_name, *args, **kwargs)
        self._logger = get_logger().bind(app_name=app_name)

    def ready(self) -> None:
        self.reconcile()
        return super().ready()

    def import_module(self, path: str):
        """Load module"""
        import_module(path)

    def reconcile(self) -> None:
        """reconcile ourselves"""
        prefix = "reconcile_"
        for meth_name in dir(self):
            meth = getattr(self, meth_name)
            if not ismethod(meth):
                continue
            if not meth_name.startswith(prefix):
                continue
            name = meth_name.replace(prefix, "")
            try:
                self._logger.debug("Starting reconciler", name=name)
                meth()
                self._logger.debug("Successfully reconciled", name=name)
            except (DatabaseError, ProgrammingError, InternalError) as exc:
                self._logger.debug("Failed to run reconcile", name=name, exc=exc)
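The manager above discovers reconcilers purely by method-name prefix; a dependency-free sketch of that discovery loop (class and method names here are illustrative):

```python
# Dependency-free sketch of the reconcile_ discovery loop in ManagedAppConfig:
# every bound method named reconcile_<thing> is found via dir() and invoked.
from inspect import ismethod


class SketchConfig:
    def reconcile_cache(self):
        print("cache reconciled")

    def reconcile_tasks(self):
        print("tasks reconciled")

    def reconcile(self) -> None:
        prefix = "reconcile_"
        for meth_name in dir(self):
            meth = getattr(self, meth_name)
            if not ismethod(meth) or not meth_name.startswith(prefix):
                continue
            meth()  # the real code logs and swallows database errors per method


SketchConfig().reconcile()  # runs both reconcilers (dir() order is alphabetical)
```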
@ -4,7 +4,7 @@ from glob import glob
from pathlib import Path

import django.contrib.postgres.fields
from dacite.core import from_dict
from dacite import from_dict
from django.apps.registry import Apps
from django.conf import settings
from django.db import migrations, models
@ -71,6 +71,7 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEdit


class Migration(migrations.Migration):

    initial = True

    dependencies = [("authentik_flows", "0001_initial")]
@ -85,12 +86,7 @@ class Migration(migrations.Migration):
                    "managed",
                    models.TextField(
                        default=None,
                        help_text=(
                            "Objects which are managed by authentik. These objects are created and"
                            " updated automatically. This is flag only indicates that an object can"
                            " be overwritten by migrations. You can still modify the objects via"
                            " the API, but expect changes to be overwritten in a later update."
                        ),
                        help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
                        null=True,
                        unique=True,
                        verbose_name="Managed by authentik",
@ -1,22 +0,0 @@
# Generated by Django 4.1.5 on 2023-01-10 19:48

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("authentik_blueprints", "0001_initial"),
    ]

    operations = [
        migrations.AddField(
            model_name="blueprintinstance",
            name="content",
            field=models.TextField(blank=True, default=""),
        ),
        migrations.AlterField(
            model_name="blueprintinstance",
            name="path",
            field=models.TextField(blank=True, default=""),
        ),
    ]
@ -1,19 +1,18 @@
"""blueprint models"""
"""Managed Object models"""
from pathlib import Path
from urllib.parse import urlparse
from uuid import uuid4

from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils.translation import gettext_lazy as _
from requests import RequestException
from rest_framework.serializers import Serializer
from structlog import get_logger

from authentik.blueprints.v1.oci import BlueprintOCIClient, OCIException
from authentik.lib.config import CONFIG
from authentik.lib.models import CreatedUpdatedModel, SerializerModel
from authentik.lib.sentry import SentryIgnoredException

LOGGER = get_logger()
from authentik.lib.utils.http import get_http_session


class BlueprintRetrievalFailed(SentryIgnoredException):
@ -29,15 +28,18 @@ class ManagedModel(models.Model):
        null=True,
        verbose_name=_("Managed by authentik"),
        help_text=_(
            "Objects which are managed by authentik. These objects are created and updated "
            "automatically. This is flag only indicates that an object can be overwritten by "
            "migrations. You can still modify the objects via the API, but expect changes "
            "to be overwritten in a later update."
            (
                "Objects which are managed by authentik. These objects are created and updated "
                "automatically. This is flag only indicates that an object can be overwritten by "
                "migrations. You can still modify the objects via the API, but expect changes "
                "to be overwritten in a later update."
            )
        ),
        unique=True,
    )

    class Meta:

        abstract = True


@ -59,8 +61,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):

    name = models.TextField()
    metadata = models.JSONField(default=dict)
    path = models.TextField(default="", blank=True)
    content = models.TextField(default="", blank=True)
    path = models.TextField()
    context = models.JSONField(default=dict)
    last_applied = models.DateTimeField(auto_now=True)
    last_applied_hash = models.TextField()
@ -70,31 +71,18 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
    enabled = models.BooleanField(default=True)
    managed_models = ArrayField(models.TextField(), default=list)

    def retrieve_oci(self) -> str:
        """Get blueprint from an OCI registry"""
        client = BlueprintOCIClient(self.path.replace("oci://", "https://"))
        try:
            manifests = client.fetch_manifests()
            return client.fetch_blobs(manifests)
        except OCIException as exc:
            raise BlueprintRetrievalFailed(exc) from exc

    def retrieve_file(self) -> str:
        """Get blueprint from path"""
        try:
            full_path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
            with full_path.open("r", encoding="utf-8") as _file:
                return _file.read()
        except (IOError, OSError) as exc:
            raise BlueprintRetrievalFailed(exc) from exc

    def retrieve(self) -> str:
        """Retrieve blueprint contents"""
        if self.path.startswith("oci://"):
            return self.retrieve_oci()
        if self.path != "":
            return self.retrieve_file()
        return self.content
        if urlparse(self.path).scheme != "":
            try:
                res = get_http_session().get(self.path, timeout=3, allow_redirects=True)
                res.raise_for_status()
                return res.text
            except RequestException as exc:
                raise BlueprintRetrievalFailed(exc) from exc
        path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
        with path.open("r", encoding="utf-8") as _file:
            return _file.read()

    @property
    def serializer(self) -> Serializer:
@ -106,6 +94,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
        return f"Blueprint Instance {self.name}"

    class Meta:

        verbose_name = _("Blueprint Instance")
        verbose_name_plural = _("Blueprint Instances")
        unique_together = (
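The post-revert `retrieve()` above dispatches on the URL scheme of `path`; a standalone sketch of that branch, assuming plain `requests` in place of authentik's `get_http_session()`:

```python
# Sketch of the scheme-based dispatch in retrieve(): paths with a URL scheme
# are fetched over HTTP, everything else is read from the blueprints directory.
from pathlib import Path
from urllib.parse import urlparse

import requests


def retrieve(path: str, blueprints_dir: str = "/blueprints") -> str:
    if urlparse(path).scheme != "":
        res = requests.get(path, timeout=3, allow_redirects=True)
        res.raise_for_status()
        return res.text
    full_path = Path(blueprints_dir) / Path(path)
    with full_path.open("r", encoding="utf-8") as _file:
        return _file.read()
```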
@ -9,9 +9,4 @@ CELERY_BEAT_SCHEDULE = {
        "schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"),
        "options": {"queue": "authentik_scheduled"},
    },
    "blueprints_v1_cleanup": {
        "task": "authentik.blueprints.v1.tasks.clear_failed_blueprints",
        "schedule": crontab(minute=fqdn_rand("blueprints_v1_cleanup"), hour="*"),
        "options": {"queue": "authentik_scheduled"},
    },
}
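`fqdn_rand` staggers these beat schedules per host so parallel deployments don't all fire in the same minute. Its implementation is not part of this diff, but one way such a helper can work:

```python
# Sketch of a stable per-host schedule offset; authentik's real fqdn_rand
# may be implemented differently — this is only an assumption.
from hashlib import sha256
from socket import getfqdn


def fqdn_rand(task: str, stop: int = 60) -> int:
    """Deterministic value in [0, stop) derived from hostname and task name."""
    seed = f"{getfqdn()}-{task}".encode()
    return int(sha256(seed).hexdigest(), 16) % stop
```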
@ -5,8 +5,9 @@ from typing import Callable

from django.apps import apps

from authentik.blueprints.apps import ManagedAppConfig
from authentik.blueprints.manager import ManagedAppConfig
from authentik.blueprints.models import BlueprintInstance
from authentik.lib.config import CONFIG


def apply_blueprint(*files: str):
@ -45,13 +46,3 @@ def reconcile_app(app_name: str):
        return wrapper

    return wrapper_outer


def load_yaml_fixture(path: str, **kwargs) -> str:
    """Load yaml fixture, optionally formatting it with kwargs"""
    with open(Path(__file__).resolve().parent / Path(path), "r", encoding="utf-8") as _fixture:
        fixture = _fixture.read()
    try:
        return fixture % kwargs
    except TypeError:
        return fixture
@ -1,21 +0,0 @@
version: 1
entries:
  - identifiers:
      name: "%(id1)s"
      slug: "%(id1)s"
    model: authentik_flows.flow
    conditions:
      - true
    attrs:
      designation: stage_configuration
      title: foo
  - identifiers:
      name: "%(id2)s"
      slug: "%(id2)s"
    model: authentik_flows.flow
    conditions:
      - true
      - true
    attrs:
      designation: stage_configuration
      title: foo
@ -1,21 +0,0 @@
version: 1
entries:
  - identifiers:
      name: "%(id1)s"
      slug: "%(id1)s"
    model: authentik_flows.flow
    conditions:
      - false
    attrs:
      designation: stage_configuration
      title: foo
  - identifiers:
      name: "%(id2)s"
      slug: "%(id2)s"
    model: authentik_flows.flow
    conditions:
      - true
      - false
    attrs:
      designation: stage_configuration
      title: foo
@ -1,13 +0,0 @@
version: 1
entries:
  - identifiers:
      name: "%(id)s"
      slug: "%(id)s"
    model: authentik_flows.flow
    state: absent
  - identifiers:
      name: "%(id)s"
      expression: |
        return True
    model: authentik_policies_expression.expressionpolicy
    state: absent
@ -1,10 +0,0 @@
version: 1
entries:
  - identifiers:
      name: "%(id)s"
      slug: "%(id)s"
    model: authentik_flows.flow
    state: created
    attrs:
      designation: stage_configuration
      title: foo
@ -1,10 +0,0 @@
version: 1
entries:
  - identifiers:
      name: "%(id)s"
      slug: "%(id)s"
    model: authentik_flows.flow
    state: present
    attrs:
      designation: stage_configuration
      title: foo
@ -1,13 +0,0 @@
version: 1
entries:
  - identifiers:
      pk: cb954fd4-65a5-4ad9-b1ee-180ee9559cf4
    model: authentik_stages_prompt.prompt
    attrs:
      name: qwerweqrq
      field_key: username
      label: Username
      type: username
      required: true
      placeholder: Username
      order: 0
150
authentik/blueprints/tests/fixtures/tags.yaml
vendored
@ -1,150 +0,0 @@
version: 1
context:
  foo: bar
  policy_property: name
  policy_property_value: foo-bar-baz-qux
  sequence:
    - foo
    - bar
  mapping:
    key1: value
    key2: 2
entries:
  - model: !Format ["%s", authentik_sources_oauth.oauthsource]
    state: !Format ["%s", present]
    identifiers:
      slug: test
    attrs:
      name: test
      provider_type: github
      consumer_key: !Env foo
      consumer_secret: !Env [bar, baz]
      authentication_flow:
        !Find [
          authentik_flows.Flow,
          [slug, default-source-authentication],
        ]
      enrollment_flow:
        !Find [!Format ["%s", authentik_flows.Flow], [slug, default-source-enrollment]]
  - attrs:
      expression: return True
    identifiers:
      name: !Format [foo-%s-%s-%s, !Context foo, !Context bar, qux]
    id: policy
    model: authentik_policies_expression.expressionpolicy
  - attrs:
      attributes:
        policy_pk1:
          !Format [
            "%s-%s",
            !Find [
              authentik_policies_expression.expressionpolicy,
              [
                !Context policy_property,
                !Context policy_property_value,
              ],
              [expression, return True],
            ],
            suffix,
          ]
        policy_pk2: !Format ["%s-%s", !KeyOf policy, suffix]
        boolAnd:
          !Condition [AND, !Context foo, !Format ["%s", "a_string"], 1]
        boolNand:
          !Condition [NAND, !Context foo, !Format ["%s", "a_string"], 1]
        boolOr:
          !Condition [
            OR,
            !Context foo,
            !Format ["%s", "a_string"],
            null,
          ]
        boolNor:
          !Condition [
            NOR,
            !Context foo,
            !Format ["%s", "a_string"],
            null,
          ]
        boolXor:
          !Condition [XOR, !Context foo, !Format ["%s", "a_string"], 1]
        boolXnor:
          !Condition [XNOR, !Context foo, !Format ["%s", "a_string"], 1]
        boolComplex:
          !Condition [
            XNOR,
            !Condition [AND, !Context non_existing],
            !Condition [NOR, a string],
            !Condition [XOR, null],
          ]
        if_true_complex:
          !If [
            true,
            {
              dictionary:
                {
                  with: { keys: "and_values" },
                  and_nested_custom_tags:
                    !Format ["foo-%s", !Context foo],
                },
            },
            null,
          ]
        if_false_complex:
          !If [
            !Condition [AND, false],
            null,
            [list, with, items, !Format ["foo-%s", !Context foo]],
          ]
        if_true_simple: !If [!Context foo, true, text]
        if_false_simple: !If [null, false, 2]
        enumerate_mapping_to_mapping: !Enumerate [
          !Context mapping,
          MAP,
          [!Format ["prefix-%s", !Index 0], !Format ["other-prefix-%s", !Value 0]]
        ]
        enumerate_mapping_to_sequence: !Enumerate [
          !Context mapping,
          SEQ,
          !Format ["prefixed-pair-%s-%s", !Index 0, !Value 0]
        ]
        enumerate_sequence_to_sequence: !Enumerate [
          !Context sequence,
          SEQ,
          !Format ["prefixed-items-%s-%s", !Index 0, !Value 0]
        ]
        enumerate_sequence_to_mapping: !Enumerate [
          !Context sequence,
          MAP,
          [!Format ["index: %d", !Index 0], !Value 0]
        ]
        nested_complex_enumeration: !Enumerate [
          !Context sequence,
          MAP,
          [
            !Index 0,
            !Enumerate [
              !Context mapping,
              MAP,
              [
                !Format ["%s", !Index 0],
                [
                  !Enumerate [!Value 2, SEQ, !Format ["prefixed-%s", !Value 0]],
                  {
                    outer_value: !Value 1,
                    outer_index: !Index 1,
                    middle_value: !Value 0,
                    middle_index: !Index 0
                  }
                ]
              ]
            ]
          ]
        ]
    identifiers:
      name: test
    conditions:
      - !Condition [AND, true, true, text]
      - true
      - text
    model: authentik_core.group
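The removed fixture above leans on custom tags (`!Env`, `!Format`, `!Find`, `!Condition`, ...). With PyYAML, tags like these are wired up by registering constructors on a loader; a minimal sketch for an `!Env`-style tag (authentik's `BlueprintLoader` instead constructs `YAMLTag` objects and resolves them later, as the common.py diff below shows):

```python
# Minimal sketch of registering a custom !Env tag with PyYAML; authentik's
# BlueprintLoader builds YAMLTag objects instead of resolving immediately.
from os import getenv

import yaml


class SketchLoader(yaml.SafeLoader):
    """Subclass so the constructor doesn't leak into the global SafeLoader."""


def construct_env(loader: yaml.SafeLoader, node: yaml.Node):
    if isinstance(node, yaml.ScalarNode):
        return getenv(loader.construct_scalar(node))
    key, default = loader.construct_sequence(node)
    return getenv(key, default)


SketchLoader.add_constructor("!Env", construct_env)

print(yaml.load("consumer_secret: !Env [bar, baz]", Loader=SketchLoader))
# -> {'consumer_secret': 'baz'} when $bar is unset
```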
@ -16,7 +16,7 @@ def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
    """Test serializer"""

    def tester(self: TestModels):
        if test_model._meta.abstract:  # pragma: no cover
        if test_model._meta.abstract:
            return
        model_class = test_model()
        self.assertTrue(isinstance(model_class, SerializerModel))
@ -1,98 +0,0 @@
"""Test blueprints OCI"""
from django.test import TransactionTestCase
from requests_mock import Mocker

from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
from authentik.blueprints.v1.oci import OCI_MEDIA_TYPE


class TestBlueprintOCI(TransactionTestCase):
    """Test Blueprints OCI Tasks"""

    def test_successful(self):
        """Successful retrieval"""
        with Mocker() as mocker:
            mocker.get(
                "https://ghcr.io/v2/goauthentik/blueprints/test/manifests/latest",
                json={
                    "layers": [
                        {
                            "mediaType": OCI_MEDIA_TYPE,
                            "digest": "foo",
                        }
                    ]
                },
            )
            mocker.get("https://ghcr.io/v2/goauthentik/blueprints/test/blobs/foo", text="foo")

            self.assertEqual(
                BlueprintInstance(
                    path="oci://ghcr.io/goauthentik/blueprints/test:latest"
                ).retrieve(),
                "foo",
            )

    def test_manifests_error(self):
        """Test manifests request erroring"""
        with Mocker() as mocker:
            mocker.get(
                "https://ghcr.io/v2/goauthentik/blueprints/test/manifests/latest", status_code=401
            )

            with self.assertRaises(BlueprintRetrievalFailed):
                BlueprintInstance(
                    path="oci://ghcr.io/goauthentik/blueprints/test:latest"
                ).retrieve_oci()

    def test_manifests_error_response(self):
        """Test manifests request erroring"""
        with Mocker() as mocker:
            mocker.get(
                "https://ghcr.io/v2/goauthentik/blueprints/test/manifests/latest",
                json={"errors": ["foo"]},
            )

            with self.assertRaises(BlueprintRetrievalFailed):
                BlueprintInstance(
                    path="oci://ghcr.io/goauthentik/blueprints/test:latest"
                ).retrieve_oci()

    def test_no_matching_blob(self):
        """Successful retrieval"""
        with Mocker() as mocker:
            mocker.get(
                "https://ghcr.io/v2/goauthentik/blueprints/test/manifests/latest",
                json={
                    "layers": [
                        {
                            "mediaType": OCI_MEDIA_TYPE + "foo",
                            "digest": "foo",
                        }
                    ]
                },
            )
            with self.assertRaises(BlueprintRetrievalFailed):
                BlueprintInstance(
                    path="oci://ghcr.io/goauthentik/blueprints/test:latest"
                ).retrieve_oci()

    def test_blob_error(self):
        """Successful retrieval"""
        with Mocker() as mocker:
            mocker.get(
                "https://ghcr.io/v2/goauthentik/blueprints/test/manifests/latest",
                json={
                    "layers": [
                        {
                            "mediaType": OCI_MEDIA_TYPE,
                            "digest": "foo",
                        }
                    ]
                },
            )
            mocker.get("https://ghcr.io/v2/goauthentik/blueprints/test/blobs/foo", status_code=401)

            with self.assertRaises(BlueprintRetrievalFailed):
                BlueprintInstance(
                    path="oci://ghcr.io/goauthentik/blueprints/test:latest"
                ).retrieve_oci()
@ -13,7 +13,7 @@ from authentik.tenants.models import Tenant
class TestPackaged(TransactionTestCase):
    """Empty class, test methods are added dynamically"""

    @apply_blueprint("default/default-tenant.yaml")
    @apply_blueprint("default/90-default-tenant.yaml")
    def test_decorator_static(self):
        """Test @apply_blueprint decorator"""
        self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())
@ -33,6 +33,4 @@ def blueprint_tester(file_name: Path) -> Callable:


for blueprint_file in Path("blueprints/").glob("**/*.yaml"):
    if "local" in str(blueprint_file):
        continue
    setattr(TestPackaged, f"test_blueprint_{blueprint_file}", blueprint_tester(blueprint_file))
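The glob/`setattr` loop above generates one test method per packaged blueprint, so each file passes or fails independently; a self-contained sketch of the pattern:

```python
# Self-contained sketch of the dynamic-test pattern: attach one closure per
# discovered file to an otherwise empty TestCase.
from pathlib import Path
from typing import Callable
from unittest import TestCase


class TestPackagedSketch(TestCase):
    """Empty class, test methods are added dynamically (as above)."""


def blueprint_tester(file_name: Path) -> Callable:
    def tester(self: TestPackagedSketch):
        self.assertEqual(file_name.suffix, ".yaml")

    return tester


for idx, blueprint_file in enumerate(Path(".").glob("**/*.yaml")):
    setattr(TestPackagedSketch, f"test_blueprint_{idx}", blueprint_tester(blueprint_file))
```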
@ -1,20 +1,41 @@
"""Test blueprints v1"""
from os import environ

from django.test import TransactionTestCase

from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.exporter import FlowExporter
from authentik.blueprints.v1.importer import Importer, transaction_rollback
from authentik.core.models import Group
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding
from authentik.lib.generators import generate_id
from authentik.policies.expression.models import ExpressionPolicy
from authentik.policies.models import PolicyBinding
from authentik.sources.oauth.models import OAuthSource
from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage
from authentik.stages.user_login.models import UserLoginStage

STATIC_PROMPT_EXPORT = """version: 1
entries:
  - identifiers:
      pk: cb954fd4-65a5-4ad9-b1ee-180ee9559cf4
    model: authentik_stages_prompt.prompt
    attrs:
      field_key: username
      label: Username
      type: username
      required: true
      placeholder: Username
      order: 0
"""

YAML_TAG_TESTS = """version: 1
context:
  foo: bar
entries:
  - attrs:
      expression: return True
    identifiers:
      name: !Format [foo-%s-%s, !Context foo, !Context bar]
    id: default-source-enrollment-if-username
    model: authentik_policies_expression.expressionpolicy
"""


class TestBlueprintsV1(TransactionTestCase):
    """Test Blueprints"""
@ -24,61 +45,12 @@ class TestBlueprintsV1(TransactionTestCase):
        importer = Importer('{"version": 3}')
        self.assertFalse(importer.validate()[0])
        importer = Importer(
            '{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
            '"model": "authentik_core.User"}]}'
        )
        self.assertFalse(importer.validate()[0])
        importer = Importer(
            '{"version": 1, "entries": [{"attrs": {"name": "test"}, '
            '"identifiers": {}, '
            '"model": "authentik_core.Group"}]}'
        )
        self.assertFalse(importer.validate()[0])

    def test_validated_import_dict_identifiers(self):
        """Test importing blueprints with dict identifiers."""
        Group.objects.filter(name__istartswith="test").delete()

        Group.objects.create(
            name="test1",
            attributes={
                "key": ["value"],
                "other_key": ["a_value", "other_value"],
            },
        )
        Group.objects.create(
            name="test2",
            attributes={
                "key": ["value"],
                "other_key": ["diff_value", "other_diff_value"],
            },
        )

        importer = Importer(
            '{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
            '{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
            '["other_value"]}}, "model": "authentik_core.Group"}]}'
        )
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        self.assertTrue(
            Group.objects.filter(
                name="test2",
                attributes={
                    "key": ["value"],
                    "other_key": ["diff_value", "other_diff_value"],
                },
            (
                '{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
                '"model": "authentik_core.User"}]}'
            )
        )
        self.assertTrue(
            Group.objects.filter(
                name="test999",
                # All attributes used as identifiers are kept and merged with the
                # new attributes declared in the blueprint
                attributes={"key": ["updated_value"], "other_key": ["other_value"]},
            )
        )
        self.assertFalse(Group.objects.filter(name="test1"))
        self.assertFalse(importer.validate()[0])

    def test_export_validate_import(self):
        """Test export and validate it"""
@ -113,113 +85,25 @@ class TestBlueprintsV1(TransactionTestCase):
        """Test export and import it twice"""
        count_initial = Prompt.objects.filter(field_key="username").count()

        importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
        importer = Importer(STATIC_PROMPT_EXPORT)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())

        count_before = Prompt.objects.filter(field_key="username").count()
        self.assertEqual(count_initial + 1, count_before)

        importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
        importer = Importer(STATIC_PROMPT_EXPORT)
        self.assertTrue(importer.apply())

        self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)

    def test_import_yaml_tags(self):
        """Test some yaml tags"""
        ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
        Group.objects.filter(name="test").delete()
        environ["foo"] = generate_id()
        importer = Importer(load_yaml_fixture("fixtures/tags.yaml"), {"bar": "baz"})
        ExpressionPolicy.objects.filter(name="foo-foo-bar").delete()
        importer = Importer(YAML_TAG_TESTS, {"bar": "baz"})
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()
        self.assertTrue(policy)
        self.assertTrue(
            Group.objects.filter(
                attributes={
                    "policy_pk1": str(policy.pk) + "-suffix",
                    "policy_pk2": str(policy.pk) + "-suffix",
                    "boolAnd": True,
                    "boolNand": False,
                    "boolOr": True,
                    "boolNor": False,
                    "boolXor": True,
                    "boolXnor": False,
                    "boolComplex": True,
                    "if_true_complex": {
                        "dictionary": {
                            "with": {"keys": "and_values"},
                            "and_nested_custom_tags": "foo-bar",
                        }
                    },
                    "if_false_complex": ["list", "with", "items", "foo-bar"],
                    "if_true_simple": True,
                    "if_false_simple": 2,
                    "enumerate_mapping_to_mapping": {
                        "prefix-key1": "other-prefix-value",
                        "prefix-key2": "other-prefix-2",
                    },
                    "enumerate_mapping_to_sequence": [
                        "prefixed-pair-key1-value",
                        "prefixed-pair-key2-2",
                    ],
                    "enumerate_sequence_to_sequence": [
                        "prefixed-items-0-foo",
                        "prefixed-items-1-bar",
                    ],
                    "enumerate_sequence_to_mapping": {"index: 0": "foo", "index: 1": "bar"},
                    "nested_complex_enumeration": {
                        "0": {
                            "key1": [
                                ["prefixed-f", "prefixed-o", "prefixed-o"],
                                {
                                    "outer_value": "foo",
                                    "outer_index": 0,
                                    "middle_value": "value",
                                    "middle_index": "key1",
                                },
                            ],
                            "key2": [
                                ["prefixed-f", "prefixed-o", "prefixed-o"],
                                {
                                    "outer_value": "foo",
                                    "outer_index": 0,
                                    "middle_value": 2,
                                    "middle_index": "key2",
                                },
                            ],
                        },
                        "1": {
                            "key1": [
                                ["prefixed-b", "prefixed-a", "prefixed-r"],
                                {
                                    "outer_value": "bar",
                                    "outer_index": 1,
                                    "middle_value": "value",
                                    "middle_index": "key1",
                                },
                            ],
                            "key2": [
                                ["prefixed-b", "prefixed-a", "prefixed-r"],
                                {
                                    "outer_value": "bar",
                                    "outer_index": 1,
                                    "middle_value": 2,
                                    "middle_index": "key2",
                                },
                            ],
                        },
                    },
                }
            )
        )
        self.assertTrue(
            OAuthSource.objects.filter(
                slug="test",
                consumer_key=environ["foo"],
            )
        )
        self.assertTrue(ExpressionPolicy.objects.filter(name="foo-foo-bar"))

    def test_export_validate_import_policies(self):
        """Test export and validate it"""
@ -256,21 +140,15 @@ class TestBlueprintsV1(TransactionTestCase):
        with transaction_rollback():
            # First stage fields
            username_prompt = Prompt.objects.create(
                name=generate_id(),
                field_key="username",
                label="Username",
                order=0,
                type=FieldTypes.TEXT,
                field_key="username", label="Username", order=0, type=FieldTypes.TEXT
            )
            password = Prompt.objects.create(
                name=generate_id(),
                field_key="password",
                label="Password",
                order=1,
                type=FieldTypes.PASSWORD,
            )
            password_repeat = Prompt.objects.create(
                name=generate_id(),
                field_key="password_repeat",
                label="Password (repeat)",
                order=2,
@ -43,28 +43,3 @@ class TestBlueprintsV1API(APITestCase):
                "6871c0003f5c07be5c3316d9d4a08444bd8fed1b3f03294e51e44522"
            ),
        )

    def test_api_blank(self):
        """Test blank"""
        res = self.client.post(
            reverse("authentik_api:blueprintinstance-list"),
            data={
                "name": "foo",
            },
        )
        self.assertEqual(res.status_code, 400)
        self.assertJSONEqual(
            res.content.decode(), {"non_field_errors": ["Either path or content must be set."]}
        )

    def test_api_content(self):
        """Test blank"""
        res = self.client.post(
            reverse("authentik_api:blueprintinstance-list"),
            data={
                "name": "foo",
                "content": '{"version": 3}',
            },
        )
        self.assertEqual(res.status_code, 400)
        self.assertJSONEqual(res.content.decode(), {"content": ["Failed to validate blueprint"]})
@ -1,43 +0,0 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase

from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.importer import Importer
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id


class TestBlueprintsV1Conditions(TransactionTestCase):
    """Test Blueprints conditions attribute"""

    def test_conditions_fulfilled(self):
        """Test conditions fulfilled"""
        flow_slug1 = generate_id()
        flow_slug2 = generate_id()
        import_yaml = load_yaml_fixture(
            "fixtures/conditions_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
        )

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure objects exist
        flow: Flow = Flow.objects.filter(slug=flow_slug1).first()
        self.assertEqual(flow.slug, flow_slug1)
        flow: Flow = Flow.objects.filter(slug=flow_slug2).first()
        self.assertEqual(flow.slug, flow_slug2)

    def test_conditions_not_fulfilled(self):
        """Test conditions not fulfilled"""
        flow_slug1 = generate_id()
        flow_slug2 = generate_id()
        import_yaml = load_yaml_fixture(
            "fixtures/conditions_not_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
        )

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure objects do not exist
        self.assertFalse(Flow.objects.filter(slug=flow_slug1))
        self.assertFalse(Flow.objects.filter(slug=flow_slug2))
@ -1,82 +0,0 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase

from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.importer import Importer
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id


class TestBlueprintsV1State(TransactionTestCase):
    """Test Blueprints state attribute"""

    def test_state_present(self):
        """Test state present"""
        flow_slug = generate_id()
        import_yaml = load_yaml_fixture("fixtures/state_present.yaml", id=flow_slug)

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure object exists
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.slug, flow_slug)

        # Update object
        flow.title = "bar"
        flow.save()

        flow.refresh_from_db()
        self.assertEqual(flow.title, "bar")

        # Ensure importer updates it
        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.title, "foo")

    def test_state_created(self):
        """Test state created"""
        flow_slug = generate_id()
        import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure object exists
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.slug, flow_slug)

        # Update object
        flow.title = "bar"
        flow.save()

        flow.refresh_from_db()
        self.assertEqual(flow.title, "bar")

        # Ensure importer doesn't update it
        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.title, "bar")

    def test_state_absent(self):
        """Test state absent"""
        flow_slug = generate_id()
        import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure object exists
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.slug, flow_slug)

        import_yaml = load_yaml_fixture("fixtures/state_absent.yaml", id=flow_slug)
        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertIsNone(flow)
@ -67,30 +67,25 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
    @CONFIG.patch("blueprints_dir", TMP)
    def test_valid_updated(self):
        """Test valid file"""
        BlueprintInstance.objects.filter(name="foo").delete()
        with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
            file.write(
                dump(
                    {
                        "version": 1,
                        "entries": [],
                        "metadata": {
                            "name": "foo",
                        },
                    }
                )
            )
            file.flush()
            blueprints_discover()  # pylint: disable=no-value-for-parameter
            blueprint = BlueprintInstance.objects.filter(name="foo").first()
            self.assertEqual(
                blueprint.last_applied_hash,
                BlueprintInstance.objects.first().last_applied_hash,
                (
                    "b86ec439b3857350714f070d2833490e736d9155d3d97b2cac13f3b352223e5a"
                    "1adbf8ec56fa616d46090cc4773ff9e46c4e509fde96b97de87dd21fa329ca1a"
                    "e52bb445b03cd36057258dc9f0ce0fbed8278498ee1470e45315293e5f026d1b"
                    "d1f9b3526871c0003f5c07be5c3316d9d4a08444bd8fed1b3f03294e51e44522"
                ),
            )
            self.assertEqual(blueprint.metadata, {"labels": {}, "name": "foo"})
            self.assertEqual(BlueprintInstance.objects.first().metadata, {})
            file.write(
                dump(
                    {
@ -98,28 +93,24 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
                        "entries": [],
                        "metadata": {
                            "name": "foo",
                            "labels": {
                                "foo": "bar",
                            },
                        },
                    }
                )
            )
            file.flush()
            blueprints_discover()  # pylint: disable=no-value-for-parameter
            blueprint.refresh_from_db()
            self.assertEqual(
                blueprint.last_applied_hash,
                BlueprintInstance.objects.first().last_applied_hash,
                (
                    "87b68b10131d2c9751ed308bba38f04734b9e2cdf8532ed617bc52979b063c49"
                    "2564f33f3d20ab9d5f0fd9e6eb77a13942e060199f147789cb7afab9690e72b5"
                    "fc62fea96067da8592bdf90927246d0ca150b045447df93b0652a0e20a8bc327"
                    "681510b5db37ea98759c61f9a98dd2381f46a3b5a2da69dfb45158897f14e824"
                ),
            )
            self.assertEqual(
                blueprint.metadata,
                BlueprintInstance.objects.first().metadata,
                {
                    "name": "foo",
                    "labels": {"foo": "bar"},
                    "labels": {},
                },
            )
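The long literals above are sha512 digests: discovery hashes each file and compares it against the stored `last_applied_hash`, so unchanged blueprints are skipped on the next run. A sketch of that check:

```python
# Sketch of content-hash change detection: recompute sha512 over the raw
# file and only re-apply when it differs from the stored hash.
from hashlib import sha512
from pathlib import Path


def file_hash(path: Path) -> str:
    return sha512(path.read_bytes()).hexdigest()


def needs_apply(path: Path, last_applied_hash: str) -> bool:
    return file_hash(path) != last_applied_hash
```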
@ -1,15 +1,10 @@
|
||||
"""transfer common classes"""
|
||||
from collections import OrderedDict
|
||||
from copy import copy
|
||||
from dataclasses import asdict, dataclass, field, is_dataclass
|
||||
from enum import Enum
|
||||
from functools import reduce
|
||||
from operator import ixor
|
||||
from os import getenv
|
||||
from typing import Any, Iterable, Literal, Mapping, Optional, Union
|
||||
from typing import Any, Optional
|
||||
from uuid import UUID
|
||||
|
||||
from deepmerge import always_merger
|
||||
from django.apps import apps
|
||||
from django.db.models import Model, Q
|
||||
from rest_framework.fields import Field
|
||||
@ -46,33 +41,19 @@ class BlueprintEntryState:
|
||||
instance: Optional[Model] = None
|
||||
|
||||
|
||||
class BlueprintEntryDesiredState(Enum):
|
||||
"""State an entry should be reconciled to"""
|
||||
|
||||
ABSENT = "absent"
|
||||
PRESENT = "present"
|
||||
CREATED = "created"
|
||||
|
||||
|
||||
@dataclass
|
||||
class BlueprintEntry:
|
||||
"""Single entry of a blueprint"""
|
||||
|
||||
model: Union[str, "YAMLTag"]
|
||||
state: Union[BlueprintEntryDesiredState, "YAMLTag"] = field(
|
||||
default=BlueprintEntryDesiredState.PRESENT
|
||||
)
|
||||
conditions: list[Any] = field(default_factory=list)
|
||||
identifiers: dict[str, Any] = field(default_factory=dict)
|
||||
identifiers: dict[str, Any]
|
||||
model: str
|
||||
attrs: Optional[dict[str, Any]] = field(default_factory=dict)
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
id: Optional[str] = None
|
||||
|
||||
_state: BlueprintEntryState = field(default_factory=BlueprintEntryState)
|
||||
|
||||
def __post_init__(self, *args, **kwargs) -> None:
|
||||
self.__tag_contexts: list["YAMLTagContext"] = []
|
||||
|
||||
@staticmethod
|
||||
def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry":
|
||||
"""Convert a SerializerModel instance to a blueprint Entry"""
|
||||
@ -82,53 +63,24 @@ class BlueprintEntry:
|
||||
all_attrs = get_attrs(model)
|
||||
|
||||
for extra_identifier_name in extra_identifier_names:
|
||||
identifiers[extra_identifier_name] = all_attrs.pop(extra_identifier_name, None)
|
||||
identifiers[extra_identifier_name] = all_attrs.pop(extra_identifier_name)
|
||||
return BlueprintEntry(
|
||||
identifiers=identifiers,
|
||||
model=f"{model._meta.app_label}.{model._meta.model_name}",
|
||||
attrs=all_attrs,
|
||||
)
|
||||
|
||||
def _get_tag_context(
|
||||
self,
|
||||
depth: int = 0,
|
||||
context_tag_type: Optional[type["YAMLTagContext"] | tuple["YAMLTagContext", ...]] = None,
|
||||
) -> "YAMLTagContext":
|
||||
"""Get a YAMLTagContext object located at a certain depth in the tag tree"""
|
||||
if depth < 0:
|
||||
raise ValueError("depth must be a positive number or zero")
|
||||
|
||||
if context_tag_type:
|
||||
contexts = [x for x in self.__tag_contexts if isinstance(x, context_tag_type)]
|
||||
else:
|
||||
contexts = self.__tag_contexts
|
||||
|
||||
try:
|
||||
return contexts[-(depth + 1)]
|
||||
except IndexError:
|
||||
raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}")
|
||||
|
||||
def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any:
|
||||
"""Check if we have any special tags that need handling"""
|
||||
val = copy(value)
|
||||
|
||||
if isinstance(value, YAMLTagContext):
|
||||
self.__tag_contexts.append(value)
|
||||
|
||||
if isinstance(value, YAMLTag):
|
||||
val = value.resolve(self, blueprint)
|
||||
|
||||
return value.resolve(self, blueprint)
|
||||
if isinstance(value, dict):
|
||||
for key, inner_value in value.items():
|
||||
val[key] = self.tag_resolver(inner_value, blueprint)
|
||||
value[key] = self.tag_resolver(inner_value, blueprint)
|
||||
if isinstance(value, list):
|
||||
for idx, inner_value in enumerate(value):
|
||||
val[idx] = self.tag_resolver(inner_value, blueprint)
|
||||
|
||||
if isinstance(value, YAMLTagContext):
|
||||
self.__tag_contexts.pop()
|
||||
|
||||
return val
|
||||
value[idx] = self.tag_resolver(inner_value, blueprint)
|
||||
return value
|
||||
|
||||
def get_attrs(self, blueprint: "Blueprint") -> dict[str, Any]:
|
||||
"""Get attributes of this entry, with all yaml tags resolved"""
|
||||
@ -138,18 +90,6 @@ class BlueprintEntry:
|
||||
"""Get attributes of this entry, with all yaml tags resolved"""
|
||||
return self.tag_resolver(self.identifiers, blueprint)
|
||||
|
||||
def get_state(self, blueprint: "Blueprint") -> BlueprintEntryDesiredState:
|
||||
"""Get the blueprint state, with yaml tags resolved if present"""
|
||||
return BlueprintEntryDesiredState(self.tag_resolver(self.state, blueprint))
|
||||
|
||||
def get_model(self, blueprint: "Blueprint") -> str:
|
||||
"""Get the blueprint model, with yaml tags resolved if present"""
|
||||
return str(self.tag_resolver(self.model, blueprint))
|
||||
|
||||
def check_all_conditions_match(self, blueprint: "Blueprint") -> bool:
|
||||
"""Check all conditions of this entry match (evaluate to True)"""
|
||||
return all(self.tag_resolver(self.conditions, blueprint))
|
||||
|
||||
|
||||
@dataclass
|
||||
class BlueprintMetadata:
|
||||
@ -165,9 +105,9 @@ class Blueprint:
|
||||
|
||||
version: int = field(default=1)
|
||||
entries: list[BlueprintEntry] = field(default_factory=list)
|
||||
context: dict = field(default_factory=dict)
|
||||
|
||||
metadata: Optional[BlueprintMetadata] = field(default=None)
|
||||
context: Optional[dict] = field(default_factory=dict)
|
||||
|
||||
|
||||
class YAMLTag:
|
||||
@ -178,19 +118,12 @@ class YAMLTag:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class YAMLTagContext:
|
||||
"""Base class for all YAML Tag Contexts"""
|
||||
|
||||
def get_context(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
"""Implement yaml tag context logic"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class KeyOf(YAMLTag):
|
||||
"""Reference another object by their ID"""
|
||||
|
||||
id_from: str
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
|
||||
super().__init__()
|
||||
self.id_from = node.value
|
||||
@ -206,36 +139,18 @@ class KeyOf(YAMLTag):
|
||||
):
|
||||
return _entry._state.instance.pbm_uuid
|
||||
return _entry._state.instance.pk
|
||||
raise EntryInvalidError(
|
||||
raise ValueError(
|
||||
f"KeyOf: failed to find entry with `id` of `{self.id_from}` and a model instance"
|
||||
)
|
||||
|
||||
|
||||
class Env(YAMLTag):
|
||||
"""Lookup environment variable with optional default"""
|
||||
|
||||
key: str
|
||||
default: Optional[Any]
|
||||
|
||||
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.default = None
|
||||
if isinstance(node, ScalarNode):
|
||||
self.key = node.value
|
||||
if isinstance(node, SequenceNode):
|
||||
self.key = node.value[0].value
|
||||
self.default = node.value[1].value
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
return getenv(self.key, self.default)
|
||||
|
||||
|
||||
class Context(YAMLTag):
|
||||
"""Lookup key from instance context"""
|
||||
|
||||
key: str
|
||||
default: Optional[Any]
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.default = None
|
||||
@ -258,23 +173,17 @@ class Format(YAMLTag):
|
||||
format_string: str
|
||||
args: list[Any]
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.format_string = node.value[0].value
|
||||
self.args = []
|
||||
for raw_node in node.value[1:]:
|
||||
self.args.append(loader.construct_object(raw_node))
|
||||
self.args.append(raw_node.value)
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
args = []
|
||||
for arg in self.args:
|
||||
if isinstance(arg, YAMLTag):
|
||||
args.append(arg.resolve(entry, blueprint))
|
||||
else:
|
||||
args.append(arg)
|
||||
|
||||
try:
|
||||
return self.format_string % tuple(args)
|
||||
return self.format_string % tuple(self.args)
|
||||
except TypeError as exc:
|
||||
raise EntryInvalidError(exc)
|
||||
|
||||
@ -282,12 +191,15 @@ class Format(YAMLTag):
|
||||
class Find(YAMLTag):
|
||||
"""Find any object"""
|
||||
|
||||
model_name: str | YAMLTag
|
||||
model_name: str
|
||||
conditions: list[list]
|
||||
|
||||
model_class: type[Model]
|
||||
|
||||
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.model_name = loader.construct_object(node.value[0])
|
||||
self.model_name = node.value[0].value
|
||||
self.model_class = apps.get_model(*self.model_name.split("."))
|
||||
self.conditions = []
|
||||
for raw_node in node.value[1:]:
|
||||
values = []
|
||||
@ -296,227 +208,15 @@ class Find(YAMLTag):
|
||||
self.conditions.append(values)
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
if isinstance(self.model_name, YAMLTag):
|
||||
model_name = self.model_name.resolve(entry, blueprint)
|
||||
else:
|
||||
model_name = self.model_name
|
||||
|
||||
model_class = apps.get_model(*model_name.split("."))
|
||||
|
||||
query = Q()
|
||||
for cond in self.conditions:
|
||||
if isinstance(cond[0], YAMLTag):
|
||||
query_key = cond[0].resolve(entry, blueprint)
|
||||
else:
|
||||
query_key = cond[0]
|
||||
if isinstance(cond[1], YAMLTag):
|
||||
query_value = cond[1].resolve(entry, blueprint)
|
||||
else:
|
||||
query_value = cond[1]
|
||||
query &= Q(**{query_key: query_value})
|
||||
instance = model_class.objects.filter(query).first()
|
||||
query &= Q(**{cond[0]: cond[1]})
|
||||
instance = self.model_class.objects.filter(query).first()
|
||||
if instance:
|
||||
return instance.pk
|
||||
return None
|
||||
|
||||
|
||||
class Condition(YAMLTag):
|
||||
"""Convert all values to a single boolean"""
|
||||
|
||||
mode: Literal["AND", "NAND", "OR", "NOR", "XOR", "XNOR"]
|
||||
args: list[Any]
|
||||
|
||||
_COMPARATORS = {
|
||||
# Using all and any here instead of from operator import iand, ior
|
||||
# to improve performance
|
||||
"AND": all,
|
||||
"NAND": lambda args: not all(args),
|
||||
"OR": any,
|
||||
"NOR": lambda args: not any(args),
|
||||
"XOR": lambda args: reduce(ixor, args) if len(args) > 1 else args[0],
|
||||
"XNOR": lambda args: not (reduce(ixor, args) if len(args) > 1 else args[0]),
|
||||
}
|
||||
|
||||
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.mode = node.value[0].value
|
||||
self.args = []
|
||||
for raw_node in node.value[1:]:
|
||||
self.args.append(loader.construct_object(raw_node))
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
args = []
|
||||
for arg in self.args:
|
||||
if isinstance(arg, YAMLTag):
|
||||
args.append(arg.resolve(entry, blueprint))
|
||||
else:
|
||||
args.append(arg)
|
||||
|
||||
if not args:
|
||||
raise EntryInvalidError("At least one value is required after mode selection.")
|
||||
|
||||
try:
|
||||
comparator = self._COMPARATORS[self.mode.upper()]
|
||||
return comparator(tuple(bool(x) for x in args))
|
||||
except (TypeError, KeyError) as exc:
|
||||
raise EntryInvalidError(exc)

class If(YAMLTag):
    """Select YAML to use based on condition"""

    condition: Any
    when_true: Any
    when_false: Any

    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
        super().__init__()
        self.condition = loader.construct_object(node.value[0])
        self.when_true = loader.construct_object(node.value[1])
        self.when_false = loader.construct_object(node.value[2])

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        if isinstance(self.condition, YAMLTag):
            condition = self.condition.resolve(entry, blueprint)
        else:
            condition = self.condition

        try:
            return entry.tag_resolver(
                self.when_true if condition else self.when_false,
                blueprint,
            )
        except TypeError as exc:
            raise EntryInvalidError(exc)


class Enumerate(YAMLTag, YAMLTagContext):
    """Iterate over an iterable."""

    iterable: YAMLTag | Iterable
    item_body: Any
    output_body: Literal["SEQ", "MAP"]

    _OUTPUT_BODIES = {
        "SEQ": (list, lambda a, b: [*a, b]),
        "MAP": (
            dict,
            lambda a, b: always_merger.merge(
                a, {b[0]: b[1]} if isinstance(b, (tuple, list)) else b
            ),
        ),
    }

    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
        super().__init__()
        self.iterable = loader.construct_object(node.value[0])
        self.output_body = node.value[1].value
        self.item_body = loader.construct_object(node.value[2])
        self.__current_context: tuple[Any, Any] = tuple()

    def get_context(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        return self.__current_context

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        if isinstance(self.iterable, EnumeratedItem) and self.iterable.depth == 0:
            raise EntryInvalidError(
                f"{self.__class__.__name__} tag's iterable references this tag's context. "
                "This is a noop. Check you are setting depth bigger than 0."
            )

        if isinstance(self.iterable, YAMLTag):
            iterable = self.iterable.resolve(entry, blueprint)
        else:
            iterable = self.iterable

        if not isinstance(iterable, Iterable):
            raise EntryInvalidError(
                f"{self.__class__.__name__}'s iterable must be an iterable "
                "such as a sequence or a mapping"
            )

        if isinstance(iterable, Mapping):
            iterable = tuple(iterable.items())
        else:
            iterable = tuple(enumerate(iterable))

        try:
            output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()]
        except KeyError as exc:
            raise EntryInvalidError(exc)

        result = output_class()

        self.__current_context = tuple()

        try:
            for item in iterable:
                self.__current_context = item
                resolved_body = entry.tag_resolver(self.item_body, blueprint)
                result = add_fn(result, resolved_body)
                if not isinstance(result, output_class):
                    raise EntryInvalidError(
                        f"Invalid {self.__class__.__name__} item found: {resolved_body}"
                    )
        finally:
            self.__current_context = tuple()

        return result
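
Side note (not part of the diff): a standalone sketch of the two `_OUTPUT_BODIES` accumulators above; `SEQ` appends each resolved item to a list, while `MAP` deep-merges `(key, value)` pairs into a dict. The lambdas are copied from the table, the inputs are invented.

```python
from deepmerge import always_merger

seq_add = lambda a, b: [*a, b]
map_add = lambda a, b: always_merger.merge(
    a, {b[0]: b[1]} if isinstance(b, (tuple, list)) else b
)

result = []
for item in ("first", "second"):
    result = seq_add(result, item)
print(result)  # ['first', 'second']

result = {}
for pair in (("x", 1), ("y", 2)):
    result = map_add(result, pair)
print(result)  # {'x': 1, 'y': 2}
```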

class EnumeratedItem(YAMLTag):
    """Get the current item value and index provided by an Enumerate tag context"""

    depth: int

    _SUPPORTED_CONTEXT_TAGS = (Enumerate,)

    def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
        super().__init__()
        self.depth = int(node.value)

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        try:
            context_tag: Enumerate = entry._get_tag_context(
                depth=self.depth,
                context_tag_type=EnumeratedItem._SUPPORTED_CONTEXT_TAGS,
            )
        except ValueError as exc:
            if self.depth == 0:
                raise EntryInvalidError(
                    f"{self.__class__.__name__} tags are only usable "
                    f"inside an {Enumerate.__name__} tag"
                )

            raise EntryInvalidError(f"{self.__class__.__name__} tag: {exc}")

        return context_tag.get_context(entry, blueprint)


class Index(EnumeratedItem):
    """Get the current item index provided by an Enumerate tag context"""

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        context = super().resolve(entry, blueprint)

        try:
            return context[0]
        except IndexError:  # pragma: no cover
            raise EntryInvalidError(f"Empty/invalid context: {context}")


class Value(EnumeratedItem):
    """Get the current item value provided by an Enumerate tag context"""

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        context = super().resolve(entry, blueprint)

        try:
            return context[1]
        except IndexError:  # pragma: no cover
            raise EntryInvalidError(f"Empty/invalid context: {context}")


class BlueprintDumper(SafeDumper):
    """Dump dataclasses to yaml"""

@@ -527,14 +227,6 @@ class BlueprintDumper(SafeDumper):
        self.add_representer(UUID, lambda self, data: self.represent_str(str(data)))
        self.add_representer(OrderedDict, lambda self, data: self.represent_dict(dict(data)))
        self.add_representer(Enum, lambda self, data: self.represent_str(data.value))
        self.add_representer(
            BlueprintEntryDesiredState, lambda self, data: self.represent_str(data.value)
        )
        self.add_representer(None, lambda self, data: self.represent_str(str(data)))

    def ignore_aliases(self, data):
        """Don't use any YAML anchors"""
        return True

    def represent(self, data) -> None:
        if is_dataclass(data):
@@ -557,19 +249,7 @@ class BlueprintLoader(SafeLoader):
        self.add_constructor("!Find", Find)
        self.add_constructor("!Context", Context)
        self.add_constructor("!Format", Format)
        self.add_constructor("!Condition", Condition)
        self.add_constructor("!If", If)
        self.add_constructor("!Env", Env)
        self.add_constructor("!Enumerate", Enumerate)
        self.add_constructor("!Value", Value)
        self.add_constructor("!Index", Index)


class EntryInvalidError(SentryIgnoredException):
    """Error raised when an entry is invalid"""

    serializer_errors: Optional[dict]

    def __init__(self, *args: object, serializer_errors: Optional[dict] = None) -> None:
        super().__init__(*args)
        self.serializer_errors = serializer_errors
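
Side note (not part of the diff): a sketch of how a caller might consume the `serializer_errors` payload this exception carries; the `validate_entry()` helper below is hypothetical.

```python
# validate_entry() is a hypothetical helper; EntryInvalidError and its
# serializer_errors attribute are taken from the class above.
try:
    serializer = validate_entry(entry)
except EntryInvalidError as exc:
    print(exc)                    # human-readable message
    print(exc.serializer_errors)  # per-field DRF errors as a dict, or None
```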

@@ -1,13 +1,11 @@
"""Blueprint exporter"""
from typing import Iterable
from typing import Iterator
from uuid import UUID

from django.apps import apps
from django.contrib.auth import get_user_model
from django.db.models import Model, Q, QuerySet
from django.db.models import Q
from django.utils.timezone import now
from django.utils.translation import gettext as _
from guardian.shortcuts import get_anonymous_user
from yaml import dump

from authentik.blueprints.v1.common import (
@@ -18,8 +16,8 @@ from authentik.blueprints.v1.common import (
)
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_GENERATED
from authentik.events.models import Event
from authentik.flows.models import Flow, FlowStageBinding, Stage
from authentik.lib.models import SerializerModel
from authentik.policies.models import Policy, PolicyBinding
from authentik.stages.prompt.models import PromptStage

@@ -27,30 +25,23 @@ from authentik.stages.prompt.models import PromptStage
class Exporter:
    """Export flow with attached stages into yaml"""

    excluded_models: list[type[Model]] = []
    excluded_models = []

    def __init__(self):
        self.excluded_models = [
            Event,
        ]
        self.excluded_models = []

    def get_entries(self) -> Iterable[BlueprintEntry]:
    def get_entries(self) -> Iterator[BlueprintEntry]:
        """Get blueprint entries"""
        for model in apps.get_models():
            if not is_model_allowed(model):
                continue
            if model in self.excluded_models:
                continue
            for obj in self.get_model_instances(model):
            if SerializerModel not in model.__mro__:
                continue
            for obj in model.objects.all():
                yield BlueprintEntry.from_model(obj)

    def get_model_instances(self, model: type[Model]) -> QuerySet:
        """Return a queryset for `model`. Can be used to filter some
        objects on some models"""
        if model == get_user_model():
            return model.objects.exclude(pk=get_anonymous_user().pk)
        return model.objects.all()
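
Side note (not part of the diff): `get_model_instances` is the intended override point for filtering exports. A hypothetical subclass could narrow the export like this (reuses the exporter module's own imports; the slug filter is invented for illustration).

```python
# Hypothetical subclass of the Exporter shown above.
class DefaultFlowExporter(Exporter):
    """Export only Flow objects whose slug starts with 'default-' (illustrative)"""

    def get_model_instances(self, model):
        if model == Flow:
            return Flow.objects.filter(slug__startswith="default-")
        return super().get_model_instances(model)
```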

    def _pre_export(self, blueprint: Blueprint):
        """Hook to run anything pre-export"""

@@ -96,7 +87,7 @@ class FlowExporter(Exporter):
            "pbm_uuid", flat=True
        )

    def walk_stages(self) -> Iterable[BlueprintEntry]:
    def walk_stages(self) -> Iterator[BlueprintEntry]:
        """Convert all stages attached to self.flow into BlueprintEntry objects"""
        stages = Stage.objects.filter(flow=self.flow).select_related().select_subclasses()
        for stage in stages:
@@ -104,13 +95,13 @@ class FlowExporter(Exporter):
                pass
            yield BlueprintEntry.from_model(stage, "name")

    def walk_stage_bindings(self) -> Iterable[BlueprintEntry]:
    def walk_stage_bindings(self) -> Iterator[BlueprintEntry]:
        """Convert all bindings attached to self.flow into BlueprintEntry objects"""
        bindings = FlowStageBinding.objects.filter(target=self.flow).select_related()
        for binding in bindings:
            yield BlueprintEntry.from_model(binding, "target", "stage", "order")

    def walk_policies(self) -> Iterable[BlueprintEntry]:
    def walk_policies(self) -> Iterator[BlueprintEntry]:
        """Walk over all policies. This is done at the beginning of the export for stages that have
        a direct foreign key to a policy."""
        # Special case for PromptStage as that has a direct M2M to policy, we have to ensure
@@ -121,21 +112,21 @@ class FlowExporter(Exporter):
        for policy in policies:
            yield BlueprintEntry.from_model(policy)

    def walk_policy_bindings(self) -> Iterable[BlueprintEntry]:
    def walk_policy_bindings(self) -> Iterator[BlueprintEntry]:
        """Walk over all policybindings relative to us. This is run at the end of the export, as
        we are sure all objects exist now."""
        bindings = PolicyBinding.objects.filter(target__in=self.pbm_uuids).select_related()
        for binding in bindings:
            yield BlueprintEntry.from_model(binding, "policy", "target", "order")

    def walk_stage_prompts(self) -> Iterable[BlueprintEntry]:
    def walk_stage_prompts(self) -> Iterator[BlueprintEntry]:
        """Walk over all prompts associated with any PromptStages"""
        prompt_stages = PromptStage.objects.filter(flow=self.flow)
        for stage in prompt_stages:
            for prompt in stage.fields.all():
                yield BlueprintEntry.from_model(prompt)

    def get_entries(self) -> Iterable[BlueprintEntry]:
    def get_entries(self) -> Iterator[BlueprintEntry]:
        entries = []
        entries.append(BlueprintEntry.from_model(self.flow, "slug"))
        if self.with_stage_prompts:

@@ -3,11 +3,10 @@ from contextlib import contextmanager
from copy import deepcopy
from typing import Any, Optional

from dacite.config import Config
from dacite.core import from_dict
from dacite import from_dict
from dacite.exceptions import DaciteError
from deepmerge import always_merger
from django.core.exceptions import FieldError
from django.apps import apps
from django.db import transaction
from django.db.models import Model
from django.db.models.query_utils import Q
@@ -22,12 +21,10 @@ from yaml import load
from authentik.blueprints.v1.common import (
    Blueprint,
    BlueprintEntry,
    BlueprintEntryDesiredState,
    BlueprintEntryState,
    BlueprintLoader,
    EntryInvalidError,
)
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
from authentik.core.models import (
    AuthenticatedSession,
    PropertyMapping,
@@ -35,7 +32,7 @@ from authentik.core.models import (
    Source,
    UserSourceConnection,
)
from authentik.flows.models import FlowToken, Stage
from authentik.flows.models import Stage
from authentik.lib.models import SerializerModel
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel
@@ -61,10 +58,8 @@ def is_model_allowed(model: type[Model]) -> bool:
        PolicyBindingModel,
        # Classes that have other dependencies
        AuthenticatedSession,
        # Classes which are only internally managed
        FlowToken,
    )
    return model not in excluded_models and issubclass(model, (SerializerModel, BaseMetaModel))
    return model not in excluded_models


@contextmanager
@@ -87,16 +82,14 @@ class Importer:
        self.logger = get_logger()
        import_dict = load(yaml_input, BlueprintLoader)
        try:
            self.__import = from_dict(
                Blueprint, import_dict, config=Config(cast=[BlueprintEntryDesiredState])
            )
            self.__import = from_dict(Blueprint, import_dict)
        except DaciteError as exc:
            raise EntryInvalidError from exc
        ctx = {}
        always_merger.merge(ctx, self.__import.context)
        context = {}
        always_merger.merge(context, self.__import.context)
        if context:
            always_merger.merge(ctx, context)
        self.__import.context = ctx
            always_merger.merge(context, context)
        self.__import.context = context

    @property
    def blueprint(self) -> Blueprint:
@@ -135,37 +128,22 @@ class Importer:
        main_query = Q(pk=attrs["pk"])
        sub_query = Q()
        for identifier, value in attrs.items():
            if isinstance(value, dict):
                continue
            if identifier == "pk":
                continue
            if isinstance(value, dict):
                sub_query &= Q(**{f"{identifier}__contains": value})
            else:
                sub_query &= Q(**{identifier: value})

            sub_query &= Q(**{identifier: value})
        return main_query | sub_query
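
Side note (not part of the diff): a standalone sketch of the query this helper builds, with invented identifier values. It mirrors the simpler right-hand version, which skips dict values and the `pk` itself: a `pk` match OR-ed with the AND of all remaining scalar identifiers.

```python
from django.db.models import Q

attrs = {"pk": 3, "name": "default-flow", "policies": {"pk": 9}}  # invented

main_query = Q(pk=attrs["pk"])
sub_query = Q()
for identifier, value in attrs.items():
    if isinstance(value, dict) or identifier == "pk":
        continue  # dicts and the pk itself are skipped here
    sub_query &= Q(**{identifier: value})

print(main_query | sub_query)
# roughly: (OR: ('pk', 3), ('name', 'default-flow'))
```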

    # pylint: disable-msg=too-many-locals
    def _validate_single(self, entry: BlueprintEntry) -> Optional[BaseSerializer]:
    def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer:
        """Validate a single entry"""
        if not entry.check_all_conditions_match(self.__import):
            self.logger.debug("One or more conditions of this entry are not fulfilled, skipping")
            return None

        model_app_label, model_name = entry.get_model(self.__import).split(".")
        model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
        model_app_label, model_name = entry.model.split(".")
        model: type[SerializerModel] = apps.get_model(model_app_label, model_name)
        # Don't use isinstance since we don't want to check for inheritance
        if not is_model_allowed(model):
            raise EntryInvalidError(f"Model {model} not allowed")
        if issubclass(model, BaseMetaModel):
            serializer_class: type[Serializer] = model.serializer()
            serializer = serializer_class(data=entry.get_attrs(self.__import))
            try:
                serializer.is_valid(raise_exception=True)
            except ValidationError as exc:
                raise EntryInvalidError(
                    f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
                ) from exc
            return serializer
        if entry.identifiers == {}:
            raise EntryInvalidError("No identifiers")

        # If we try to validate without referencing a possible instance
        # we'll get a duplicate error, hence we load the model here and return
@@ -177,22 +155,11 @@ class Importer:
            if isinstance(value, dict) and "pk" in value:
                del updated_identifiers[key]
                updated_identifiers[f"{key}"] = value["pk"]

        query = self.__query_from_identifier(updated_identifiers)
        if not query:
            raise EntryInvalidError("No or invalid identifiers")

        try:
            existing_models = model.objects.filter(query)
        except FieldError as exc:
            raise EntryInvalidError(f"Invalid identifier field: {exc}") from exc
        existing_models = model.objects.filter(self.__query_from_identifier(updated_identifiers))

        serializer_kwargs = {}
        model_instance = existing_models.first()
        if not isinstance(model(), BaseMetaModel) and model_instance:
            if entry.get_state(self.__import) == BlueprintEntryDesiredState.CREATED:
                self.logger.debug("instance exists, skipping")
                return None
        if existing_models.exists():
            model_instance = existing_models.first()
            self.logger.debug(
                "initialise serializer with instance",
                model=model,
@@ -202,28 +169,21 @@ class Importer:
            serializer_kwargs["instance"] = model_instance
            serializer_kwargs["partial"] = True
        else:
            self.logger.debug(
                "initialised new serializer instance", model=model, **updated_identifiers
            )
            self.logger.debug("initialise new instance", model=model, **updated_identifiers)
            model_instance = model()
            # pk needs to be set on the model instance otherwise a new one will be generated
            if "pk" in updated_identifiers:
                model_instance.pk = updated_identifiers["pk"]
            serializer_kwargs["instance"] = model_instance
        try:
            full_data = self.__update_pks_for_attrs(entry.get_attrs(self.__import))
        except ValueError as exc:
            raise EntryInvalidError(exc) from exc
        always_merger.merge(full_data, updated_identifiers)
        full_data = self.__update_pks_for_attrs(entry.get_attrs(self.__import))
        full_data.update(updated_identifiers)
        serializer_kwargs["data"] = full_data

        serializer: Serializer = model().serializer(**serializer_kwargs)
        try:
            serializer.is_valid(raise_exception=True)
        except ValidationError as exc:
            raise EntryInvalidError(
                f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
            ) from exc
            raise EntryInvalidError(f"Serializer errors {serializer.errors}") from exc
        return serializer

    def apply(self) -> bool:
@@ -235,16 +195,17 @@ class Importer:
                raise IntegrityError
        except IntegrityError:
            return False
        self.logger.debug("Committing changes")
        else:
            self.logger.debug("Committing changes")
        return True

    def _apply_models(self) -> bool:
        """Apply (create/update) models yaml"""
        self.__pk_map = {}
        for entry in self.__import.entries:
            model_app_label, model_name = entry.get_model(self.__import).split(".")
            model_app_label, model_name = entry.model.split(".")
            try:
                model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
                model: SerializerModel = apps.get_model(model_app_label, model_name)
            except LookupError:
                self.logger.warning(
                    "app or model does not exist", app=model_app_label, model=model_name
@@ -254,28 +215,14 @@ class Importer:
            try:
                serializer = self._validate_single(entry)
            except EntryInvalidError as exc:
                self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
                self.logger.warning("entry invalid", entry=entry, error=exc)
                return False
            if not serializer:
                continue

            state = entry.get_state(self.__import)
            if state in [
                BlueprintEntryDesiredState.PRESENT,
                BlueprintEntryDesiredState.CREATED,
            ]:
                model = serializer.save()
                if "pk" in entry.identifiers:
                    self.__pk_map[entry.identifiers["pk"]] = model.pk
                entry._state = BlueprintEntryState(model)
                self.logger.debug("updated model", model=model)
            elif state == BlueprintEntryDesiredState.ABSENT:
                instance: Optional[Model] = serializer.instance
                if instance.pk:
                    instance.delete()
                    self.logger.debug("deleted model", mode=instance)
                    continue
                self.logger.debug("entry to delete with no instance, skipping")
            model = serializer.save()
            if "pk" in entry.identifiers:
                self.__pk_map[entry.identifiers["pk"]] = model.pk
            entry._state = BlueprintEntryState(model)
            self.logger.debug("updated model", model=model, pk=model.pk)
        return True

    def validate(self) -> tuple[bool, list[EventDict]]:
@@ -292,8 +239,8 @@ class Importer:
        ):
            successful = self._apply_models()
            if not successful:
                self.logger.debug("Blueprint validation failed")
                self.logger.debug("blueprint validation failed")
        for log in logs:
            getattr(self.logger, log.get("log_level"))(**log)
            self.logger.debug(**log)
        self.__import = orig_import
        return successful, logs

@@ -3,4 +3,3 @@
LABEL_AUTHENTIK_SYSTEM = "blueprints.goauthentik.io/system"
LABEL_AUTHENTIK_INSTANTIATE = "blueprints.goauthentik.io/instantiate"
LABEL_AUTHENTIK_GENERATED = "blueprints.goauthentik.io/generated"
LABEL_AUTHENTIK_DESCRIPTION = "blueprints.goauthentik.io/description"

@@ -1,59 +0,0 @@
"""Apply Blueprint meta model"""
from typing import TYPE_CHECKING

from rest_framework.exceptions import ValidationError
from rest_framework.fields import BooleanField, JSONField
from structlog.stdlib import get_logger

from authentik.blueprints.v1.meta.registry import BaseMetaModel, MetaResult, registry
from authentik.core.api.utils import PassiveSerializer, is_dict

if TYPE_CHECKING:
    from authentik.blueprints.models import BlueprintInstance

LOGGER = get_logger()


class ApplyBlueprintMetaSerializer(PassiveSerializer):
    """Serializer for meta apply blueprint model"""

    identifiers = JSONField(validators=[is_dict])
    required = BooleanField(default=True)

    # We cannot override `instance` as that will confuse rest_framework
    # and make it attempt to update the instance
    blueprint_instance: "BlueprintInstance"

    def validate(self, attrs):
        from authentik.blueprints.models import BlueprintInstance

        identifiers = attrs["identifiers"]
        required = attrs["required"]
        instance = BlueprintInstance.objects.filter(**identifiers).first()
        if not instance and required:
            raise ValidationError("Required blueprint does not exist")
        self.blueprint_instance = instance
        return super().validate(attrs)

    def create(self, validated_data: dict) -> MetaResult:
        from authentik.blueprints.v1.tasks import apply_blueprint

        if not self.blueprint_instance:
            LOGGER.info("Blueprint does not exist, but not required")
            return MetaResult()
        LOGGER.debug("Applying blueprint from meta model", blueprint=self.blueprint_instance)
        # pylint: disable=no-value-for-parameter
        apply_blueprint(str(self.blueprint_instance.pk))
        return MetaResult()


@registry.register("metaapplyblueprint")
class MetaApplyBlueprint(BaseMetaModel):
    """Meta model to apply another blueprint"""

    @staticmethod
    def serializer() -> ApplyBlueprintMetaSerializer:
        return ApplyBlueprintMetaSerializer

    class Meta:
        abstract = True

@@ -1,60 +0,0 @@
"""Base models"""
from django.apps import apps
from django.db.models import Model
from rest_framework.serializers import Serializer


class BaseMetaModel(Model):
    """Base models"""

    @staticmethod
    def serializer() -> Serializer:
        """Serializer similar to SerializerModel, but as a static method since
        this is an abstract model"""
        raise NotImplementedError

    class Meta:
        abstract = True


class MetaResult:
    """Result returned by Meta Models' serializers. Empty class but we can't return none as
    the framework doesn't allow that"""


class MetaModelRegistry:
    """Registry for pseudo meta models"""

    models: dict[str, BaseMetaModel]
    virtual_prefix: str

    def __init__(self, prefix: str) -> None:
        self.models = {}
        self.virtual_prefix = prefix

    def register(self, model_id: str):
        """Register model class under `model_id`"""

        def inner_wrapper(cls):
            self.models[model_id] = cls
            return cls

        return inner_wrapper

    def get_models(self):
        """Wrapper for django's `get_models` to list all models"""
        models = apps.get_models()
        for _, value in self.models.items():
            models.append(value)
        return models

    def get_model(self, app_label: str, model_id: str) -> type[Model]:
        """Get model checks if any virtual models are registered, and falls back
        to actual django models"""
        if app_label.lower() == self.virtual_prefix:
            if model_id.lower() in self.models:
                return self.models[model_id]
        return apps.get_model(app_label, model_id)


registry = MetaModelRegistry("authentik_blueprints")

@@ -1,98 +0,0 @@
"""OCI Client"""
from typing import Any
from urllib.parse import ParseResult, urlparse

from opencontainers.distribution.reggie import (
    NewClient,
    WithDebug,
    WithDefaultName,
    WithDigest,
    WithReference,
    WithUserAgent,
    WithUsernamePassword,
)
from requests.exceptions import RequestException
from structlog import get_logger
from structlog.stdlib import BoundLogger

from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.http import authentik_user_agent

OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"


class OCIException(SentryIgnoredException):
    """OCI-related errors"""


class BlueprintOCIClient:
    """Blueprint OCI Client"""

    url: ParseResult
    sanitized_url: str
    logger: BoundLogger
    ref: str
    client: NewClient

    def __init__(self, url: str) -> None:
        self._parse_url(url)
        self.logger = get_logger().bind(url=self.sanitized_url)

        self.ref = "latest"
        path = self.url.path[1:]
        if ":" in self.url.path:
            path, _, self.ref = path.partition(":")
        self.client = NewClient(
            f"https://{self.url.hostname}",
            WithUserAgent(authentik_user_agent()),
            WithUsernamePassword(self.url.username, self.url.password),
            WithDefaultName(path),
            WithDebug(True),
        )

    def _parse_url(self, url: str):
        self.url = urlparse(url)
        netloc = self.url.netloc
        if "@" in netloc:
            netloc = netloc[netloc.index("@") + 1 :]
        self.sanitized_url = self.url._replace(netloc=netloc).geturl()
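
Side note (not part of the diff): a standalone illustration of the credential stripping `_parse_url` performs; the URL is invented.

```python
from urllib.parse import urlparse

url = urlparse("https://user:secret@ghcr.io/goauthentik/blueprints:latest")
netloc = url.netloc
if "@" in netloc:
    netloc = netloc[netloc.index("@") + 1 :]  # drop user:secret@
print(url._replace(netloc=netloc).geturl())
# https://ghcr.io/goauthentik/blueprints:latest
```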

    def fetch_manifests(self) -> dict[str, Any]:
        """Fetch manifests for ref"""
        self.logger.info("Fetching OCI manifests for blueprint")
        manifest_request = self.client.NewRequest(
            "GET",
            "/v2/<name>/manifests/<reference>",
            WithReference(self.ref),
        ).SetHeader("Accept", "application/vnd.oci.image.manifest.v1+json")
        try:
            manifest_response = self.client.Do(manifest_request)
            manifest_response.raise_for_status()
        except RequestException as exc:
            raise OCIException(exc) from exc
        manifest = manifest_response.json()
        if "errors" in manifest:
            raise OCIException(manifest["errors"])
        return manifest

    def fetch_blobs(self, manifest: dict[str, Any]):
        """Fetch blob based on manifest info"""
        blob = None
        for layer in manifest.get("layers", []):
            if layer.get("mediaType", "") == OCI_MEDIA_TYPE:
                blob = layer.get("digest")
                self.logger.debug("Found layer with matching media type", blob=blob)
        if not blob:
            raise OCIException("Blob not found")

        blob_request = self.client.NewRequest(
            "GET",
            "/v2/<name>/blobs/<digest>",
            WithDigest(blob),
        )
        try:
            blob_response = self.client.Do(blob_request)
            blob_response.raise_for_status()
            return blob_response.text
        except RequestException as exc:
            raise OCIException(exc) from exc

@@ -4,19 +4,11 @@ from hashlib import sha512
from pathlib import Path
from typing import Optional

from dacite.core import from_dict
from dacite import from_dict
from django.db import DatabaseError, InternalError, ProgrammingError
from django.utils.text import slugify
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from structlog.stdlib import get_logger
from watchdog.events import (
    FileCreatedEvent,
    FileModifiedEvent,
    FileSystemEvent,
    FileSystemEventHandler,
)
from watchdog.observers import Observer
from yaml import load
from yaml.error import YAMLError

@@ -25,7 +17,7 @@ from authentik.blueprints.models import (
    BlueprintInstanceStatus,
    BlueprintRetrievalFailed,
)
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, EntryInvalidError
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
from authentik.events.monitored_tasks import (
@@ -39,7 +31,6 @@ from authentik.lib.config import CONFIG
from authentik.root.celery import CELERY_APP

LOGGER = get_logger()
_file_watcher_started = False


@dataclass
@@ -53,39 +44,6 @@ class BlueprintFile:
    meta: Optional[BlueprintMetadata] = field(default=None)


def start_blueprint_watcher():
    """Start blueprint watcher, if it's not running already."""
    # This function might be called twice since it's called on celery startup
    # pylint: disable=global-statement
    global _file_watcher_started
    if _file_watcher_started:
        return
    observer = Observer()
    observer.schedule(BlueprintEventHandler(), CONFIG.y("blueprints_dir"), recursive=True)
    observer.start()
    _file_watcher_started = True


class BlueprintEventHandler(FileSystemEventHandler):
    """Event handler for blueprint events"""

    def on_any_event(self, event: FileSystemEvent):
        if not isinstance(event, (FileCreatedEvent, FileModifiedEvent)):
            return
        if event.is_directory:
            return
        if isinstance(event, FileCreatedEvent):
            LOGGER.debug("new blueprint file created, starting discovery")
            blueprints_discover.delay()
        if isinstance(event, FileModifiedEvent):
            path = Path(event.src_path)
            root = Path(CONFIG.y("blueprints_dir")).absolute()
            rel_path = str(path.relative_to(root))
            for instance in BlueprintInstance.objects.filter(path=rel_path):
                LOGGER.debug("modified blueprint file, starting apply", instance=instance)
                apply_blueprint.delay(instance.pk.hex)


@CELERY_APP.task(
    throws=(DatabaseError, ProgrammingError, InternalError),
)
@@ -101,7 +59,8 @@ def blueprints_find():
    """Find blueprints and return valid ones"""
    blueprints = []
    root = Path(CONFIG.y("blueprints_dir"))
    for path in root.glob("**/*.yaml"):
    for file in root.glob("**/*.yaml"):
        path = Path(file)
        LOGGER.debug("found blueprint", path=str(path))
        with open(path, "r", encoding="utf-8") as blueprint_file:
            try:
@@ -117,9 +76,7 @@ def blueprints_find():
            LOGGER.warning("invalid blueprint version", version=version, path=str(path))
            continue
        file_hash = sha512(path.read_bytes()).hexdigest()
        blueprint = BlueprintFile(
            str(path.relative_to(root)), version, file_hash, int(path.stat().st_mtime)
        )
        blueprint = BlueprintFile(path.relative_to(root), version, file_hash, path.stat().st_mtime)
        blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
        blueprints.append(blueprint)
        LOGGER.info(
@@ -168,7 +125,9 @@ def check_blueprint_v1_file(blueprint: BlueprintFile):
        )
        instance.save()
    if instance.last_applied_hash != blueprint.hash:
        apply_blueprint.delay(str(instance.pk))
        instance.metadata = asdict(blueprint.meta) if blueprint.meta else {}
        instance.save()
        apply_blueprint.delay(instance.pk.hex)


@CELERY_APP.task(
@@ -177,18 +136,15 @@ def check_blueprint_v1_file(blueprint: BlueprintFile):
)
def apply_blueprint(self: MonitoredTask, instance_pk: str):
    """Apply single blueprint"""
    self.set_uid(instance_pk)
    self.save_on_success = False
    instance: Optional[BlueprintInstance] = None
    try:
        instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first()
        self.set_uid(slugify(instance.name))
        if not instance or not instance.enabled:
            return
        blueprint_content = instance.retrieve()
        file_hash = sha512(blueprint_content.encode()).hexdigest()
        importer = Importer(blueprint_content, instance.context)
        if importer.blueprint.metadata:
            instance.metadata = asdict(importer.blueprint.metadata)
        valid, logs = importer.validate()
        if not valid:
            instance.status = BlueprintInstanceStatus.ERROR
@@ -204,6 +160,7 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
        instance.status = BlueprintInstanceStatus.SUCCESSFUL
        instance.last_applied_hash = file_hash
        instance.last_applied = now()
        instance.save()
        self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL))
    except (
        DatabaseError,
@@ -211,22 +168,7 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
        InternalError,
        IOError,
        BlueprintRetrievalFailed,
        EntryInvalidError,
    ) as exc:
        if instance:
            instance.status = BlueprintInstanceStatus.ERROR
        instance.status = BlueprintInstanceStatus.ERROR
        instance.save()
        self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
    finally:
        if instance:
            instance.save()


@CELERY_APP.task()
def clear_failed_blueprints():
    """Remove blueprints which couldn't be fetched"""
    # Exclude OCI blueprints as those might be temporarily unavailable
    for blueprint in BlueprintInstance.objects.exclude(path__startswith="oci://"):
        try:
            blueprint.retrieve()
        except BlueprintRetrievalFailed:
            blueprint.delete()

@@ -1,10 +1,8 @@
"""Application API Views"""
from datetime import timedelta
from typing import Optional

from django.core.cache import cache
from django.db.models import QuerySet
from django.db.models.functions import ExtractHour
from django.http.response import HttpResponseBadRequest
from django.shortcuts import get_object_or_404
from drf_spectacular.types import OpenApiTypes
@@ -25,15 +23,10 @@ from authentik.admin.api.metrics import CoordinateSerializer
from authentik.api.decorators import permission_required
from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import FilePathSerializer, FileUploadSerializer
from authentik.core.models import Application, User
from authentik.events.models import EventAction
from authentik.events.utils import sanitize_dict
from authentik.lib.utils.file import (
    FilePathSerializer,
    FileUploadSerializer,
    set_file,
    set_file_url,
)
from authentik.policies.api.exec import PolicyTestResultSerializer
from authentik.policies.engine import PolicyEngine
from authentik.policies.types import PolicyResult
@@ -44,7 +37,7 @@ LOGGER = get_logger()

def user_app_cache_key(user_pk: str) -> str:
    """Cache key where application list for user is saved"""
    return f"goauthentik.io/core/app_access/{user_pk}"
    return f"user_app_cache_{user_pk}"
|
||||
|
||||
|
||||
class ApplicationSerializer(ModelSerializer):
|
||||
@ -57,12 +50,11 @@ class ApplicationSerializer(ModelSerializer):
|
||||
|
||||
def get_launch_url(self, app: Application) -> Optional[str]:
|
||||
"""Allow formatting of launch URL"""
|
||||
user = None
|
||||
if "request" in self.context:
|
||||
user = self.context["request"].user
|
||||
user = self.context["request"].user
|
||||
return app.get_launch_url(user)
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Application
|
||||
fields = [
|
||||
"pk",
|
||||
@ -197,9 +189,9 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
|
||||
if not should_cache:
|
||||
allowed_applications = self._get_allowed_applications(queryset)
|
||||
if should_cache:
|
||||
LOGGER.debug("Caching allowed application list")
|
||||
allowed_applications = cache.get(user_app_cache_key(self.request.user.pk))
|
||||
if not allowed_applications:
|
||||
LOGGER.debug("Caching allowed application list")
|
||||
allowed_applications = self._get_allowed_applications(queryset)
|
||||
cache.set(
|
||||
user_app_cache_key(self.request.user.pk),
|
||||
@ -226,10 +218,21 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
|
||||
methods=["POST"],
|
||||
parser_classes=(MultiPartParser,),
|
||||
)
|
||||
# pylint: disable=unused-argument
|
||||
def set_icon(self, request: Request, slug: str):
|
||||
"""Set application icon"""
|
||||
app: Application = self.get_object()
|
||||
return set_file(request, app, "meta_icon")
|
||||
icon = request.FILES.get("file", None)
|
||||
clear = request.data.get("clear", "false").lower() == "true"
|
||||
if clear:
|
||||
# .delete() saves the model by default
|
||||
app.meta_icon.delete()
|
||||
return Response({})
|
||||
if icon:
|
||||
app.meta_icon = icon
|
||||
app.save()
|
||||
return Response({})
|
||||
return HttpResponseBadRequest()
|
||||
|
||||
@permission_required("authentik_core.change_application")
|
||||
@extend_schema(
|
||||
@ -245,22 +248,29 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
|
||||
filter_backends=[],
|
||||
methods=["POST"],
|
||||
)
|
||||
# pylint: disable=unused-argument
|
||||
def set_icon_url(self, request: Request, slug: str):
|
||||
"""Set application icon (as URL)"""
|
||||
app: Application = self.get_object()
|
||||
return set_file_url(request, app, "meta_icon")
|
||||
url = request.data.get("url", None)
|
||||
if url is None:
|
||||
return HttpResponseBadRequest()
|
||||
app.meta_icon.name = url
|
||||
app.save()
|
||||
return Response({})
|
||||
|
||||
@permission_required("authentik_core.view_application", ["authentik_events.view_event"])
|
||||
@extend_schema(responses={200: CoordinateSerializer(many=True)})
|
||||
@action(detail=True, pagination_class=None, filter_backends=[])
|
||||
# pylint: disable=unused-argument
|
||||
def metrics(self, request: Request, slug: str):
|
||||
"""Metrics for application logins"""
|
||||
app = self.get_object()
|
||||
return Response(
|
||||
get_objects_for_user(request.user, "authentik_events.view_event").filter(
|
||||
get_objects_for_user(request.user, "authentik_events.view_event")
|
||||
.filter(
|
||||
action=EventAction.AUTHORIZE_APPLICATION,
|
||||
context__authorized_application__pk=app.pk.hex,
|
||||
)
|
||||
# 3 data points per day, so 8 hour spans
|
||||
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
|
||||
.get_events_per_hour()
|
||||
)
|
||||
|
@ -74,6 +74,7 @@ class AuthenticatedSessionSerializer(ModelSerializer):
|
||||
return GEOIP_READER.city_dict(instance.last_ip)
|
||||
|
||||
class Meta:
|
||||
|
||||
model = AuthenticatedSession
|
||||
fields = [
|
||||
"uuid",
|
||||
|
@ -2,22 +2,15 @@
|
||||
from json import loads
|
||||
|
||||
from django.db.models.query import QuerySet
|
||||
from django.http import Http404
|
||||
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
|
||||
from django_filters.filterset import FilterSet
|
||||
from drf_spectacular.utils import OpenApiResponse, extend_schema
|
||||
from guardian.shortcuts import get_objects_for_user
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import CharField, IntegerField, JSONField
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
from rest_framework_guardian.filters import ObjectPermissionsFilter
|
||||
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import PassiveSerializer, is_dict
|
||||
from authentik.core.api.utils import is_dict
|
||||
from authentik.core.models import Group, User
|
||||
|
||||
|
||||
@ -29,6 +22,7 @@ class GroupMemberSerializer(ModelSerializer):
|
||||
uid = CharField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
|
||||
model = User
|
||||
fields = [
|
||||
"pk",
|
||||
@ -55,6 +49,7 @@ class GroupSerializer(ModelSerializer):
|
||||
num_pk = IntegerField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Group
|
||||
fields = [
|
||||
"pk",
|
||||
@ -94,6 +89,7 @@ class GroupFilter(FilterSet):
|
||||
queryset=User.objects.all(),
|
||||
)
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def filter_attributes(self, queryset, name, value):
|
||||
"""Filter attributes by query args"""
|
||||
try:
|
||||
@ -112,16 +108,11 @@ class GroupFilter(FilterSet):
|
||||
return queryset
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Group
|
||||
fields = ["name", "is_superuser", "members_by_pk", "attributes", "members_by_username"]
|
||||
|
||||
|
||||
class UserAccountSerializer(PassiveSerializer):
|
||||
"""Account adding/removing operations"""
|
||||
|
||||
pk = IntegerField(required=True)
|
||||
|
||||
|
||||
class GroupViewSet(UsedByMixin, ModelViewSet):
|
||||
"""Group Viewset"""
|
||||
|
||||
@ -143,51 +134,3 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
|
||||
if self.request.user.has_perm("authentik_core.view_group"):
|
||||
return self._filter_queryset_for_list(queryset)
|
||||
return super().filter_queryset(queryset)
|
||||
|
||||
@permission_required(None, ["authentik_core.add_user"])
|
||||
@extend_schema(
|
||||
request=UserAccountSerializer,
|
||||
responses={
|
||||
204: OpenApiResponse(description="User added"),
|
||||
404: OpenApiResponse(description="User not found"),
|
||||
},
|
||||
)
|
||||
@action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
|
||||
def add_user(self, request: Request, pk: str) -> Response:
|
||||
"""Add user to group"""
|
||||
group: Group = self.get_object()
|
||||
user: User = (
|
||||
get_objects_for_user(request.user, "authentik_core.view_user")
|
||||
.filter(
|
||||
pk=request.data.get("pk"),
|
||||
)
|
||||
.first()
|
||||
)
|
||||
if not user:
|
||||
raise Http404
|
||||
group.users.add(user)
|
||||
return Response(status=204)
|
||||
|
||||
@permission_required(None, ["authentik_core.add_user"])
|
||||
@extend_schema(
|
||||
request=UserAccountSerializer,
|
||||
responses={
|
||||
204: OpenApiResponse(description="User added"),
|
||||
404: OpenApiResponse(description="User not found"),
|
||||
},
|
||||
)
|
||||
@action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
|
||||
def remove_user(self, request: Request, pk: str) -> Response:
|
||||
"""Add user to group"""
|
||||
group: Group = self.get_object()
|
||||
user: User = (
|
||||
get_objects_for_user(request.user, "authentik_core.view_user")
|
||||
.filter(
|
||||
pk=request.data.get("pk"),
|
||||
)
|
||||
.first()
|
||||
)
|
||||
if not user:
|
||||
raise Http404
|
||||
group.users.remove(user)
|
||||
return Response(status=204)
|
||||
|
@ -17,9 +17,8 @@ from authentik.api.decorators import permission_required
|
||||
from authentik.blueprints.api import ManagedSerializer
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import MetaNameSerializer, PassiveSerializer, TypeCreateSerializer
|
||||
from authentik.core.expression.evaluator import PropertyMappingEvaluator
|
||||
from authentik.core.expression import PropertyMappingEvaluator
|
||||
from authentik.core.models import PropertyMapping
|
||||
from authentik.events.utils import sanitize_item
|
||||
from authentik.lib.utils.reflection import all_subclasses
|
||||
from authentik.policies.api.exec import PolicyTestSerializer
|
||||
|
||||
@ -42,13 +41,12 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri
|
||||
|
||||
def validate_expression(self, expression: str) -> str:
|
||||
"""Test Syntax"""
|
||||
evaluator = PropertyMappingEvaluator(
|
||||
self.instance,
|
||||
)
|
||||
evaluator = PropertyMappingEvaluator()
|
||||
evaluator.validate(expression)
|
||||
return expression
|
||||
|
||||
class Meta:
|
||||
|
||||
model = PropertyMapping
|
||||
fields = [
|
||||
"pk",
|
||||
@ -116,6 +114,7 @@ class PropertyMappingViewSet(
|
||||
],
|
||||
)
|
||||
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
|
||||
# pylint: disable=unused-argument, invalid-name
|
||||
def test(self, request: Request, pk: str) -> Response:
|
||||
"""Test Property Mapping"""
|
||||
mapping: PropertyMapping = self.get_object()
|
||||
@ -139,9 +138,7 @@ class PropertyMappingViewSet(
|
||||
self.request,
|
||||
**test_params.validated_data.get("context", {}),
|
||||
)
|
||||
response_data["result"] = dumps(
|
||||
sanitize_item(result), indent=(4 if format_result else None)
|
||||
)
|
||||
response_data["result"] = dumps(result, indent=(4 if format_result else None))
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
response_data["result"] = str(exc)
|
||||
response_data["successful"] = False
|
||||
|
@ -31,6 +31,7 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
|
||||
return obj.component
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Provider
|
||||
fields = [
|
||||
"pk",
|
||||
|
@ -2,11 +2,10 @@
|
||||
from typing import Iterable
|
||||
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from drf_spectacular.utils import OpenApiResponse, extend_schema
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework import mixins
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.filters import OrderingFilter, SearchFilter
|
||||
from rest_framework.parsers import MultiPartParser
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ModelSerializer, ReadOnlyField, SerializerMethodField
|
||||
@ -14,17 +13,10 @@ from rest_framework.viewsets import GenericViewSet
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
|
||||
from authentik.core.models import Source, UserSourceConnection
|
||||
from authentik.core.types import UserSettingSerializer
|
||||
from authentik.lib.utils.file import (
|
||||
FilePathSerializer,
|
||||
FileUploadSerializer,
|
||||
set_file,
|
||||
set_file_url,
|
||||
)
|
||||
from authentik.lib.utils.reflection import all_subclasses
|
||||
from authentik.policies.engine import PolicyEngine
|
||||
|
||||
@ -36,7 +28,6 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
|
||||
|
||||
managed = ReadOnlyField()
|
||||
component = SerializerMethodField()
|
||||
icon = ReadOnlyField(source="get_icon")
|
||||
|
||||
def get_component(self, obj: Source) -> str:
|
||||
"""Get object component so that we know how to edit the object"""
|
||||
@ -46,6 +37,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
|
||||
return obj.component
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Source
|
||||
fields = [
|
||||
"pk",
|
||||
@ -62,7 +54,6 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
|
||||
"user_matching_mode",
|
||||
"managed",
|
||||
"user_path_template",
|
||||
"icon",
|
||||
]
|
||||
|
||||
|
||||
@ -84,47 +75,6 @@ class SourceViewSet(
|
||||
def get_queryset(self): # pragma: no cover
|
||||
return Source.objects.select_subclasses()
|
||||
|
||||
@permission_required("authentik_core.change_source")
|
||||
@extend_schema(
|
||||
request={
|
||||
"multipart/form-data": FileUploadSerializer,
|
||||
},
|
||||
responses={
|
||||
200: OpenApiResponse(description="Success"),
|
||||
400: OpenApiResponse(description="Bad request"),
|
||||
},
|
||||
)
|
||||
@action(
|
||||
detail=True,
|
||||
pagination_class=None,
|
||||
filter_backends=[],
|
||||
methods=["POST"],
|
||||
parser_classes=(MultiPartParser,),
|
||||
)
|
||||
def set_icon(self, request: Request, slug: str):
|
||||
"""Set source icon"""
|
||||
source: Source = self.get_object()
|
||||
return set_file(request, source, "icon")
|
||||
|
||||
@permission_required("authentik_core.change_source")
|
||||
@extend_schema(
|
||||
request=FilePathSerializer,
|
||||
responses={
|
||||
200: OpenApiResponse(description="Success"),
|
||||
400: OpenApiResponse(description="Bad request"),
|
||||
},
|
||||
)
|
||||
@action(
|
||||
detail=True,
|
||||
pagination_class=None,
|
||||
filter_backends=[],
|
||||
methods=["POST"],
|
||||
)
|
||||
def set_icon_url(self, request: Request, slug: str):
|
||||
"""Set source icon (as URL)"""
|
||||
source: Source = self.get_object()
|
||||
return set_file_url(request, source, "icon")
|
||||
|
||||
@extend_schema(responses={200: TypeCreateSerializer(many=True)})
|
||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
||||
def types(self, request: Request) -> Response:
|
||||
|
@ -39,6 +39,7 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):
|
||||
return attrs
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Token
|
||||
fields = [
|
||||
"pk",
|
||||
@ -111,6 +112,7 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
|
||||
}
|
||||
)
|
||||
@action(detail=True, pagination_class=None, filter_backends=[], methods=["GET"])
|
||||
# pylint: disable=unused-argument
|
||||
def view_key(self, request: Request, identifier: str) -> Response:
|
||||
"""Return token key and log access"""
|
||||
token: Token = self.get_object()
|
||||
@ -132,11 +134,11 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
|
||||
},
|
||||
)
|
||||
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
|
||||
# pylint: disable=unused-argument
|
||||
def set_key(self, request: Request, identifier: str) -> Response:
|
||||
"""Set token key. Action is logged as event. `authentik_core.set_token_key` permission
|
||||
is required."""
|
||||
"""Return token key and log access"""
|
||||
token: Token = self.get_object()
|
||||
key = request.data.get("key")
|
||||
key = request.POST.get("key")
|
||||
if not key:
|
||||
return Response(status=400)
|
||||
token.key = key
|
||||
|
@ -53,7 +53,7 @@ class UsedByMixin:
|
||||
responses={200: UsedBySerializer(many=True)},
|
||||
)
|
||||
@action(detail=True, pagination_class=None, filter_backends=[])
|
||||
# pylint: disable=too-many-locals
|
||||
# pylint: disable=invalid-name, unused-argument, too-many-locals
|
||||
def used_by(self, request: Request, *args, **kwargs) -> Response:
|
||||
"""Get a list of all objects that use this object"""
|
||||
# pyright: reportGeneralTypeIssues=false
|
||||
|
@ -4,9 +4,6 @@ from json import loads
|
||||
from typing import Any, Optional
|
||||
|
||||
from django.contrib.auth import update_session_auth_hash
|
||||
from django.contrib.sessions.backends.cache import KEY_PREFIX
|
||||
from django.core.cache import cache
|
||||
from django.db.models.functions import ExtractHour
|
||||
from django.db.models.query import QuerySet
|
||||
from django.db.transaction import atomic
|
||||
from django.db.utils import IntegrityError
|
||||
@ -43,13 +40,13 @@ from rest_framework.serializers import (
|
||||
PrimaryKeyRelatedField,
|
||||
ValidationError,
|
||||
)
|
||||
from rest_framework.validators import UniqueValidator
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
from rest_framework_guardian.filters import ObjectPermissionsFilter
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.admin.api.metrics import CoordinateSerializer
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.groups import GroupSerializer
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import LinkSerializer, PassiveSerializer, is_dict
|
||||
from authentik.core.middleware import (
|
||||
@ -60,7 +57,6 @@ from authentik.core.models import (
|
||||
USER_ATTRIBUTE_SA,
|
||||
USER_ATTRIBUTE_TOKEN_EXPIRING,
|
||||
USER_PATH_SERVICE_ACCOUNT,
|
||||
AuthenticatedSession,
|
||||
Group,
|
||||
Token,
|
||||
TokenIntents,
|
||||
@ -78,25 +74,6 @@ from authentik.tenants.models import Tenant
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class UserGroupSerializer(ModelSerializer):
|
||||
"""Simplified Group Serializer for user's groups"""
|
||||
|
||||
attributes = JSONField(required=False)
|
||||
parent_name = CharField(source="parent.name", read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Group
|
||||
fields = [
|
||||
"pk",
|
||||
"num_pk",
|
||||
"name",
|
||||
"is_superuser",
|
||||
"parent",
|
||||
"parent_name",
|
||||
"attributes",
|
||||
]
|
||||
|
||||
|
||||
class UserSerializer(ModelSerializer):
    """User Serializer"""

@ -106,9 +83,9 @@ class UserSerializer(ModelSerializer):
    groups = PrimaryKeyRelatedField(
        allow_empty=True, many=True, source="ak_groups", queryset=Group.objects.all()
    )
    groups_obj = ListSerializer(child=UserGroupSerializer(), read_only=True, source="ak_groups")
    groups_obj = ListSerializer(child=GroupSerializer(), read_only=True, source="ak_groups")
    uid = CharField(read_only=True)
    username = CharField(max_length=150, validators=[UniqueValidator(queryset=User.objects.all())])
    username = CharField(max_length=150)

    def validate_path(self, path: str) -> str:
        """Validate path"""
@ -120,6 +97,7 @@ class UserSerializer(ModelSerializer):
        return path

    class Meta:

        model = User
        fields = [
            "pk",
@ -171,6 +149,7 @@ class UserSelfSerializer(ModelSerializer):
        return user.group_attributes(self._context["request"]).get("settings", {})

    class Meta:

        model = User
        fields = [
            "pk",
@ -201,44 +180,38 @@ class SessionUserSerializer(PassiveSerializer):

class UserMetricsSerializer(PassiveSerializer):
    """User Metrics"""

    logins = SerializerMethodField()
    logins_failed = SerializerMethodField()
    authorizations = SerializerMethodField()
    logins_per_1h = SerializerMethodField()
    logins_failed_per_1h = SerializerMethodField()
    authorizations_per_1h = SerializerMethodField()

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_logins(self, _):
        """Get successful logins per 8 hours for the last 7 days"""
    def get_logins_per_1h(self, _):
        """Get successful logins per hour for the last 24 hours"""
        user = self.context["user"]
        return (
            get_objects_for_user(user, "authentik_events.view_event").filter(
                action=EventAction.LOGIN, user__pk=user.pk
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
            get_objects_for_user(user, "authentik_events.view_event")
            .filter(action=EventAction.LOGIN, user__pk=user.pk)
            .get_events_per_hour()
        )

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_logins_failed(self, _):
        """Get failed logins per 8 hours for the last 7 days"""
    def get_logins_failed_per_1h(self, _):
        """Get failed logins per hour for the last 24 hours"""
        user = self.context["user"]
        return (
            get_objects_for_user(user, "authentik_events.view_event").filter(
                action=EventAction.LOGIN_FAILED, context__username=user.username
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
            get_objects_for_user(user, "authentik_events.view_event")
            .filter(action=EventAction.LOGIN_FAILED, context__username=user.username)
            .get_events_per_hour()
        )

    @extend_schema_field(CoordinateSerializer(many=True))
    def get_authorizations(self, _):
        """Get failed logins per 8 hours for the last 7 days"""
    def get_authorizations_per_1h(self, _):
        """Get failed logins per hour for the last 24 hours"""
        user = self.context["user"]
        return (
            get_objects_for_user(user, "authentik_events.view_event").filter(
                action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk
            )
            # 3 data points per day, so 8 hour spans
            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
            get_objects_for_user(user, "authentik_events.view_event")
            .filter(action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk)
            .get_events_per_hour()
        )
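The metrics hunk above swaps fixed 8-hour buckets over 7 days for per-hour buckets over the last 24 hours; `get_events_per_hour()` presumably lives on the events queryset, which is not shown in this diff. A minimal sketch of the same hourly bucketing with plain Django ORM calls, assuming an event model with a `created` timestamp (all names here are illustrative):

```python
from datetime import timedelta

from django.db.models import Count, QuerySet
from django.db.models.functions import TruncHour
from django.utils.timezone import now


def events_per_hour(events: QuerySet, hours: int = 24) -> QuerySet:
    """Group events into one count per hour for the last `hours` hours."""
    since = now() - timedelta(hours=hours)
    return (
        events.filter(created__gte=since)
        .annotate(hour=TruncHour("created"))  # truncate timestamp to the hour
        .values("hour")
        .annotate(count=Count("pk"))  # one row per hour bucket
        .order_by("hour")
    )
```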
@ -270,6 +243,7 @@ class UsersFilter(FilterSet):
        queryset=Group.objects.all(),
    )

    # pylint: disable=unused-argument
    def filter_attributes(self, queryset, name, value):
        """Filter attributes by query args"""
        try:
@ -400,12 +374,13 @@ class UserViewSet(UsedByMixin, ModelViewSet):
            )
            response["token"] = token.key
            return Response(response)
        except IntegrityError as exc:
        except (IntegrityError) as exc:
            return Response(data={"non_field_errors": [str(exc)]}, status=400)

    @extend_schema(responses={200: SessionUserSerializer(many=False)})
    @action(url_path="me", url_name="me", detail=False, pagination_class=None, filter_backends=[])
    def user_me(self, request: Request) -> Response:
    @action(detail=False, pagination_class=None, filter_backends=[])
    # pylint: disable=invalid-name
    def me(self, request: Request) -> Response:
        """Get information about current user"""
        context = {"request": request}
        serializer = SessionUserSerializer(
@ -433,6 +408,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        },
    )
    @action(detail=True, methods=["POST"])
    # pylint: disable=invalid-name, unused-argument
    def set_password(self, request: Request, pk: int) -> Response:
        """Set password for user"""
        user: User = self.get_object()
@ -450,6 +426,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
    @permission_required("authentik_core.view_user", ["authentik_events.view_event"])
    @extend_schema(responses={200: UserMetricsSerializer(many=False)})
    @action(detail=True, pagination_class=None, filter_backends=[])
    # pylint: disable=invalid-name, unused-argument
    def metrics(self, request: Request, pk: int) -> Response:
        """User metrics per 1h"""
        user: User = self.get_object()
@ -465,6 +442,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        },
    )
    @action(detail=True, pagination_class=None, filter_backends=[])
    # pylint: disable=invalid-name, unused-argument
    def recovery(self, request: Request, pk: int) -> Response:
        """Create a temporary link that a user can use to recover their accounts"""
        link, _ = self._create_recovery_link()
@ -489,9 +467,10 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        },
    )
    @action(detail=True, pagination_class=None, filter_backends=[])
    # pylint: disable=invalid-name, unused-argument
    def recovery_email(self, request: Request, pk: int) -> Response:
        """Create a temporary link that a user can use to recover their accounts"""
        for_user: User = self.get_object()
        for_user = self.get_object()
        if for_user.email == "":
            LOGGER.debug("User doesn't have an email address")
            return Response(status=404)
@ -509,9 +488,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        email_stage: EmailStage = stages.first()
        message = TemplateEmailMessage(
            subject=_(email_stage.subject),
            to=[for_user.email],
            template_name=email_stage.template,
            language=for_user.locale(request),
            to=[for_user.email],
            template_context={
                "url": link,
                "user": for_user,
@ -562,14 +540,3 @@ class UserViewSet(UsedByMixin, ModelViewSet):
                )
            }
        )

    def partial_update(self, request: Request, *args, **kwargs) -> Response:
        response = super().partial_update(request, *args, **kwargs)
        instance: User = self.get_object()
        if not instance.is_active:
            sessions = AuthenticatedSession.objects.filter(user=instance)
            session_ids = sessions.values_list("session_key", flat=True)
            cache.delete_many(f"{KEY_PREFIX}{session}" for session in session_ids)
            sessions.delete()
            LOGGER.debug("Deleted user's sessions", user=instance.username)
        return response
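The removed `partial_update` override also shows a reusable pattern: when a user is deactivated, its cached sessions have to be evicted before the database rows are deleted. A standalone sketch of that pattern, assuming `AuthenticatedSession` from the hunk above is importable; everything else is stock Django:

```python
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache


def revoke_sessions(user) -> None:
    """Delete a user's sessions from both the cache and the database."""
    sessions = AuthenticatedSession.objects.filter(user=user)
    session_ids = sessions.values_list("session_key", flat=True)
    # Evict cached session payloads first, then drop the rows themselves.
    cache.delete_many(f"{KEY_PREFIX}{session_id}" for session_id in session_ids)
    sessions.delete()
```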
@ -2,7 +2,7 @@
from typing import Any

from django.db.models import Model
from rest_framework.fields import CharField, IntegerField, JSONField
from rest_framework.fields import BooleanField, CharField, FileField, IntegerField
from rest_framework.serializers import Serializer, SerializerMethodField, ValidationError

@ -23,10 +23,17 @@ class PassiveSerializer(Serializer):
        return Model()


class PropertyMappingPreviewSerializer(PassiveSerializer):
    """Preview how the current user is mapped via the property mappings selected in a provider"""
class FileUploadSerializer(PassiveSerializer):
    """Serializer to upload file"""

    preview = JSONField(read_only=True)
    file = FileField(required=False)
    clear = BooleanField(default=False)


class FilePathSerializer(PassiveSerializer):
    """Serializer to upload file"""

    url = CharField()


class MetaNameSerializer(PassiveSerializer):
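A serializer like `FileUploadSerializer` from this hunk is typically consumed inside a DRF action that either stores the uploaded file or clears the existing one. A hypothetical sketch of such a consumer; the view name, action name, and storage steps are illustrative assumptions, not authentik's API:

```python
from rest_framework.decorators import action
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet


class ApplicationViewSet(ModelViewSet):
    @action(detail=True, methods=["POST"])
    def set_icon(self, request: Request, pk: str) -> Response:
        """Accept a multipart upload, or clear the stored file."""
        serializer = FileUploadSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        if serializer.validated_data["clear"]:
            ...  # delete the stored file here
        elif serializer.validated_data.get("file"):
            ...  # persist the uploaded file here
        return Response(status=200)
```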
@ -1,7 +1,7 @@
"""authentik core app config"""
from django.conf import settings

from authentik.blueprints.apps import ManagedAppConfig
from authentik.blueprints.manager import ManagedAppConfig


class AuthentikCoreConfig(ManagedAppConfig):
@ -1,33 +1,29 @@
"""Property Mapping Evaluator"""
from traceback import format_tb
from typing import Optional

from django.db.models import Model
from django.http import HttpRequest
from guardian.utils import get_anonymous_user

from authentik.core.models import User
from authentik.core.models import PropertyMapping, User
from authentik.events.models import Event, EventAction
from authentik.lib.expression.evaluator import BaseEvaluator
from authentik.lib.utils.errors import exception_to_string
from authentik.policies.types import PolicyRequest


class PropertyMappingEvaluator(BaseEvaluator):
    """Custom Evaluator that adds some different context variables."""
    """Custom Evalautor that adds some different context variables."""

    def __init__(
    def set_context(
        self,
        model: Model,
        user: Optional[User] = None,
        request: Optional[HttpRequest] = None,
        user: Optional[User],
        request: Optional[HttpRequest],
        mapping: PropertyMapping,
        **kwargs,
    ):
        if hasattr(model, "name"):
            _filename = model.name
        else:
            _filename = str(model)
        super().__init__(filename=_filename)
        req = PolicyRequest(user=User())
        req.obj = model
        """Update context with context from PropertyMapping's evaluate"""
        req = PolicyRequest(user=get_anonymous_user())
        req.obj = mapping
        if user:
            req.user = user
            self._context["user"] = user
@ -38,7 +34,7 @@ class PropertyMappingEvaluator(BaseEvaluator):

    def handle_error(self, exc: Exception, expression_source: str):
        """Exception Handler"""
        error_string = exception_to_string(exc)
        error_string = "\n".join(format_tb(exc.__traceback__) + [str(exc)])
        event = Event.new(
            EventAction.PROPERTY_MAPPING_EXCEPTION,
            expression=expression_source,
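The `handle_error` hunk replaces the `exception_to_string` helper with inline traceback formatting. What that older expression produces can be checked with the standard library alone:

```python
from traceback import format_tb


def render_error(exc: Exception) -> str:
    """Join formatted traceback frames and the exception message."""
    return "\n".join(format_tb(exc.__traceback__) + [str(exc)])


try:
    1 / 0
except ZeroDivisionError as exc:
    print(render_error(exc))  # frame listing first, then "division by zero"
```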
@ -1,17 +1,15 @@
"""authentik shell command"""
import code
import platform
import sys
import traceback

from django.apps import apps
from django.core.management.base import BaseCommand
from django.db.models import Model
from django.db.models.signals import post_save, pre_delete

from authentik import get_full_version
from authentik import __version__
from authentik.core.models import User
from authentik.events.middleware import should_log_model
from authentik.events.middleware import IGNORED_MODELS
from authentik.events.models import Event, EventAction
from authentik.events.utils import model_to_dict

@ -20,7 +18,7 @@ BANNER_TEXT = """### authentik shell ({authentik})
    node=platform.node(),
    python=platform.python_version(),
    arch=platform.machine(),
    authentik=get_full_version(),
    authentik=__version__,
)


@ -49,9 +47,10 @@ class Command(BaseCommand):
        return namespace

    @staticmethod
    # pylint: disable=unused-argument
    def post_save_handler(sender, instance: Model, created: bool, **_):
        """Signal handler for all object's post_save"""
        if not should_log_model(instance):
        if isinstance(instance, IGNORED_MODELS):
            return

        action = EventAction.MODEL_CREATED if created else EventAction.MODEL_UPDATED
@ -64,9 +63,10 @@ class Command(BaseCommand):
        ).save()

    @staticmethod
    # pylint: disable=unused-argument
    def pre_delete_handler(sender, instance: Model, **_):
        """Signal handler for all object's pre_delete"""
        if not should_log_model(instance):  # pragma: no cover
        if isinstance(instance, IGNORED_MODELS):  # pragma: no cover
            return

        Event.new(EventAction.MODEL_DELETED, model=model_to_dict(instance)).set_user(
@ -89,21 +89,6 @@ class Command(BaseCommand):
            exec(options["command"], namespace)  # nosec # noqa
            return

        try:
            hook = sys.__interactivehook__
        except AttributeError:
            # Match the behavior of the cpython shell where a missing
            # sys.__interactivehook__ is ignored.
            pass
        else:
            try:
                hook()
            except Exception:  # pylint: disable=broad-except
                # Match the behavior of the cpython shell where an error in
                # sys.__interactivehook__ prints a warning and the exception
                # and continues.
                print("Failed calling sys.__interactivehook__")
                traceback.print_exc()
        # Try to enable tab-complete
        try:
            import readline
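The two handlers in this hunk only take effect once they are connected to Django's model signals, which the shell command does before opening the interactive session (not part of the hunk). A minimal sketch of that wiring, assuming the `Command` class above is importable; the `dispatch_uid` strings are illustrative:

```python
from django.db.models.signals import post_save, pre_delete

# dispatch_uid guards against connecting the same handler twice
post_save.connect(Command.post_save_handler, dispatch_uid="ak-shell-post-save")
pre_delete.connect(Command.pre_delete_handler, dispatch_uid="ak-shell-pre-delete")
```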
@ -1,10 +1,9 @@
"""authentik admin Middleware to impersonate users"""
from contextvars import ContextVar
from typing import Callable, Optional
from typing import Callable
from uuid import uuid4

from django.http import HttpRequest, HttpResponse
from django.utils.translation import activate
from sentry_sdk.api import set_tag
from structlog.contextvars import STRUCTLOG_KEY_PREFIX

@ -14,9 +13,9 @@ RESPONSE_HEADER_ID = "X-authentik-id"
KEY_AUTH_VIA = "auth_via"
KEY_USER = "user"

CTX_REQUEST_ID = ContextVar[Optional[str]](STRUCTLOG_KEY_PREFIX + "request_id", default=None)
CTX_HOST = ContextVar[Optional[str]](STRUCTLOG_KEY_PREFIX + "host", default=None)
CTX_AUTH_VIA = ContextVar[Optional[str]](STRUCTLOG_KEY_PREFIX + KEY_AUTH_VIA, default=None)
CTX_REQUEST_ID = ContextVar(STRUCTLOG_KEY_PREFIX + "request_id", default=None)
CTX_HOST = ContextVar(STRUCTLOG_KEY_PREFIX + "host", default=None)
CTX_AUTH_VIA = ContextVar(STRUCTLOG_KEY_PREFIX + KEY_AUTH_VIA, default=None)


class ImpersonateMiddleware:
@ -30,10 +29,6 @@ class ImpersonateMiddleware:
    def __call__(self, request: HttpRequest) -> HttpResponse:
        # No permission checks are done here, they need to be checked before
        # SESSION_KEY_IMPERSONATE_USER is set.
        if request.user.is_authenticated:
            locale = request.user.locale(request)
            if locale != "":
                activate(locale)

        if SESSION_KEY_IMPERSONATE_USER in request.session:
            request.user = request.session[SESSION_KEY_IMPERSONATE_USER]
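The `ContextVar` change in this hunk only drops the `[Optional[str]]` subscription, which is a static typing hint; runtime behavior is identical either way. A standalone illustration:

```python
from contextvars import ContextVar
from typing import Optional

# Subscripting ContextVar annotates the value type; it changes nothing at runtime.
CTX_EXAMPLE: ContextVar[Optional[str]] = ContextVar("example", default=None)

token = CTX_EXAMPLE.set("request-123")
print(CTX_EXAMPLE.get())  # request-123
CTX_EXAMPLE.reset(token)
print(CTX_EXAMPLE.get())  # None
```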
@ -14,6 +14,7 @@ import authentik.core.models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
@ -43,10 +44,7 @@ class Migration(migrations.Migration):
                    "is_superuser",
                    models.BooleanField(
                        default=False,
                        help_text=(
                            "Designates that this user has all permissions without explicitly"
                            " assigning them."
                        ),
                        help_text="Designates that this user has all permissions without explicitly assigning them.",
                        verbose_name="superuser status",
                    ),
                ),
@ -54,9 +52,7 @@ class Migration(migrations.Migration):
                    "username",
                    models.CharField(
                        error_messages={"unique": "A user with that username already exists."},
                        help_text=(
                            "Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only."
                        ),
                        help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
                        max_length=150,
                        unique=True,
                        validators=[django.contrib.auth.validators.UnicodeUsernameValidator()],
@ -87,10 +83,7 @@ class Migration(migrations.Migration):
                    "is_active",
                    models.BooleanField(
                        default=True,
                        help_text=(
                            "Designates whether this user should be treated as active. Unselect"
                            " this instead of deleting accounts."
                        ),
                        help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
                        verbose_name="active",
                    ),
                ),
authentik/core/migrations/0002_auto_20200523_1133.py (Normal file, 55 lines)
@ -0,0 +1,55 @@
# Generated by Django 3.0.6 on 2020-05-23 11:33

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_flows", "0003_auto_20200523_1133"),
        ("authentik_core", "0001_initial"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="application",
            name="skip_authorization",
        ),
        migrations.AddField(
            model_name="source",
            name="authentication_flow",
            field=models.ForeignKey(
                blank=True,
                default=None,
                help_text="Flow to use when authenticating existing users.",
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="source_authentication",
                to="authentik_flows.Flow",
            ),
        ),
        migrations.AddField(
            model_name="source",
            name="enrollment_flow",
            field=models.ForeignKey(
                blank=True,
                default=None,
                help_text="Flow to use when enrolling new users.",
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="source_enrollment",
                to="authentik_flows.Flow",
            ),
        ),
        migrations.AddField(
            model_name="provider",
            name="authorization_flow",
            field=models.ForeignKey(
                help_text="Flow used when authorizing this provider.",
                on_delete=django.db.models.deletion.CASCADE,
                related_name="provider_authorization",
                to="authentik_flows.Flow",
            ),
        ),
    ]
@ -51,6 +51,7 @@ def create_default_admin_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):


class Migration(migrations.Migration):

    replaces = [
        ("authentik_core", "0002_auto_20200523_1133"),
        ("authentik_core", "0003_default_user"),
@ -171,10 +172,7 @@ class Migration(migrations.Migration):
            name="groups",
            field=models.ManyToManyField(
                blank=True,
                help_text=(
                    "The groups this user belongs to. A user will get all permissions granted to"
                    " each of their groups."
                ),
                help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
                related_name="user_set",
                related_query_name="user",
                to="auth.Group",
authentik/core/migrations/0003_default_user.py (Normal file, 57 lines)
@ -0,0 +1,57 @@
# Generated by Django 3.0.6 on 2020-05-23 16:40
from os import environ

from django.apps.registry import Apps
from django.conf import settings
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor


def create_default_user(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    from django.contrib.auth.hashers import make_password

    User = apps.get_model("authentik_core", "User")
    db_alias = schema_editor.connection.alias

    akadmin, _ = User.objects.using(db_alias).get_or_create(
        username="akadmin", email="root@localhost", name="authentik Default Admin"
    )
    password = None
    if "TF_BUILD" in environ or settings.TEST:
        password = "akadmin"  # noqa # nosec
    if "AK_ADMIN_PASS" in environ:
        password = environ["AK_ADMIN_PASS"]
    if "AUTHENTIK_BOOTSTRAP_PASSWORD" in environ:
        password = environ["AUTHENTIK_BOOTSTRAP_PASSWORD"]
    if password:
        akadmin.password = make_password(password)
    else:
        akadmin.password = make_password(None)
    akadmin.save()


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0002_auto_20200523_1133"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="user",
            name="is_superuser",
        ),
        migrations.RemoveField(
            model_name="user",
            name="is_staff",
        ),
        migrations.RunPython(create_default_user),
        migrations.AddField(
            model_name="user",
            name="is_superuser",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="user", name="is_staff", field=models.BooleanField(default=False)
        ),
    ]
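The default-user migration above leaves `akadmin` without a usable password unless one of the environment variables is set: `make_password(None)` returns an unusable hash that never verifies. A small check of that behavior (requires a configured Django settings module):

```python
from django.contrib.auth.hashers import check_password, make_password

usable = make_password("s3cret")
unusable = make_password(None)  # prefixed with "!", can never match

print(check_password("s3cret", usable))    # True
print(check_password("s3cret", unusable))  # False
print(check_password(None, unusable))      # False: unusable means no login
```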
authentik/core/migrations/0004_auto_20200703_2213.py (Normal file, 28 lines)
@ -0,0 +1,28 @@
# Generated by Django 3.0.7 on 2020-07-03 22:13

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0003_default_user"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="application",
            options={
                "verbose_name": "Application",
                "verbose_name_plural": "Applications",
            },
        ),
        migrations.AlterModelOptions(
            name="user",
            options={
                "permissions": (("reset_user_password", "Reset Password"),),
                "verbose_name": "User",
                "verbose_name_plural": "Users",
            },
        ),
    ]
authentik/core/migrations/0005_token_intent.py (Normal file, 24 lines)
@ -0,0 +1,24 @@
# Generated by Django 3.0.7 on 2020-07-05 21:11

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0004_auto_20200703_2213"),
    ]

    operations = [
        migrations.AddField(
            model_name="token",
            name="intent",
            field=models.TextField(
                choices=[
                    ("verification", "Intent Verification"),
                    ("api", "Intent Api"),
                ],
                default="verification",
            ),
        ),
    ]
authentik/core/migrations/0006_auto_20200709_1608.py (Normal file, 18 lines)
@ -0,0 +1,18 @@
# Generated by Django 3.0.8 on 2020-07-09 16:08

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0005_token_intent"),
    ]

    operations = [
        migrations.AlterField(
            model_name="source",
            name="slug",
            field=models.SlugField(help_text="Internal source name, used in URLs.", unique=True),
        ),
    ]
authentik/core/migrations/0007_auto_20200815_1841.py (Normal file, 18 lines)
@ -0,0 +1,18 @@
# Generated by Django 3.1 on 2020-08-15 18:41

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0006_auto_20200709_1608"),
    ]

    operations = [
        migrations.AlterField(
            model_name="user",
            name="first_name",
            field=models.CharField(blank=True, max_length=150, verbose_name="first name"),
        ),
    ]
authentik/core/migrations/0008_auto_20200824_1532.py (Normal file, 36 lines)
@ -0,0 +1,36 @@
# Generated by Django 3.1 on 2020-08-24 15:32

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("auth", "0012_alter_user_first_name_max_length"),
        ("authentik_core", "0007_auto_20200815_1841"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="user",
            name="groups",
            field=models.ManyToManyField(to="authentik_core.Group"),
        ),
        migrations.AddField(
            model_name="user",
            name="groups",
            field=models.ManyToManyField(
                blank=True,
                help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
                related_name="user_set",
                related_query_name="user",
                to="auth.Group",
                verbose_name="groups",
            ),
        ),
        migrations.AddField(
            model_name="user",
            name="pb_groups",
            field=models.ManyToManyField(to="authentik_core.Group"),
        ),
    ]
authentik/core/migrations/0009_group_is_superuser.py (Normal file, 59 lines)
@ -0,0 +1,59 @@
# Generated by Django 3.1.1 on 2020-09-15 19:53
from django.apps.registry import Apps
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor

import authentik.core.models


def create_default_admin_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    db_alias = schema_editor.connection.alias
    Group = apps.get_model("authentik_core", "Group")
    User = apps.get_model("authentik_core", "User")

    # Creates a default admin group
    group, _ = Group.objects.using(db_alias).get_or_create(
        is_superuser=True,
        defaults={
            "name": "authentik Admins",
        },
    )
    group.users.set(User.objects.filter(username="akadmin"))
    group.save()


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0008_auto_20200824_1532"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="user",
            name="is_superuser",
        ),
        migrations.RemoveField(
            model_name="user",
            name="is_staff",
        ),
        migrations.AlterField(
            model_name="user",
            name="pb_groups",
            field=models.ManyToManyField(related_name="users", to="authentik_core.Group"),
        ),
        migrations.AddField(
            model_name="group",
            name="is_superuser",
            field=models.BooleanField(
                default=False, help_text="Users added to this group will be superusers."
            ),
        ),
        migrations.RunPython(create_default_admin_group),
        migrations.AlterModelManagers(
            name="user",
            managers=[
                ("objects", authentik.core.models.UserManager()),
            ],
        ),
    ]
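After this migration, `is_superuser` lives on the group, so a user's effective superuser status is derived from membership rather than stored on the user row. authentik exposes this via the user model (not part of this diff); roughly, and purely as an illustration:

```python
class UserSketch:
    """Stand-in showing group-derived superuser status; not authentik's model."""

    def __init__(self, groups: list) -> None:
        self.ak_groups = groups

    @property
    def is_superuser(self) -> bool:
        # True as soon as any group the user belongs to is flagged is_superuser
        return any(group.is_superuser for group in self.ak_groups)
```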
authentik/core/migrations/0010_auto_20200917_1021.py (Normal file, 24 lines)
@ -0,0 +1,24 @@
# Generated by Django 3.1.1 on 2020-09-17 10:21

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0009_group_is_superuser"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="user",
            options={
                "permissions": (
                    ("reset_user_password", "Reset Password"),
                    ("impersonate", "Can impersonate other users"),
                ),
                "verbose_name": "User",
                "verbose_name_plural": "Users",
            },
        ),
    ]
authentik/core/migrations/0011_provider_name_temp.py (Normal file, 19 lines)
@ -0,0 +1,19 @@
# Generated by Django 3.1.2 on 2020-10-03 17:34

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0010_auto_20200917_1021"),
    ]

    operations = [
        migrations.AddField(
            model_name="provider",
            name="name_temp",
            field=models.TextField(default=""),
            preserve_default=False,
        ),
    ]
authentik/core/migrations/0012_auto_20201003_1737.py (Normal file, 20 lines)
@ -0,0 +1,20 @@
# Generated by Django 3.1.2 on 2020-10-03 17:37

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0011_provider_name_temp"),
        ("authentik_providers_oauth2", "0006_remove_oauth2provider_name"),
        ("authentik_providers_saml", "0006_remove_samlprovider_name"),
    ]

    operations = [
        migrations.RenameField(
            model_name="provider",
            old_name="name_temp",
            new_name="name",
        ),
    ]
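Migrations 0011 and 0012 together perform a two-step field move: a temporary column is added first so the provider apps can drop their own `name` columns (note the cross-app dependencies above) before the rename lands. Collapsed into one illustrative operations list, not the actual migrations:

```python
from django.db import migrations, models

operations = [
    migrations.AddField(
        model_name="provider",
        name="name_temp",
        field=models.TextField(default=""),
        preserve_default=False,
    ),
    # ... per-provider `name` fields are removed in their own apps here ...
    migrations.RenameField(
        model_name="provider",
        old_name="name_temp",
        new_name="name",
    ),
]
```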
@ -17,6 +17,7 @@ def set_default_token_key(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):


class Migration(migrations.Migration):

    replaces = [
        ("authentik_core", "0012_auto_20201003_1737"),
        ("authentik_core", "0013_auto_20201003_2132"),
Some files were not shown because too many files have changed in this diff.