Compare commits


3 Commits

SHA1        Message                                                   Date
4d791f4fef  release: 2022.12.3                                        2023-03-02 20:18:21 +01:00
c03a069f02  security: fix CVE-2023-26481 (#4832)                      2023-03-02 20:16:01 +01:00
            fix CVE-2023-26481
            Signed-off-by: Jens Langhammer <jens@goauthentik.io>
040adb8ce7  website: always show build version in version dropdown    2023-02-16 14:40:07 +01:00
            Signed-off-by: Jens Langhammer <jens@goauthentik.io>
            #3940
            # Conflicts:
            #	website/docusaurus.config.js
1233 changed files with 32235 additions and 59142 deletions
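
The same summary can be reproduced locally with plain git. The following is a minimal sketch, not the exact command used by this page: `<base>` is a placeholder for the left-hand ref of the compare (not shown here), and it assumes a clone of goauthentik/authentik with both refs fetched.

```shell
# Clone the repository (assumes the public goauthentik/authentik mirror on GitHub)
git clone https://github.com/goauthentik/authentik.git
cd authentik

# List the commits contained in the compare range, newest first
git log --oneline <base>..4d791f4fef

# Summarize per-file changes and the overall additions/deletions count
git diff --stat <base>..4d791f4fef
```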


@ -1,5 +1,5 @@
[bumpversion]
current_version = 2023.4.1
current_version = 2022.12.3
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)


@ -6,4 +6,3 @@ dist/**
build/**
build_docs/**
Dockerfile
authentik/enterprise


@ -7,14 +7,8 @@ charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.html]
[html]
indent_size = 2
[*.{yaml,yml}]
[yaml]
indent_size = 2
[*.go]
indent_style = tab
[Makefile]
indent_style = tab


@ -1,9 +1,10 @@
---
name: Bug report
about: Create a report to help us improve
title: ""
title: ''
labels: bug
assignees: ""
assignees: ''
---
**Describe the bug**
@ -11,7 +12,6 @@ A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
@ -27,9 +27,8 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
**Additional context**
Add any other context about the problem here.


@ -1,9 +1,10 @@
---
name: Feature request
about: Suggest an idea for this project
title: ""
title: ''
labels: enhancement
assignees: ""
assignees: ''
---
**Is your feature request related to a problem? Please describe.**


@ -1,9 +1,10 @@
---
name: Question
about: Ask a question about a feature or specific configuration
title: ""
title: ''
labels: question
assignees: ""
assignees: ''
---
**Describe your question/**
@ -19,9 +20,8 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
**Additional context**
Add any other context about the problem here.


@ -1,5 +1,5 @@
name: "Comment usage instructions on PRs"
description: "Comment usage instructions on PRs"
name: 'Comment usage instructions on PRs'
description: 'Comment usage instructions on PRs'
inputs:
tag:
@ -17,7 +17,7 @@ runs:
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: "github-actions[bot]"
comment-author: 'github-actions[bot]'
body-includes: authentik PR Installation instructions
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v2
@ -38,14 +38,6 @@ runs:
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
For arm64, use these values:
```shell
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
AUTHENTIK_TAG=${{ inputs.tag }}-arm64
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
<details>
@ -62,17 +54,6 @@ runs:
tag: ${{ inputs.tag }}
```
For arm64, use these values:
```yaml
authentik:
outposts:
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
image:
repository: ghcr.io/goauthentik/dev-server
tag: ${{ inputs.tag }}-arm64
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
edit-mode: replace


@ -1,5 +1,5 @@
name: "Prepare docker environment variables"
description: "Prepare docker environment variables"
name: 'Prepare docker environment variables'
description: 'Prepare docker environment variables'
outputs:
shouldBuild:
@ -17,9 +17,6 @@ outputs:
sha:
description: "sha"
value: ${{ steps.ev.outputs.sha }}
shortHash:
description: "shortHash"
value: ${{ steps.ev.outputs.shortHash }}
version:
description: "version"
value: ${{ steps.ev.outputs.version }}
@ -56,7 +53,6 @@ runs:
print("branchNameContainer=%s" % safe_branch_name, file=_output)
print("timestamp=%s" % int(time()), file=_output)
print("sha=%s" % os.environ["GITHUB_SHA"], file=_output)
print("shortHash=%s" % os.environ["GITHUB_SHA"][:7], file=_output)
print("shouldBuild=%s" % should_build, file=_output)
print("version=%s" % version, file=_output)
print("versionFamily=%s" % version_family, file=_output)


@ -1,10 +1,5 @@
name: "Setup authentik testing environment"
description: "Setup authentik testing environment"
inputs:
postgresql_tag:
description: "Optional postgresql image tag"
default: "12"
name: 'Setup authentik testing environment'
description: 'Setup authentik testing environment'
runs:
using: "composite"
@ -18,18 +13,17 @@ runs:
- name: Setup python and restore poetry
uses: actions/setup-python@v3
with:
python-version: "3.11"
cache: "poetry"
python-version: '3.11'
cache: 'poetry'
- name: Setup node
uses: actions/setup-node@v3.1.0
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Setup dependencies
shell: bash
run: |
export PSQL_TAG=${{ inputs.postgresql_tag }}
docker-compose -f .github/actions/setup/docker-compose.yml up -d
poetry env use python3.11
poetry install


@ -1,23 +1,23 @@
version: "3.7"
version: '3.7'
services:
postgresql:
container_name: postgres
image: library/postgres:${PSQL_TAG:-12}
image: library/postgres:12
volumes:
- db-data:/var/lib/postgresql/data
- db-data:/var/lib/postgresql/data
environment:
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
POSTGRES_DB: authentik
ports:
- 5432:5432
- 5432:5432
restart: always
redis:
container_name: redis
image: library/redis
ports:
- 6379:6379
- 6379:6379
restart: always
volumes:

.github/codecov.yml

@ -1,10 +1,3 @@
coverage:
status:
project:
default:
target: auto
# adjust accordingly based on how flaky your tests are
# this allows a 1% drop from the previous base commit coverage
threshold: 1%
notify:
after_n_builds: 3
precision: 2
round: up


@ -1 +0,0 @@
authentic->authentik

.github/dependabot.yml

@ -1,62 +1,62 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "ci:"
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: npm
directory: "/web"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "web:"
- package-ecosystem: npm
directory: "/website"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "website:"
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: docker
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "ci:"
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: npm
directory: "/web"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "web:"
- package-ecosystem: npm
directory: "/website"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "website:"
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: docker
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"


@ -5,20 +5,15 @@ Please check the [Contributing guidelines](https://github.com/goauthentik/authen
-->
# Details
- **Does this resolve an issue?**
Resolves #
* **Does this resolve an issue?**
Resolves #
## Changes
### New Features
- Adds feature which does x, y, and z.
* Adds feature which does x, y, and z.
### Breaking Changes
- Adds breaking change which causes \<issue\>.
* Adds breaking change which causes \<issue\>.
## Additional
Any further notes or comments you want to make.

.github/stale.yml

@ -16,4 +16,3 @@ markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
only: issues


@ -6,11 +6,11 @@ git:
source_language: en
source_file: web/src/locales/en.po
# path expression to translation files, must contain <lang> placeholder
translation_files_expression: "web/src/locales/<lang>.po"
translation_files_expression: 'web/src/locales/<lang>.po'
- filter_type: file
# all supported i18n types: https://docs.transifex.com/formats
file_format: PO
source_language: en
source_file: locale/en/LC_MESSAGES/django.po
# path expression to translation files, must contain <lang> placeholder
translation_files_expression: "locale/<lang>/LC_MESSAGES/django.po"
translation_files_expression: 'locale/<lang>/LC_MESSAGES/django.po'


@ -23,14 +23,12 @@ jobs:
fail-fast: false
matrix:
job:
- bandit
- black
- codespell
- isort
- pending-migrations
- pylint
- black
- isort
- bandit
- pyright
- ruff
- pending-migrations
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@ -61,7 +59,7 @@ jobs:
cp authentik/lib/default.yml local.env.yml
cp -R .github ..
cp -R scripts ..
git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
git checkout $(git describe --abbrev=0 --match 'version/*')
rm -rf .github/ scripts/
mv ../.github ../scripts .
- name: Setup authentik env (ensure stable deps are installed)
@ -81,21 +79,11 @@ jobs:
- name: migrate to latest
run: poetry run python -m lifecycle.migrate
test-unittest:
name: test-unittest - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
psql:
- 11-alpine
- 12-alpine
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
with:
postgresql_tag: ${{ matrix.psql }}
- name: run unittest
run: |
poetry run make test
@ -106,7 +94,6 @@ jobs:
flags: unit
test-integration:
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
@ -115,34 +102,14 @@ jobs:
uses: helm/kind-action@v1.5.0
- name: run integration
run: |
poetry run coverage run manage.py test tests/integration
poetry run make test-integration
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v3
with:
flags: integration
test-e2e:
name: test-e2e (${{ matrix.job.name }})
test-e2e-provider:
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
job:
- name: proxy
glob: tests/e2e/test_provider_proxy*
- name: oauth
glob: tests/e2e/test_provider_oauth2* tests/e2e/test_source_oauth*
- name: oauth-oidc
glob: tests/e2e/test_provider_oidc*
- name: saml
glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml*
- name: ldap
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
- name: radius
glob: tests/e2e/test_provider_radius*
- name: flows
glob: tests/e2e/test_flows*
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
@ -164,7 +131,36 @@ jobs:
npm run build
- name: run e2e
run: |
poetry run coverage run manage.py test ${{ matrix.job.glob }}
poetry run make test-e2e-provider
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v3
with:
flags: e2e
test-e2e-rest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
run: |
docker-compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v3
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web/
run: |
npm ci
make -C .. gen-client-ts
npm run build
- name: run e2e
run: |
poetry run make test-e2e-rest
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v3
@ -177,7 +173,8 @@ jobs:
- test-migrations-from-stable
- test-unittest
- test-integration
- test-e2e
- test-e2e-rest
- test-e2e-provider
runs-on: ubuntu-latest
steps:
- run: echo mark
@ -185,6 +182,11 @@ jobs:
needs: ci-core-mark
runs-on: ubuntu-latest
timeout-minutes: 120
strategy:
fail-fast: false
matrix:
arch:
- 'linux/amd64'
steps:
- uses: actions/checkout@v3
- name: Set up QEMU
@ -203,8 +205,8 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v4
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
@ -212,51 +214,14 @@ jobs:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: ${{ matrix.arch }}
- name: Comment on PR
if: github.event_name == 'pull_request'
continue-on-error: true
uses: ./.github/actions/comment-pr-instructions
with:
tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
build-arm64:
needs: ci-core-mark
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
- name: Login to Container Registry
uses: docker/login-action@v2
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v4
with:
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-arm64
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}-arm64
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: linux/arm64
tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}


@ -15,9 +15,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
- uses: actions/setup-go@v3
with:
go-version-file: "go.mod"
go-version: "^1.17"
- name: Prepare and generate API
run: |
# Create folder structure for go embeds
@ -28,15 +28,13 @@ jobs:
run: make gen-client-go
- name: golangci-lint
uses: golangci/golangci-lint-action@v3
with:
args: --timeout 5000s
test-unittest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
- uses: actions/setup-go@v3
with:
go-version-file: "go.mod"
go-version: "^1.17"
- name: Generate API
run: make gen-client-go
- name: Go unittests
@ -49,7 +47,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- run: echo mark
build-container:
build:
timeout-minutes: 120
needs:
- ci-outpost-mark
@ -59,7 +57,8 @@ jobs:
type:
- proxy
- ldap
- radius
arch:
- 'linux/amd64'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@ -81,20 +80,20 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate API
run: make gen-client-go
- name: Build Docker Image
uses: docker/build-push-action@v4
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
file: ${{ matrix.type }}.Dockerfile
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: linux/amd64,linux/arm64
context: .
build-binary:
platforms: ${{ matrix.arch }}
build-outpost-binary:
timeout-minutes: 120
needs:
- ci-outpost-mark
@ -105,18 +104,17 @@ jobs:
type:
- proxy
- ldap
- radius
goos: [linux]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
- uses: actions/setup-go@v3
with:
go-version-file: "go.mod"
- uses: actions/setup-node@v3.6.0
go-version: "^1.17"
- uses: actions/setup-node@v3.5.1
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Generate API
run: make gen-client-go
@ -131,3 +129,7 @@ jobs:
export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }}
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- uses: actions/upload-artifact@v3
with:
name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
path: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}


@ -15,10 +15,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@ -31,10 +31,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@ -47,10 +47,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@ -63,10 +63,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: |
@ -95,10 +95,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci


@ -15,56 +15,19 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: prettier
working-directory: website/
run: npm run prettier-check
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: test
working-directory: website/
run: npm test
build:
runs-on: ubuntu-latest
name: ${{ matrix.job }}
strategy:
fail-fast: false
matrix:
job:
- build
- build-docs-only
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: build
working-directory: website/
run: npm run ${{ matrix.job }}
ci-website-mark:
needs:
- lint-prettier
- test
- build
runs-on: ubuntu-latest
steps:
- run: echo mark


@ -2,12 +2,12 @@ name: "CodeQL"
on:
push:
branches: [main, "*", next, version*]
branches: [ main, '*', next, version* ]
pull_request:
# The branches below must be a subset of the branches above
branches: [main]
branches: [ main ]
schedule:
- cron: "30 6 * * 5"
- cron: '30 6 * * 5'
jobs:
analyze:
@ -21,40 +21,40 @@ jobs:
strategy:
fail-fast: false
matrix:
language: ["go", "javascript", "python"]
language: [ 'go', 'javascript', 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more:
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2


@ -2,7 +2,7 @@ name: ghcr-retention
on:
schedule:
- cron: "0 0 * * *" # every day at midnight
- cron: '0 0 * * *' # every day at midnight
workflow_dispatch:
jobs:
@ -11,12 +11,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Delete 'dev' containers older than a week
uses: snok/container-retention-policy@v2
uses: sondrelg/container-retention-policy@v1
with:
image-names: dev-server,dev-ldap,dev-proxy
cut-off: One week ago UTC
account-type: org
org-name: goauthentik
untagged-only: false
token: ${{ secrets.BOT_GITHUB_TOKEN }}
token: ${{ secrets.GHCR_CLEANUP_TOKEN }}
skip-tags: gh-next,gh-main


@ -27,11 +27,11 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v4
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
push: ${{ github.event_name == 'release' }}
secrets: |
secrets:
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
tags: |
@ -52,12 +52,11 @@ jobs:
type:
- proxy
- ldap
- radius
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
- uses: actions/setup-go@v3
with:
go-version-file: "go.mod"
go-version: "^1.17"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
@ -76,8 +75,8 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v4
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
push: ${{ github.event_name == 'release' }}
tags: |
@ -89,6 +88,9 @@ jobs:
ghcr.io/goauthentik/${{ matrix.type }}:latest
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
build-args: |
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
build-outpost-binary:
@ -100,18 +102,17 @@ jobs:
type:
- proxy
- ldap
- radius
goos: [linux, darwin]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
- uses: actions/setup-go@v3
with:
go-version-file: "go.mod"
- uses: actions/setup-node@v3.6.0
go-version: "^1.17"
- uses: actions/setup-node@v3.5.1
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Build web
working-directory: web/
@ -173,5 +174,5 @@ jobs:
SENTRY_PROJECT: authentik
with:
version: authentik@${{ steps.ev.outputs.version }}
sourcemaps: "./web/dist"
url_prefix: "~/static/dist"
sourcemaps: './web/dist'
url_prefix: '~/static/dist'


@ -3,7 +3,7 @@ name: authentik-on-tag
on:
push:
tags:
- "version/*"
- 'version/*'
jobs:
build:
@ -26,14 +26,14 @@ jobs:
id: get_version
uses: actions/github-script@v6
with:
github-token: ${{ secrets.BOT_GITHUB_TOKEN }}
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
return context.payload.ref.replace(/\/refs\/tags\/version\//, '');
- name: Create Release
id: create_release
uses: actions/create-release@v1.1.4
env:
GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ github.ref }}
release_name: Release ${{ steps.get_version.outputs.result }}


@ -1,12 +1,12 @@
name: authentik-backend-translate-compile
on:
push:
branches: [main]
branches: [ main ]
paths:
- "/locale/"
- '/locale/'
pull_request:
paths:
- "/locale/"
- '/locale/'
workflow_dispatch:
env:
@ -19,17 +19,15 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run compile
run: poetry run ./manage.py compilemessages
- name: Create Pull Request
uses: peter-evans/create-pull-request@v5
uses: peter-evans/create-pull-request@v4
id: cpr
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
token: ${{ secrets.GITHUB_TOKEN }}
branch: compile-backend-translation
commit-message: "core: compile backend translations"
title: "core: compile backend translations"


@ -1,21 +1,19 @@
name: authentik-web-api-publish
on:
push:
branches: [main]
branches: [ main ]
paths:
- "schema.yml"
- 'schema.yml'
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.5.1
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
registry-url: "https://registry.npmjs.org"
node-version: '16'
registry-url: 'https://registry.npmjs.org'
- name: Generate API Client
run: make gen-client-ts
- name: Publish package
@ -30,20 +28,14 @@ jobs:
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- uses: peter-evans/create-pull-request@v5
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
id: cpr
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
token: ${{ secrets.GITHUB_TOKEN }}
branch: update-web-api-client
commit-message: "web: bump API Client version"
title: "web: bump API Client version"
body: "web: bump API Client version"
delete-branch: true
signoff: true
team-reviewers: "@goauthentik/core"
author: authentik bot <github-bot@goauthentik.io>
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash

.gitignore

@ -200,6 +200,3 @@ media/
.idea/
/gen-*/
data/
# Local Netlify folder
.netlify


@ -1,20 +0,0 @@
{
"recommendations": [
"EditorConfig.EditorConfig",
"bashmish.es6-string-css",
"bpruitt-goddard.mermaid-markdown-syntax-highlighting",
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"golang.go",
"Gruntfuggly.todo-tree",
"mechatroner.rainbow-csv",
"ms-python.black-formatter",
"ms-python.isort",
"ms-python.pylint",
"ms-python.python",
"ms-python.vscode-pylance",
"redhat.vscode-yaml",
"Tobermory.es6-string-html",
"unifiedjs.vscode-mdx"
]
}


@ -14,10 +14,7 @@
"webauthn",
"traefik",
"passwordless",
"kubernetes",
"sso",
"slo",
"scim",
"kubernetes"
],
"python.linting.pylintEnabled": true,
"todo-tree.tree.showCountsInTree": true,
@ -47,6 +44,5 @@
"url": "https://github.com/goauthentik/authentik/issues/<num>",
"ignoreCase": false
}
],
"go.testFlags": ["-count=1"]
]
}


@ -20,7 +20,6 @@ The following is a set of guidelines for contributing to authentik and its compo
- [Reporting Bugs](#reporting-bugs)
- [Suggesting Enhancements](#suggesting-enhancements)
- [Your First Code Contribution](#your-first-code-contribution)
- [Help with the Docs](#help-with-the-docs)
- [Pull Requests](#pull-requests)
[Styleguides](#styleguides)
@ -60,18 +59,19 @@ These are the current packages:
authentik
├── admin - Administrative tasks and APIs, no models (Version updates, Metrics, system tasks)
├── api - General API Configuration (Routes, Schema and general API utilities)
├── blueprints - Handle managed models and their state.
├── core - Core authentik functionality, central routes, core Models
├── crypto - Cryptography, currently used to generate and hold Certificates and Private Keys
├── events - Event Log, middleware and signals to generate signals
├── flows - Flows, the FlowPlanner and the FlowExecutor, used for all flows for authentication, authorization, etc
├── lib - Generic library of functions, few dependencies on other packages.
├── managed - Handle managed models and their state.
├── outposts - Configure and deploy outposts on kubernetes and docker.
├── policies - General PolicyEngine
│   ├── dummy - A Dummy policy used for testing
│   ├── event_matcher - Match events based on different criteria
│   ├── expiry - Check when a user's password was last set
│   ├── expression - Execute any arbitrary python code
│   ├── hibp - Check a password against HaveIBeenPwned
│   ├── password - Check a password against several rules
│   └── reputation - Check the user's/client's reputation
├── providers
@ -136,9 +136,6 @@ authentik can be run locally, all though depending on which part you want to wor
This is documented in the [developer docs](https://goauthentik.io/developer-docs/?utm_source=github)
### Help with the Docs
Contributions to the technical documentation are greatly appreciated. Open a PR if you have improvements to make or new content to add. If you have questions or suggestions about the documentation, open an Issue. No contribution is too small.
### Pull Requests
The process described here has several goals:
@ -158,19 +155,12 @@ While the prerequisites above must be satisfied prior to having your pull reques
## Styleguides
### PR naming
- Use the format of `<package>: <verb> <description>`
- See [here](#authentik-packages) for `package`
- Example: `providers/saml2: fix parsing of requests`
### Git Commit Messages
- Use the format of `<package>: <verb> <description>`
- See [here](#authentik-packages) for `package`
- Example: `providers/saml2: fix parsing of requests`
- Reference issues and pull requests liberally after the first line
- Naming of commits within a PR does not need to adhere to the guidelines as we squash merge PRs
### Python Styleguide


@ -1,5 +1,5 @@
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/node:20 as website-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as website-builder
COPY ./website /work/website/
COPY ./blueprints /work/blueprints/
@ -10,7 +10,7 @@ WORKDIR /work/website
RUN npm ci && npm run build-docs-only
# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/node:20 as web-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as web-builder
COPY ./web /work/web/
COPY ./website /work/website/
@ -20,7 +20,7 @@ WORKDIR /work/web
RUN npm ci && npm run build
# Stage 3: Poetry to requirements.txt export
FROM docker.io/python:3.11.3-slim-bullseye AS poetry-locker
FROM docker.io/python:3.11.1-slim-bullseye AS poetry-locker
WORKDIR /work
COPY ./pyproject.toml /work
@ -31,7 +31,7 @@ RUN pip install --no-cache-dir poetry && \
poetry export -f requirements.txt --dev --output requirements-dev.txt
# Stage 4: Build go proxy
FROM docker.io/golang:1.20.3-bullseye AS go-builder
FROM docker.io/golang:1.19.4-bullseye AS go-builder
WORKDIR /work
@ -47,10 +47,9 @@ COPY ./go.sum /work/go.sum
RUN go build -o /work/authentik ./cmd/server/
# Stage 5: MaxMind GeoIP
FROM docker.io/maxmindinc/geoipupdate:v5.0 as geoip
FROM docker.io/maxmindinc/geoipupdate:v4.10 as geoip
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
ENV GEOIPUPDATE_VERBOSE="true"
RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
--mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
@ -58,11 +57,11 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
/bin/sh -c "\
export GEOIPUPDATE_ACCOUNT_ID=$(cat /run/secrets/GEOIPUPDATE_ACCOUNT_ID); \
export GEOIPUPDATE_LICENSE_KEY=$(cat /run/secrets/GEOIPUPDATE_LICENSE_KEY); \
/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0 \
/usr/bin/entry.sh || exit 0 \
"
# Stage 6: Run
FROM docker.io/python:3.11.3-slim-bullseye AS final-image
FROM docker.io/python:3.11.1-slim-bullseye AS final-image
LABEL org.opencontainers.image.url https://goauthentik.io
LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
@ -83,7 +82,7 @@ RUN apt-get update && \
# Required for runtime
apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
# Required for bootstrap & healtcheck
apt-get install -y --no-install-recommends runit && \
apt-get install -y --no-install-recommends curl runit && \
pip install --no-cache-dir -r /requirements.txt && \
apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev && \
apt-get autoremove --purge -y && \
@ -96,13 +95,13 @@ RUN apt-get update && \
COPY ./authentik/ /authentik
COPY ./pyproject.toml /
COPY ./schemas /schemas
COPY ./xml /xml
COPY ./locale /locale
COPY ./tests /tests
COPY ./manage.py /
COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle
COPY --from=go-builder /work/authentik /bin/authentik
COPY --from=go-builder /work/authentik /authentik-proxy
COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=website-builder /work/website/help/ /website/help/


@ -1,11 +1,6 @@
Copyright (c) 2023 Jens Langhammer
MIT License
Portions of this software are licensed as follows:
* All content residing under the "website/" directory of this repository is licensed under "Creative Commons: CC BY-SA 4.0 license".
* All content that resides under the "authentik/enterprise/" directory of this repository, if that directory exists, is licensed under the license defined in "authentik/enterprise/LICENSE".
* All client-side JavaScript (when served directly or after being compiled, arranged, augmented, or combined), is licensed under the "MIT Expat" license.
* All third party components incorporated into the authentik are licensed under the original license provided by the owner of the applicable component.
* Content outside of the above mentioned directories or restrictions above is available under the "MIT" license as defined below.
Copyright (c) 2022 Jens Langhammer
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal


@ -3,24 +3,18 @@ PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.npm_version)
PY_SOURCES = authentik tests scripts lifecycle
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
-I .github/codespell-words.txt \
-S 'web/src/locales/**' \
authentik \
internal \
cmd \
web/src \
website/src \
website/blog \
website/developer-docs \
website/docs \
website/integrations \
website/src
all: lint-fix lint test gen web
test-integration:
coverage run manage.py test tests/integration
test-e2e-provider:
coverage run manage.py test tests/e2e/test_provider*
test-e2e-rest:
coverage run manage.py test tests/e2e/test_flows* tests/e2e/test_source*
test-go:
go test -timeout 0 -v -race -cover ./...
@ -39,19 +33,28 @@ test:
coverage report
lint-fix:
isort authentik $(PY_SOURCES)
black authentik $(PY_SOURCES)
ruff authentik $(PY_SOURCES)
codespell -w $(CODESPELL_ARGS)
isort authentik tests scripts lifecycle
black authentik tests scripts lifecycle
codespell -I .github/codespell-words.txt -S 'web/src/locales/**' -w \
authentik \
internal \
cmd \
web/src \
website/src \
website/docs \
website/developer-docs
lint:
pylint $(PY_SOURCES)
bandit -r $(PY_SOURCES) -x node_modules
pylint authentik tests lifecycle
bandit -r authentik tests lifecycle -x node_modules
golangci-lint run -v
migrate:
python -m lifecycle.migrate
run:
go run -v ./cmd/server/
i18n-extract: i18n-extract-core web-extract
i18n-extract-core:
@ -65,20 +68,15 @@ gen-build:
AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
AUTHENTIK_DEBUG=true ak spectacular --file schema.yml
gen-changelog:
git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
npx prettier --write changelog.md
gen-diff:
git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > old_schema.yml
git show $(shell git describe --abbrev=0):schema.yml > old_schema.yml
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-diff:2.1.0-beta.6 \
docker.io/openapitools/openapi-diff:2.1.0-beta.3 \
--markdown /local/diff.md \
/local/old_schema.yml /local/schema.yml
rm old_schema.yml
npx prettier --write diff.md
gen-clean:
rm -rf web/api/src/
@ -88,7 +86,7 @@ gen-client-ts:
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/gen-ts-api \
@ -101,21 +99,20 @@ gen-client-ts:
\cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api
gen-client-go:
mkdir -p ./gen-go-api ./gen-go-api/templates
wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./gen-go-api/config.yaml
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./gen-go-api/templates/README.mustache
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./gen-go-api/templates/go.mod.mustache
cp schema.yml ./gen-go-api/
wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O config.yaml
mkdir -p templates
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O templates/README.mustache
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O templates/go.mod.mustache
docker run \
--rm -v ${PWD}/gen-go-api:/local \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
-i /local/schema.yml \
-g go \
-o /local/ \
-o /local/gen-go-api \
-c /local/config.yaml
go mod edit -replace goauthentik.io/api/v3=./gen-go-api
rm -rf ./gen-go-api/config.yaml ./gen-go-api/templates/
rm -rf config.yaml ./templates/
gen-dev-config:
python -m scripts.generate_config
@ -173,6 +170,7 @@ website-watch:
# These targets are use by GitHub actions to allow usage of matrix
# which makes the YAML File a lot smaller
PY_SOURCES=authentik tests lifecycle
ci--meta-debug:
python -V
node --version
@ -183,12 +181,6 @@ ci-pylint: ci--meta-debug
ci-black: ci--meta-debug
black --check $(PY_SOURCES)
ci-ruff: ci--meta-debug
ruff check $(PY_SOURCES)
ci-codespell: ci--meta-debug
codespell $(CODESPELL_ARGS) -s
ci-isort: ci--meta-debug
isort --check $(PY_SOURCES)


@ -15,13 +15,13 @@
## What is authentik?
Authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. Authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them.
authentik is an open-source Identity Provider focused on flexibility and versatility. You can use authentik in an existing environment to add support for new protocols. authentik is also a great solution for implementing signup/recovery/etc in your application, so you don't have to deal with it.
## Installation
For small/test setups it is recommended to use Docker Compose; refer to the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github).
For small/test setups it is recommended to use docker-compose, see the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github)
For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github).
For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github)
## Screenshots
@ -32,16 +32,12 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h
## Development
See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github)
See [Development Documentation](https://goauthentik.io/developer-docs/?utm_source=github)
## Security
See [SECURITY.md](SECURITY.md)
## Adoption and Contributions
Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).
## Sponsors
This project is proudly sponsored by:
@ -53,3 +49,11 @@ This project is proudly sponsored by:
</p>
DigitalOcean provides development and testing resources for authentik.
<p>
<a href="https://www.netlify.com">
<img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" />
</a>
</p>
Netlify hosts the [goauthentik.io](https://goauthentik.io) site.


@ -6,8 +6,8 @@ Authentik takes security very seriously. We follow the rules of [responsible dis
| Version | Supported |
| --------- | ------------------ |
| 2023.2.x | :white_check_mark: |
| 2023.3.x | :white_check_mark: |
| 2022.11.x | :white_check_mark: |
| 2022.12.x | :white_check_mark: |
## Reporting a Vulnerability


@ -2,7 +2,7 @@
from os import environ
from typing import Optional
__version__ = "2023.4.1"
__version__ = "2022.12.3"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"


@ -1,7 +1,4 @@
"""authentik administration metrics"""
from datetime import timedelta
from django.db.models.functions import ExtractHour
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import IntegerField, SerializerMethodField
@ -24,44 +21,38 @@ class CoordinateSerializer(PassiveSerializer):
class LoginMetricsSerializer(PassiveSerializer):
"""Login Metrics per 1h"""
logins = SerializerMethodField()
logins_failed = SerializerMethodField()
authorizations = SerializerMethodField()
logins_per_1h = SerializerMethodField()
logins_failed_per_1h = SerializerMethodField()
authorizations_per_1h = SerializerMethodField()
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins(self, _):
"""Get successful logins per 8 hours for the last 7 days"""
def get_logins_per_1h(self, _):
"""Get successful logins per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.LOGIN)
.get_events_per_hour()
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins_failed(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
def get_logins_failed_per_1h(self, _):
"""Get failed logins per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN_FAILED
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.LOGIN_FAILED)
.get_events_per_hour()
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_authorizations(self, _):
"""Get successful authorizations per 8 hours for the last 7 days"""
def get_authorizations_per_1h(self, _):
"""Get successful authorizations per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.AUTHORIZE_APPLICATION
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.AUTHORIZE_APPLICATION)
.get_events_per_hour()
)


@ -18,7 +18,6 @@ from authentik.core.api.utils import PassiveSerializer
from authentik.lib.utils.reflection import get_env
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost
from authentik.tenants.utils import get_tenant
class RuntimeDict(TypedDict):
@ -78,7 +77,7 @@ class SystemSerializer(PassiveSerializer):
def get_tenant(self, request: Request) -> str:
"""Currently active tenant"""
return str(get_tenant(request))
return str(request._request.tenant)
def get_server_time(self, request: Request) -> datetime:
"""Current server time"""
@ -98,14 +97,8 @@ class SystemView(APIView):
permission_classes = [IsAdminUser]
pagination_class = None
filter_backends = []
serializer_class = SystemSerializer
@extend_schema(responses={200: SystemSerializer(many=False)})
def get(self, request: Request) -> Response:
"""Get system information."""
return Response(SystemSerializer(request).data)
@extend_schema(responses={200: SystemSerializer(many=False)})
def post(self, request: Request) -> Response:
"""Get system information."""
return Response(SystemSerializer(request).data)


@ -7,13 +7,7 @@ from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
from rest_framework.decorators import action
from rest_framework.fields import (
CharField,
ChoiceField,
DateTimeField,
ListField,
SerializerMethodField,
)
from rest_framework.fields import CharField, ChoiceField, DateTimeField, ListField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
@ -32,7 +26,6 @@ class TaskSerializer(PassiveSerializer):
task_name = CharField()
task_description = CharField()
task_finish_timestamp = DateTimeField(source="finish_time")
task_duration = SerializerMethodField()
status = ChoiceField(
source="result.status.name",
@ -40,18 +33,13 @@ class TaskSerializer(PassiveSerializer):
)
messages = ListField(source="result.messages")
def get_task_duration(self, instance: TaskInfo) -> int:
"""Get the duration a task took to run"""
return max(instance.finish_timestamp - instance.start_timestamp, 0)
def to_representation(self, instance: TaskInfo):
def to_representation(self, instance):
"""When a new version of authentik adds fields to TaskInfo,
the API will fail with an AttributeError, as the classes
are pickled in cache. In that case, just delete the info"""
try:
return super().to_representation(instance)
# pylint: disable=broad-except
except Exception: # pragma: no cover
except AttributeError: # pragma: no cover
if isinstance(self.instance, list):
for inst in self.instance:
inst.delete()
@ -80,6 +68,7 @@ class TaskViewSet(ViewSet):
),
],
)
# pylint: disable=invalid-name
def retrieve(self, request: Request, pk=None) -> Response:
"""Get a single system task"""
task = TaskInfo.by_name(pk)
@ -110,6 +99,7 @@ class TaskViewSet(ViewSet):
],
)
@action(detail=True, methods=["post"])
# pylint: disable=invalid-name
def retry(self, request: Request, pk=None) -> Response:
"""Retry task"""
task = TaskInfo.by_name(pk)


@ -8,6 +8,7 @@ from authentik.root.monitoring import monitoring_set
@receiver(monitoring_set)
# pylint: disable=unused-argument
def monitoring_set_workers(sender, **kwargs):
"""Set worker gauge"""
count = len(CELERY_APP.control.ping(timeout=0.5))
@ -15,7 +16,8 @@ def monitoring_set_workers(sender, **kwargs):
@receiver(monitoring_set)
# pylint: disable=unused-argument
def monitoring_set_tasks(sender, **kwargs):
"""Set task gauges"""
for task in TaskInfo.all().values():
task.update_metrics()
task.set_prom_metrics()


@ -9,7 +9,6 @@ from authentik.blueprints.tests import reconcile_app
from authentik.core.models import Group, User
from authentik.core.tasks import clean_expired_models
from authentik.events.monitored_tasks import TaskResultStatus
from authentik.lib.generators import generate_id
class TestAdminAPI(TestCase):
@ -17,8 +16,8 @@ class TestAdminAPI(TestCase):
def setUp(self) -> None:
super().setUp()
self.user = User.objects.create(username=generate_id())
self.group = Group.objects.create(name=generate_id(), is_superuser=True)
self.user = User.objects.create(username="test-user")
self.group = Group.objects.create(name="superusers", is_superuser=True)
self.group.users.add(self.user)
self.group.save()
self.client.force_login(self.user)


@ -32,17 +32,7 @@ def validate_auth(header: bytes) -> Optional[str]:
def bearer_auth(raw_header: bytes) -> Optional[User]:
"""raw_header in the Format of `Bearer ....`"""
user = auth_user_lookup(raw_header)
if not user:
return None
if not user.is_active:
raise AuthenticationFailed("Token invalid/expired")
return user
def auth_user_lookup(raw_header: bytes) -> Optional[User]:
"""raw_header in the Format of `Bearer ....`"""
from authentik.providers.oauth2.models import AccessToken
from authentik.providers.oauth2.models import RefreshToken
auth_credentials = validate_auth(raw_header)
if not auth_credentials:
@ -55,8 +45,8 @@ def auth_user_lookup(raw_header: bytes) -> Optional[User]:
CTX_AUTH_VIA.set("api_token")
return key_token.user
# then try to auth via JWT
jwt_token = AccessToken.filter_not_expired(
token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
jwt_token = RefreshToken.filter_not_expired(
refresh_token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
).first()
if jwt_token:
# Double-check scopes, since they are saved in a single string


@ -7,13 +7,82 @@ API Browser - {{ tenant.branding_title }}
{% endblock %}
{% block head %}
<script src="{% static 'dist/standalone/api-browser/index.js' %}?version={{ version }}" type="module"></script>
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
<link rel="icon" href="{{ tenant.branding_favicon }}">
<link rel="shortcut icon" href="{{ tenant.branding_favicon }}">
<script type="module" src="{% static 'dist/rapidoc-min.js' %}"></script>
<script>
function getCookie(name) {
let cookieValue = "";
if (document.cookie && document.cookie !== "") {
const cookies = document.cookie.split(";");
for (let i = 0; i < cookies.length; i++) {
const cookie = cookies[i].trim();
// Does this cookie string begin with the name we want?
if (cookie.substring(0, name.length + 1) === name + "=") {
cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
break;
}
}
}
return cookieValue;
}
window.addEventListener('DOMContentLoaded', (event) => {
const rapidocEl = document.querySelector('rapi-doc');
rapidocEl.addEventListener('before-try', (e) => {
e.detail.request.headers.append('X-authentik-CSRF', getCookie("authentik_csrf"));
});
});
</script>
<style>
img.logo {
width: 100%;
padding: 1rem 0.5rem 1.5rem 0.5rem;
min-height: 48px;
}
</style>
{% endblock %}
{% block body %}
<ak-api-browser schemaPath="{{ path }}"></ak-api-browser>
<rapi-doc
spec-url="{{ path }}"
heading-text=""
theme="light"
render-style="read"
default-schema-tab="schema"
primary-color="#fd4b2d"
nav-bg-color="#212427"
bg-color="#000000"
text-color="#000000"
nav-text-color="#ffffff"
nav-hover-bg-color="#3c3f42"
nav-accent-color="#4f5255"
nav-hover-text-color="#ffffff"
use-path-in-nav-bar="true"
nav-item-spacing="relaxed"
allow-server-selection="false"
show-header="false"
allow-spec-url-load="false"
allow-spec-file-load="false">
<div slot="nav-logo">
<img class="logo" src="{% static 'dist/assets/icons/icon_left_brand.png' %}" />
</div>
</rapi-doc>
<script>
const rapidoc = document.querySelector("rapi-doc");
const matcher = window.matchMedia("(prefers-color-scheme: light)");
const changer = (ev) => {
const style = getComputedStyle(document.documentElement);
let bg, text = "";
if (matcher.matches) {
bg = style.getPropertyValue('--pf-global--BackgroundColor--light-300');
text = style.getPropertyValue('--pf-global--Color--300');
} else {
bg = style.getPropertyValue('--ak-dark-background');
text = style.getPropertyValue('--ak-dark-foreground');
}
rapidoc.attributes.getNamedItem("bg-color").value = bg.trim();
rapidoc.attributes.getNamedItem("text-color").value = text.trim();
rapidoc.requestUpdate();
};
matcher.addEventListener("change", changer);
window.addEventListener("load", changer);
</script>
{% endblock %}


@ -1,19 +1,18 @@
"""Test API Authentication"""
import json
from base64 import b64encode
from django.conf import settings
from django.test import TestCase
from django.utils import timezone
from guardian.shortcuts import get_anonymous_user
from rest_framework.exceptions import AuthenticationFailed
from authentik.api.authentication import bearer_auth
from authentik.blueprints.tests import reconcile_app
from authentik.core.models import USER_ATTRIBUTE_SA, Token, TokenIntents
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.core.tests.utils import create_test_flow
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
from authentik.providers.oauth2.models import OAuth2Provider, RefreshToken
class TestAPIAuth(TestCase):
@ -37,18 +36,9 @@ class TestAPIAuth(TestCase):
def test_bearer_valid(self):
"""Test valid token"""
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=create_test_admin_user())
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=get_anonymous_user())
self.assertEqual(bearer_auth(f"Bearer {token.key}".encode()), token.user)
def test_bearer_valid_deactivated(self):
"""Test valid token"""
user = create_test_admin_user()
user.is_active = False
user.save()
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=user)
with self.assertRaises(AuthenticationFailed):
bearer_auth(f"Bearer {token.key}".encode())
def test_managed_outpost(self):
"""Test managed outpost"""
with self.assertRaises(AuthenticationFailed):
@ -65,28 +55,24 @@ class TestAPIAuth(TestCase):
provider = OAuth2Provider.objects.create(
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
)
refresh = AccessToken.objects.create(
user=create_test_admin_user(),
refresh = RefreshToken.objects.create(
user=get_anonymous_user(),
provider=provider,
token=generate_id(),
auth_time=timezone.now(),
refresh_token=generate_id(),
_scope=SCOPE_AUTHENTIK_API,
_id_token=json.dumps({}),
)
self.assertEqual(bearer_auth(f"Bearer {refresh.token}".encode()), refresh.user)
self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)
def test_jwt_missing_scope(self):
"""Test valid JWT"""
provider = OAuth2Provider.objects.create(
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
)
refresh = AccessToken.objects.create(
user=create_test_admin_user(),
refresh = RefreshToken.objects.create(
user=get_anonymous_user(),
provider=provider,
token=generate_id(),
auth_time=timezone.now(),
refresh_token=generate_id(),
_scope="",
_id_token=json.dumps({}),
)
with self.assertRaises(AuthenticationFailed):
self.assertEqual(bearer_auth(f"Bearer {refresh.token}".encode()), refresh.user)
self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)

View File

@ -4,7 +4,6 @@ from guardian.shortcuts import assign_perm
from rest_framework.test import APITestCase
from authentik.core.models import Application, User
from authentik.lib.generators import generate_id
class TestAPIDecorators(APITestCase):
@ -17,7 +16,7 @@ class TestAPIDecorators(APITestCase):
def test_obj_perm_denied(self):
"""Test object perm denied"""
self.client.force_login(self.user)
app = Application.objects.create(name=generate_id(), slug=generate_id())
app = Application.objects.create(name="denied", slug="denied")
response = self.client.get(
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
)
@ -26,7 +25,7 @@ class TestAPIDecorators(APITestCase):
def test_other_perm_denied(self):
"""Test other perm denied"""
self.client.force_login(self.user)
app = Application.objects.create(name=generate_id(), slug=generate_id())
app = Application.objects.create(name="denied", slug="denied")
assign_perm("authentik_core.view_application", self.user, app)
response = self.client.get(
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})

View File

@ -29,7 +29,6 @@ class Capabilities(models.TextChoices):
CAN_GEO_IP = "can_geo_ip"
CAN_IMPERSONATE = "can_impersonate"
CAN_DEBUG = "can_debug"
IS_ENTERPRISE = "is_enterprise"
class ErrorReportingConfigSerializer(PassiveSerializer):
@ -71,8 +70,6 @@ class ConfigView(APIView):
caps.append(Capabilities.CAN_IMPERSONATE)
if settings.DEBUG: # pragma: no cover
caps.append(Capabilities.CAN_DEBUG)
if "authentik.enterprise" in settings.INSTALLED_APPS:
caps.append(Capabilities.IS_ENTERPRISE)
return caps
def get_config(self) -> ConfigSerializer:

View File

@ -33,7 +33,6 @@ from authentik.flows.api.flows import FlowViewSet
from authentik.flows.api.stages import StageViewSet
from authentik.flows.views.executor import FlowExecutorView
from authentik.flows.views.inspector import FlowInspectorView
from authentik.interfaces.api import InterfaceViewSet
from authentik.outposts.api.outposts import OutpostViewSet
from authentik.outposts.api.service_connections import (
DockerServiceConnectionViewSet,
@ -46,22 +45,16 @@ from authentik.policies.dummy.api import DummyPolicyViewSet
from authentik.policies.event_matcher.api import EventMatcherPolicyViewSet
from authentik.policies.expiry.api import PasswordExpiryPolicyViewSet
from authentik.policies.expression.api import ExpressionPolicyViewSet
from authentik.policies.hibp.api import HaveIBeenPwendPolicyViewSet
from authentik.policies.password.api import PasswordPolicyViewSet
from authentik.policies.reputation.api import ReputationPolicyViewSet, ReputationViewSet
from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet
from authentik.providers.oauth2.api.providers import OAuth2ProviderViewSet
from authentik.providers.oauth2.api.scopes import ScopeMappingViewSet
from authentik.providers.oauth2.api.tokens import (
AccessTokenViewSet,
AuthorizationCodeViewSet,
RefreshTokenViewSet,
)
from authentik.providers.oauth2.api.tokens import AuthorizationCodeViewSet, RefreshTokenViewSet
from authentik.providers.proxy.api import ProxyOutpostConfigViewSet, ProxyProviderViewSet
from authentik.providers.radius.api import RadiusOutpostConfigViewSet, RadiusProviderViewSet
from authentik.providers.saml.api.property_mapping import SAMLPropertyMappingViewSet
from authentik.providers.saml.api.providers import SAMLProviderViewSet
from authentik.providers.scim.api.property_mapping import SCIMMappingViewSet
from authentik.providers.scim.api.providers import SCIMProviderViewSet
from authentik.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
from authentik.sources.oauth.api.source import OAuthSourceViewSet
from authentik.sources.oauth.api.source_connection import UserOAuthSourceConnectionViewSet
@ -124,15 +117,12 @@ router.register("core/user_consent", UserConsentViewSet)
router.register("core/tokens", TokenViewSet)
router.register("core/tenants", TenantViewSet)
router.register("interfaces", InterfaceViewSet)
router.register("outposts/instances", OutpostViewSet)
router.register("outposts/service_connections/all", ServiceConnectionViewSet)
router.register("outposts/service_connections/docker", DockerServiceConnectionViewSet)
router.register("outposts/service_connections/kubernetes", KubernetesServiceConnectionViewSet)
router.register("outposts/proxy", ProxyOutpostConfigViewSet)
router.register("outposts/ldap", LDAPOutpostConfigViewSet)
router.register("outposts/radius", RadiusOutpostConfigViewSet)
router.register("flows/instances", FlowViewSet)
router.register("flows/bindings", FlowStageBindingViewSet)
@ -160,6 +150,7 @@ router.register("policies/all", PolicyViewSet)
router.register("policies/bindings", PolicyBindingViewSet)
router.register("policies/expression", ExpressionPolicyViewSet)
router.register("policies/event_matcher", EventMatcherPolicyViewSet)
router.register("policies/haveibeenpwned", HaveIBeenPwendPolicyViewSet)
router.register("policies/password_expiry", PasswordExpiryPolicyViewSet)
router.register("policies/password", PasswordPolicyViewSet)
router.register("policies/reputation/scores", ReputationViewSet)
@ -170,19 +161,15 @@ router.register("providers/ldap", LDAPProviderViewSet)
router.register("providers/proxy", ProxyProviderViewSet)
router.register("providers/oauth2", OAuth2ProviderViewSet)
router.register("providers/saml", SAMLProviderViewSet)
router.register("providers/scim", SCIMProviderViewSet)
router.register("providers/radius", RadiusProviderViewSet)
router.register("oauth2/authorization_codes", AuthorizationCodeViewSet)
router.register("oauth2/refresh_tokens", RefreshTokenViewSet)
router.register("oauth2/access_tokens", AccessTokenViewSet)
router.register("propertymappings/all", PropertyMappingViewSet)
router.register("propertymappings/ldap", LDAPPropertyMappingViewSet)
router.register("propertymappings/saml", SAMLPropertyMappingViewSet)
router.register("propertymappings/scope", ScopeMappingViewSet)
router.register("propertymappings/notification", NotificationWebhookMappingViewSet)
router.register("propertymappings/scim", SCIMMappingViewSet)
router.register("authenticators/all", DeviceViewSet, basename="device")
router.register("authenticators/duo", DuoDeviceViewSet)

View File

@ -1,5 +1,4 @@
"""Serializer mixin for managed models"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
@ -12,7 +11,6 @@ from rest_framework.viewsets import ModelViewSet
from authentik.api.decorators import permission_required
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer
@ -42,22 +40,8 @@ class BlueprintInstanceSerializer(ModelSerializer):
raise ValidationError(exc) from exc
return path
def validate_content(self, content: str) -> str:
"""Ensure content (if set) is a valid blueprint"""
if content == "":
return content
context = self.instance.context if self.instance else {}
valid, logs = Importer(content, context).validate()
if not valid:
raise ValidationError(_("Failed to validate blueprint"), *[x["msg"] for x in logs])
return content
def validate(self, attrs: dict) -> dict:
if attrs.get("path", "") == "" and attrs.get("content", "") == "":
raise ValidationError(_("Either path or content must be set."))
return super().validate(attrs)
class Meta:
model = BlueprintInstance
fields = [
"pk",
@ -70,7 +54,6 @@ class BlueprintInstanceSerializer(ModelSerializer):
"enabled",
"managed_models",
"metadata",
"content",
]
extra_kwargs = {
"status": {"read_only": True},

View File

@ -55,12 +55,11 @@ class AuthentikBlueprintsConfig(ManagedAppConfig):
"""Load v1 tasks"""
self.import_module("authentik.blueprints.v1.tasks")
def reconcile_blueprints_discovery(self):
def reconcile_blueprints_discover(self):
"""Run blueprint discovery"""
from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints
from authentik.blueprints.v1.tasks import blueprints_discover
blueprints_discovery.delay()
clear_failed_blueprints.delay()
blueprints_discover.delay()
def import_models(self):
super().import_models()

View File

@ -19,8 +19,10 @@ class Command(BaseCommand):
for blueprint_path in options.get("blueprints", []):
content = BlueprintInstance(path=blueprint_path).retrieve()
importer = Importer(content)
valid, _ = importer.validate()
valid, logs = importer.validate()
if not valid:
for log in logs:
getattr(LOGGER, log.pop("log_level"))(**log)
self.stderr.write("blueprint invalid")
sys_exit(1)
importer.apply()

View File

@ -6,6 +6,7 @@ from pathlib import Path
import django.contrib.postgres.fields
from dacite.core import from_dict
from django.apps.registry import Apps
from django.conf import settings
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from yaml import load
@ -14,7 +15,7 @@ from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM
from authentik.lib.config import CONFIG
def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
def check_blueprint_v1_file(BlueprintInstance: type["BlueprintInstance"], path: Path):
"""Check if blueprint should be imported"""
from authentik.blueprints.models import BlueprintInstanceStatus
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
@ -70,6 +71,7 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEdit
class Migration(migrations.Migration):
initial = True
dependencies = [("authentik_flows", "0001_initial")]
@ -84,12 +86,7 @@ class Migration(migrations.Migration):
"managed",
models.TextField(
default=None,
help_text=(
    "Objects which are managed by authentik. These objects are created and"
    " updated automatically. This flag only indicates that an object can"
" be overwritten by migrations. You can still modify the objects via"
" the API, but expect changes to be overwritten in a later update."
),
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
null=True,
unique=True,
verbose_name="Managed by authentik",

View File

@ -1,22 +0,0 @@
# Generated by Django 4.1.5 on 2023-01-10 19:48
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_blueprints", "0001_initial"),
]
operations = [
migrations.AddField(
model_name="blueprintinstance",
name="content",
field=models.TextField(blank=True, default=""),
),
migrations.AlterField(
model_name="blueprintinstance",
name="path",
field=models.TextField(blank=True, default=""),
),
]

View File

@ -1,18 +1,30 @@
"""blueprint models"""
from pathlib import Path
from urllib.parse import urlparse
from uuid import uuid4
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils.translation import gettext_lazy as _
from opencontainers.distribution.reggie import (
NewClient,
WithDebug,
WithDefaultName,
WithDigest,
WithReference,
WithUserAgent,
WithUsernamePassword,
)
from requests.exceptions import RequestException
from rest_framework.serializers import Serializer
from structlog import get_logger
from authentik.blueprints.v1.oci import BlueprintOCIClient, OCIException
from authentik.lib.config import CONFIG
from authentik.lib.models import CreatedUpdatedModel, SerializerModel
from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.http import authentik_user_agent
OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
LOGGER = get_logger()
@ -29,15 +41,18 @@ class ManagedModel(models.Model):
null=True,
verbose_name=_("Managed by authentik"),
help_text=_(
    "Objects which are managed by authentik. These objects are created and updated "
    "automatically. This flag only indicates that an object can be overwritten by "
"migrations. You can still modify the objects via the API, but expect changes "
"to be overwritten in a later update."
(
    "Objects which are managed by authentik. These objects are created and updated "
    "automatically. This flag only indicates that an object can be overwritten by "
"migrations. You can still modify the objects via the API, but expect changes "
"to be overwritten in a later update."
)
),
unique=True,
)
class Meta:
abstract = True
@ -59,8 +74,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
name = models.TextField()
metadata = models.JSONField(default=dict)
path = models.TextField(default="", blank=True)
content = models.TextField(default="", blank=True)
path = models.TextField()
context = models.JSONField(default=dict)
last_applied = models.DateTimeField(auto_now=True)
last_applied_hash = models.TextField()
@ -72,29 +86,60 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
def retrieve_oci(self) -> str:
"""Get blueprint from an OCI registry"""
client = BlueprintOCIClient(self.path.replace("oci://", "https://"))
url = urlparse(self.path)
ref = "latest"
path = url.path[1:]
if ":" in url.path:
path, _, ref = path.partition(":")
client = NewClient(
f"https://{url.hostname}",
WithUserAgent(authentik_user_agent()),
WithUsernamePassword(url.username, url.password),
WithDefaultName(path),
WithDebug(True),
)
LOGGER.info("Fetching OCI manifests for blueprint", instance=self)
manifest_request = client.NewRequest(
"GET",
"/v2/<name>/manifests/<reference>",
WithReference(ref),
).SetHeader("Accept", "application/vnd.oci.image.manifest.v1+json")
try:
manifests = client.fetch_manifests()
return client.fetch_blobs(manifests)
except OCIException as exc:
manifest_response = client.Do(manifest_request)
manifest_response.raise_for_status()
except RequestException as exc:
raise BlueprintRetrievalFailed(exc) from exc
manifest = manifest_response.json()
if "errors" in manifest:
raise BlueprintRetrievalFailed(manifest["errors"])
def retrieve_file(self) -> str:
"""Get blueprint from path"""
blob = None
for layer in manifest.get("layers", []):
if layer.get("mediaType", "") == OCI_MEDIA_TYPE:
blob = layer.get("digest")
LOGGER.debug("Found layer with matching media type", instance=self, blob=blob)
if not blob:
raise BlueprintRetrievalFailed("Blob not found")
blob_request = client.NewRequest(
"GET",
"/v2/<name>/blobs/<digest>",
WithDigest(blob),
)
try:
full_path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
with full_path.open("r", encoding="utf-8") as _file:
return _file.read()
except (IOError, OSError) as exc:
blob_response = client.Do(blob_request)
blob_response.raise_for_status()
return blob_response.text
except RequestException as exc:
raise BlueprintRetrievalFailed(exc) from exc
def retrieve(self) -> str:
"""Retrieve blueprint contents"""
if self.path.startswith("oci://"):
return self.retrieve_oci()
if self.path != "":
return self.retrieve_file()
return self.content
full_path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
with full_path.open("r", encoding="utf-8") as _file:
return _file.read()
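One side of the hunk above dispatches `retrieve()` three ways: an `oci://` path goes to the registry, any other non-empty path is read from the blueprints directory, and otherwise the inline `content` field is returned. A standalone sketch of that order, with stand-in helper bodies rather than the project's implementations:

```python
# Sketch of the three-way retrieval dispatch; the helpers are stand-ins.
def retrieve_oci(path: str) -> str:
    return f"<blueprint fetched from OCI registry at {path}>"

def retrieve_file(path: str) -> str:
    with open(path, "r", encoding="utf-8") as handle:
        return handle.read()

def retrieve(path: str, content: str) -> str:
    if path.startswith("oci://"):
        return retrieve_oci(path)   # remote registry takes priority
    if path != "":
        return retrieve_file(path)  # then a file inside the blueprints directory
    return content                  # otherwise the inline content field
```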
@property
def serializer(self) -> Serializer:
@ -106,6 +151,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
return f"Blueprint Instance {self.name}"
class Meta:
verbose_name = _("Blueprint Instance")
verbose_name_plural = _("Blueprint Instances")
unique_together = (

View File

@ -5,13 +5,8 @@ from authentik.lib.utils.time import fqdn_rand
CELERY_BEAT_SCHEDULE = {
"blueprints_v1_discover": {
"task": "authentik.blueprints.v1.tasks.blueprints_discovery",
"task": "authentik.blueprints.v1.tasks.blueprints_discover",
"schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"),
"options": {"queue": "authentik_scheduled"},
},
"blueprints_v1_cleanup": {
"task": "authentik.blueprints.v1.tasks.clear_failed_blueprints",
"schedule": crontab(minute=fqdn_rand("blueprints_v1_cleanup"), hour="*"),
"options": {"queue": "authentik_scheduled"},
},
}
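Both schedule entries above stagger their minute with `fqdn_rand`, presumably so multiple instances sharing a beat schedule do not all fire at once. A guess at the idea, not the project's implementation:

```python
# Assumption: derive a stable, per-host minute offset from the FQDN and a seed.
from hashlib import sha256
from socket import getfqdn

def fqdn_rand(seed: str, stop: int = 60) -> int:
    digest = sha256(f"{getfqdn()}-{seed}".encode()).hexdigest()
    return int(digest, 16) % stop

print(fqdn_rand("blueprints_v1_discover"))  # same value on every run on this host
```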

View File

@ -1,5 +1,6 @@
"""Blueprint helpers"""
from functools import wraps
from pathlib import Path
from typing import Callable
from django.apps import apps
@ -44,3 +45,13 @@ def reconcile_app(app_name: str):
return wrapper
return wrapper_outer
def load_yaml_fixture(path: str, **kwargs) -> str:
"""Load yaml fixture, optionally formatting it with kwargs"""
with open(Path(__file__).resolve().parent / Path(path), "r", encoding="utf-8") as _fixture:
fixture = _fixture.read()
try:
return fixture % kwargs
except TypeError:
return fixture

View File

@ -4,7 +4,6 @@ entries:
pk: cb954fd4-65a5-4ad9-b1ee-180ee9559cf4
model: authentik_stages_prompt.prompt
attrs:
name: qwerweqrq
field_key: username
label: Username
type: username

View File

@ -3,12 +3,6 @@ context:
foo: bar
policy_property: name
policy_property_value: foo-bar-baz-qux
sequence:
- foo
- bar
mapping:
key1: value
key2: 2
entries:
- model: !Format ["%s", authentik_sources_oauth.oauthsource]
state: !Format ["%s", present]
@ -25,7 +19,7 @@ entries:
[slug, default-source-authentication],
]
enrollment_flow:
!Find [!Format ["%s", authentik_flows.Flow], [slug, default-source-enrollment]]
!Find [authentik_flows.Flow, [slug, default-source-enrollment]]
- attrs:
expression: return True
identifiers:
@ -98,49 +92,6 @@ entries:
]
if_true_simple: !If [!Context foo, true, text]
if_false_simple: !If [null, false, 2]
enumerate_mapping_to_mapping: !Enumerate [
!Context mapping,
MAP,
[!Format ["prefix-%s", !Index 0], !Format ["other-prefix-%s", !Value 0]]
]
enumerate_mapping_to_sequence: !Enumerate [
!Context mapping,
SEQ,
!Format ["prefixed-pair-%s-%s", !Index 0, !Value 0]
]
enumerate_sequence_to_sequence: !Enumerate [
!Context sequence,
SEQ,
!Format ["prefixed-items-%s-%s", !Index 0, !Value 0]
]
enumerate_sequence_to_mapping: !Enumerate [
!Context sequence,
MAP,
[!Format ["index: %d", !Index 0], !Value 0]
]
nested_complex_enumeration: !Enumerate [
!Context sequence,
MAP,
[
!Index 0,
!Enumerate [
!Context mapping,
MAP,
[
!Format ["%s", !Index 0],
[
!Enumerate [!Value 2, SEQ, !Format ["prefixed-%s", !Value 0]],
{
outer_value: !Value 1,
outer_index: !Index 1,
middle_value: !Value 0,
middle_index: !Index 0
}
]
]
]
]
]
identifiers:
name: test
conditions:

View File

@ -2,8 +2,7 @@
from django.test import TransactionTestCase
from requests_mock import Mocker
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
from authentik.blueprints.v1.oci import OCI_MEDIA_TYPE
from authentik.blueprints.models import OCI_MEDIA_TYPE, BlueprintInstance, BlueprintRetrievalFailed
class TestBlueprintOCI(TransactionTestCase):

View File

@ -13,7 +13,7 @@ from authentik.tenants.models import Tenant
class TestPackaged(TransactionTestCase):
"""Empty class, test methods are added dynamically"""
@apply_blueprint("default/default-tenant.yaml")
@apply_blueprint("default/90-default-tenant.yaml")
def test_decorator_static(self):
"""Test @apply_blueprint decorator"""
self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())

View File

@ -3,12 +3,12 @@ from os import environ
from django.test import TransactionTestCase
from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.exporter import FlowExporter
from authentik.blueprints.v1.importer import Importer, transaction_rollback
from authentik.core.models import Group
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
from authentik.policies.expression.models import ExpressionPolicy
from authentik.policies.models import PolicyBinding
from authentik.sources.oauth.models import OAuthSource
@ -24,14 +24,18 @@ class TestBlueprintsV1(TransactionTestCase):
importer = Importer('{"version": 3}')
self.assertFalse(importer.validate()[0])
importer = Importer(
'{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
'"model": "authentik_core.User"}]}'
(
'{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
'"model": "authentik_core.User"}]}'
)
)
self.assertFalse(importer.validate()[0])
importer = Importer(
'{"version": 1, "entries": [{"attrs": {"name": "test"}, '
'"identifiers": {}, '
'"model": "authentik_core.Group"}]}'
(
'{"version": 1, "entries": [{"attrs": {"name": "test"}, '
'"identifiers": {}, '
'"model": "authentik_core.Group"}]}'
)
)
self.assertFalse(importer.validate()[0])
@ -55,9 +59,11 @@ class TestBlueprintsV1(TransactionTestCase):
)
importer = Importer(
'{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
'{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
'["other_value"]}}, "model": "authentik_core.Group"}]}'
(
'{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
'{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
'["other_value"]}}, "model": "authentik_core.Group"}]}'
)
)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
@ -113,14 +119,14 @@ class TestBlueprintsV1(TransactionTestCase):
"""Test export and import it twice"""
count_initial = Prompt.objects.filter(field_key="username").count()
importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
count_before = Prompt.objects.filter(field_key="username").count()
self.assertEqual(count_initial + 1, count_before)
importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
self.assertTrue(importer.apply())
self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)
@ -130,7 +136,7 @@ class TestBlueprintsV1(TransactionTestCase):
ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
Group.objects.filter(name="test").delete()
environ["foo"] = generate_id()
importer = Importer(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
importer = Importer(load_yaml_fixture("fixtures/tags.yaml"), {"bar": "baz"})
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()
@ -156,61 +162,6 @@ class TestBlueprintsV1(TransactionTestCase):
"if_false_complex": ["list", "with", "items", "foo-bar"],
"if_true_simple": True,
"if_false_simple": 2,
"enumerate_mapping_to_mapping": {
"prefix-key1": "other-prefix-value",
"prefix-key2": "other-prefix-2",
},
"enumerate_mapping_to_sequence": [
"prefixed-pair-key1-value",
"prefixed-pair-key2-2",
],
"enumerate_sequence_to_sequence": [
"prefixed-items-0-foo",
"prefixed-items-1-bar",
],
"enumerate_sequence_to_mapping": {"index: 0": "foo", "index: 1": "bar"},
"nested_complex_enumeration": {
"0": {
"key1": [
["prefixed-f", "prefixed-o", "prefixed-o"],
{
"outer_value": "foo",
"outer_index": 0,
"middle_value": "value",
"middle_index": "key1",
},
],
"key2": [
["prefixed-f", "prefixed-o", "prefixed-o"],
{
"outer_value": "foo",
"outer_index": 0,
"middle_value": 2,
"middle_index": "key2",
},
],
},
"1": {
"key1": [
["prefixed-b", "prefixed-a", "prefixed-r"],
{
"outer_value": "bar",
"outer_index": 1,
"middle_value": "value",
"middle_index": "key1",
},
],
"key2": [
["prefixed-b", "prefixed-a", "prefixed-r"],
{
"outer_value": "bar",
"outer_index": 1,
"middle_value": 2,
"middle_index": "key2",
},
],
},
},
}
)
)
@ -256,21 +207,15 @@ class TestBlueprintsV1(TransactionTestCase):
with transaction_rollback():
# First stage fields
username_prompt = Prompt.objects.create(
name=generate_id(),
field_key="username",
label="Username",
order=0,
type=FieldTypes.TEXT,
field_key="username", label="Username", order=0, type=FieldTypes.TEXT
)
password = Prompt.objects.create(
name=generate_id(),
field_key="password",
label="Password",
order=1,
type=FieldTypes.PASSWORD,
)
password_repeat = Prompt.objects.create(
name=generate_id(),
field_key="password_repeat",
label="Password (repeat)",
order=2,

View File

@ -43,28 +43,3 @@ class TestBlueprintsV1API(APITestCase):
"6871c0003f5c07be5c3316d9d4a08444bd8fed1b3f03294e51e44522"
),
)
def test_api_blank(self):
"""Test blank"""
res = self.client.post(
reverse("authentik_api:blueprintinstance-list"),
data={
"name": "foo",
},
)
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(
res.content.decode(), {"non_field_errors": ["Either path or content must be set."]}
)
def test_api_content(self):
    """Test content"""
res = self.client.post(
reverse("authentik_api:blueprintinstance-list"),
data={
"name": "foo",
"content": '{"version": 3}',
},
)
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(res.content.decode(), {"content": ["Failed to validate blueprint"]})

View File

@ -1,10 +1,10 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase
from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.importer import Importer
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
class TestBlueprintsV1Conditions(TransactionTestCase):
@ -14,7 +14,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
"""Test conditions fulfilled"""
flow_slug1 = generate_id()
flow_slug2 = generate_id()
import_yaml = load_fixture(
import_yaml = load_yaml_fixture(
"fixtures/conditions_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
)
@ -31,7 +31,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
"""Test conditions not fulfilled"""
flow_slug1 = generate_id()
flow_slug2 = generate_id()
import_yaml = load_fixture(
import_yaml = load_yaml_fixture(
"fixtures/conditions_not_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
)

View File

@ -1,10 +1,10 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase
from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.importer import Importer
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
class TestBlueprintsV1State(TransactionTestCase):
@ -13,7 +13,7 @@ class TestBlueprintsV1State(TransactionTestCase):
def test_state_present(self):
"""Test state present"""
flow_slug = generate_id()
import_yaml = load_fixture("fixtures/state_present.yaml", id=flow_slug)
import_yaml = load_yaml_fixture("fixtures/state_present.yaml", id=flow_slug)
importer = Importer(import_yaml)
self.assertTrue(importer.validate()[0])
@ -39,7 +39,7 @@ class TestBlueprintsV1State(TransactionTestCase):
def test_state_created(self):
"""Test state created"""
flow_slug = generate_id()
import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)
import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)
importer = Importer(import_yaml)
self.assertTrue(importer.validate()[0])
@ -65,7 +65,7 @@ class TestBlueprintsV1State(TransactionTestCase):
def test_state_absent(self):
"""Test state absent"""
flow_slug = generate_id()
import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)
import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)
importer = Importer(import_yaml)
self.assertTrue(importer.validate()[0])
@ -74,7 +74,7 @@ class TestBlueprintsV1State(TransactionTestCase):
flow: Flow = Flow.objects.filter(slug=flow_slug).first()
self.assertEqual(flow.slug, flow_slug)
import_yaml = load_fixture("fixtures/state_absent.yaml", id=flow_slug)
import_yaml = load_yaml_fixture("fixtures/state_absent.yaml", id=flow_slug)
importer = Importer(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())

View File

@ -6,7 +6,7 @@ from django.test import TransactionTestCase
from yaml import dump
from authentik.blueprints.models import BlueprintInstance, BlueprintInstanceStatus
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discovery, blueprints_find
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discover, blueprints_find
from authentik.lib.config import CONFIG
from authentik.lib.generators import generate_id
@ -53,7 +53,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
file.seek(0)
file_hash = sha512(file.read().encode()).hexdigest()
file.flush()
blueprints_discovery() # pylint: disable=no-value-for-parameter
blueprints_discover() # pylint: disable=no-value-for-parameter
instance = BlueprintInstance.objects.filter(name=blueprint_id).first()
self.assertEqual(instance.last_applied_hash, file_hash)
self.assertEqual(
@ -81,7 +81,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
)
)
file.flush()
blueprints_discovery() # pylint: disable=no-value-for-parameter
blueprints_discover() # pylint: disable=no-value-for-parameter
blueprint = BlueprintInstance.objects.filter(name="foo").first()
self.assertEqual(
blueprint.last_applied_hash,
@ -106,7 +106,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
)
)
file.flush()
blueprints_discovery() # pylint: disable=no-value-for-parameter
blueprints_discover() # pylint: disable=no-value-for-parameter
blueprint.refresh_from_db()
self.assertEqual(
blueprint.last_applied_hash,

View File

@ -1,15 +1,13 @@
"""transfer common classes"""
from collections import OrderedDict
from copy import copy
from dataclasses import asdict, dataclass, field, is_dataclass
from enum import Enum
from functools import reduce
from operator import ixor
from os import getenv
from typing import Any, Iterable, Literal, Mapping, Optional, Union
from typing import Any, Literal, Optional, Union
from uuid import UUID
from deepmerge import always_merger
from django.apps import apps
from django.db.models import Model, Q
from rest_framework.fields import Field
@ -66,13 +64,11 @@ class BlueprintEntry:
identifiers: dict[str, Any] = field(default_factory=dict)
attrs: Optional[dict[str, Any]] = field(default_factory=dict)
# pylint: disable=invalid-name
id: Optional[str] = None
_state: BlueprintEntryState = field(default_factory=BlueprintEntryState)
def __post_init__(self, *args, **kwargs) -> None:
self.__tag_contexts: list["YAMLTagContext"] = []
@staticmethod
def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry":
"""Convert a SerializerModel instance to a blueprint Entry"""
@ -89,46 +85,17 @@ class BlueprintEntry:
attrs=all_attrs,
)
def _get_tag_context(
self,
depth: int = 0,
context_tag_type: Optional[type["YAMLTagContext"] | tuple["YAMLTagContext", ...]] = None,
) -> "YAMLTagContext":
"""Get a YAMLTagContext object located at a certain depth in the tag tree"""
if depth < 0:
raise ValueError("depth must be a positive number or zero")
if context_tag_type:
contexts = [x for x in self.__tag_contexts if isinstance(x, context_tag_type)]
else:
contexts = self.__tag_contexts
try:
return contexts[-(depth + 1)]
except IndexError:
raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}")
def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any:
"""Check if we have any special tags that need handling"""
val = copy(value)
if isinstance(value, YAMLTagContext):
self.__tag_contexts.append(value)
if isinstance(value, YAMLTag):
val = value.resolve(self, blueprint)
return value.resolve(self, blueprint)
if isinstance(value, dict):
for key, inner_value in value.items():
val[key] = self.tag_resolver(inner_value, blueprint)
value[key] = self.tag_resolver(inner_value, blueprint)
if isinstance(value, list):
for idx, inner_value in enumerate(value):
val[idx] = self.tag_resolver(inner_value, blueprint)
if isinstance(value, YAMLTagContext):
self.__tag_contexts.pop()
return val
value[idx] = self.tag_resolver(inner_value, blueprint)
return value
def get_attrs(self, blueprint: "Blueprint") -> dict[str, Any]:
"""Get attributes of this entry, with all yaml tags resolved"""
@ -178,19 +145,12 @@ class YAMLTag:
raise NotImplementedError
class YAMLTagContext:
"""Base class for all YAML Tag Contexts"""
def get_context(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
"""Implement yaml tag context logic"""
raise NotImplementedError
class KeyOf(YAMLTag):
"""Reference another object by their ID"""
id_from: str
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
super().__init__()
self.id_from = node.value
@ -217,6 +177,7 @@ class Env(YAMLTag):
key: str
default: Optional[Any]
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
super().__init__()
self.default = None
@ -236,6 +197,7 @@ class Context(YAMLTag):
key: str
default: Optional[Any]
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
super().__init__()
self.default = None
@ -258,6 +220,7 @@ class Format(YAMLTag):
format_string: str
args: list[Any]
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.format_string = node.value[0].value
@ -282,12 +245,15 @@ class Format(YAMLTag):
class Find(YAMLTag):
"""Find any object"""
model_name: str | YAMLTag
model_name: str
conditions: list[list]
model_class: type[Model]
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.model_name = loader.construct_object(node.value[0])
self.model_name = node.value[0].value
self.model_class = apps.get_model(*self.model_name.split("."))
self.conditions = []
for raw_node in node.value[1:]:
values = []
@ -296,13 +262,6 @@ class Find(YAMLTag):
self.conditions.append(values)
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
if isinstance(self.model_name, YAMLTag):
model_name = self.model_name.resolve(entry, blueprint)
else:
model_name = self.model_name
model_class = apps.get_model(*model_name.split("."))
query = Q()
for cond in self.conditions:
if isinstance(cond[0], YAMLTag):
@ -314,7 +273,7 @@ class Find(YAMLTag):
else:
query_value = cond[1]
query &= Q(**{query_key: query_value})
instance = model_class.objects.filter(query).first()
instance = self.model_class.objects.filter(query).first()
if instance:
return instance.pk
return None
@ -337,6 +296,7 @@ class Condition(YAMLTag):
"XNOR": lambda args: not (reduce(ixor, args) if len(args) > 1 else args[0]),
}
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.mode = node.value[0].value
@ -369,6 +329,7 @@ class If(YAMLTag):
when_true: Any
when_false: Any
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.condition = loader.construct_object(node.value[0])
@ -390,133 +351,6 @@ class If(YAMLTag):
raise EntryInvalidError(exc)
class Enumerate(YAMLTag, YAMLTagContext):
"""Iterate over an iterable."""
iterable: YAMLTag | Iterable
item_body: Any
output_body: Literal["SEQ", "MAP"]
_OUTPUT_BODIES = {
"SEQ": (list, lambda a, b: [*a, b]),
"MAP": (
dict,
lambda a, b: always_merger.merge(
a, {b[0]: b[1]} if isinstance(b, (tuple, list)) else b
),
),
}
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.iterable = loader.construct_object(node.value[0])
self.output_body = node.value[1].value
self.item_body = loader.construct_object(node.value[2])
self.__current_context: tuple[Any, Any] = tuple()
def get_context(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
return self.__current_context
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
if isinstance(self.iterable, EnumeratedItem) and self.iterable.depth == 0:
raise EntryInvalidError(
f"{self.__class__.__name__} tag's iterable references this tag's context. "
"This is a noop. Check you are setting depth bigger than 0."
)
if isinstance(self.iterable, YAMLTag):
iterable = self.iterable.resolve(entry, blueprint)
else:
iterable = self.iterable
if not isinstance(iterable, Iterable):
raise EntryInvalidError(
f"{self.__class__.__name__}'s iterable must be an iterable "
"such as a sequence or a mapping"
)
if isinstance(iterable, Mapping):
iterable = tuple(iterable.items())
else:
iterable = tuple(enumerate(iterable))
try:
output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()]
except KeyError as exc:
raise EntryInvalidError(exc)
result = output_class()
self.__current_context = tuple()
try:
for item in iterable:
self.__current_context = item
resolved_body = entry.tag_resolver(self.item_body, blueprint)
result = add_fn(result, resolved_body)
if not isinstance(result, output_class):
raise EntryInvalidError(
f"Invalid {self.__class__.__name__} item found: {resolved_body}"
)
finally:
self.__current_context = tuple()
return result
class EnumeratedItem(YAMLTag):
"""Get the current item value and index provided by an Enumerate tag context"""
depth: int
_SUPPORTED_CONTEXT_TAGS = (Enumerate,)
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
super().__init__()
self.depth = int(node.value)
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
try:
context_tag: Enumerate = entry._get_tag_context(
depth=self.depth,
context_tag_type=EnumeratedItem._SUPPORTED_CONTEXT_TAGS,
)
except ValueError as exc:
if self.depth == 0:
raise EntryInvalidError(
f"{self.__class__.__name__} tags are only usable "
f"inside an {Enumerate.__name__} tag"
)
raise EntryInvalidError(f"{self.__class__.__name__} tag: {exc}")
return context_tag.get_context(entry, blueprint)
class Index(EnumeratedItem):
"""Get the current item index provided by an Enumerate tag context"""
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
context = super().resolve(entry, blueprint)
try:
return context[0]
except IndexError: # pragma: no cover
raise EntryInvalidError(f"Empty/invalid context: {context}")
class Value(EnumeratedItem):
"""Get the current item value provided by an Enumerate tag context"""
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
context = super().resolve(entry, blueprint)
try:
return context[1]
except IndexError: # pragma: no cover
raise EntryInvalidError(f"Empty/invalid context: {context}")
class BlueprintDumper(SafeDumper):
"""Dump dataclasses to yaml"""
@ -560,9 +394,6 @@ class BlueprintLoader(SafeLoader):
self.add_constructor("!Condition", Condition)
self.add_constructor("!If", If)
self.add_constructor("!Env", Env)
self.add_constructor("!Enumerate", Enumerate)
self.add_constructor("!Value", Value)
self.add_constructor("!Index", Index)
class EntryInvalidError(SentryIgnoredException):
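The removed `add_constructor` calls above are how `BlueprintLoader` wires each `!Tag` to its class. A self-contained sketch of that registration pattern, using a toy `!Upper` tag instead of the project's tags:

```python
# Minimal PyYAML custom-tag registration, mirroring the pattern above.
from yaml import SafeLoader, ScalarNode, load

class Upper:
    """Toy tag: resolves to the upper-cased scalar it wraps."""

    def __init__(self, loader: SafeLoader, node: ScalarNode) -> None:
        self.value = node.value

    def resolve(self) -> str:
        return self.value.upper()

class DemoLoader(SafeLoader):
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.add_constructor("!Upper", Upper)  # same registration style as BlueprintLoader

doc = load("greeting: !Upper hello", Loader=DemoLoader)
print(doc["greeting"].resolve())  # HELLO
```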

View File

@ -7,7 +7,6 @@ from dacite.config import Config
from dacite.core import from_dict
from dacite.exceptions import DaciteError
from deepmerge import always_merger
from django.core.exceptions import FieldError
from django.db import transaction
from django.db.models import Model
from django.db.models.query_utils import Q
@ -40,10 +39,6 @@ from authentik.lib.models import SerializerModel
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel
# Context set when the serializer is created in a blueprint context
# Update website/developer-docs/blueprints/v1/models.md when used
SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"
def is_model_allowed(model: type[Model]) -> bool:
"""Check if model is allowed"""
@ -162,12 +157,7 @@ class Importer:
raise EntryInvalidError(f"Model {model} not allowed")
if issubclass(model, BaseMetaModel):
serializer_class: type[Serializer] = model.serializer()
serializer = serializer_class(
data=entry.get_attrs(self.__import),
context={
SERIALIZER_CONTEXT_BLUEPRINT: entry,
},
)
serializer = serializer_class(data=entry.get_attrs(self.__import))
try:
serializer.is_valid(raise_exception=True)
except ValidationError as exc:
@ -191,10 +181,7 @@ class Importer:
if not query:
raise EntryInvalidError("No or invalid identifiers")
try:
existing_models = model.objects.filter(query)
except FieldError as exc:
raise EntryInvalidError(f"Invalid identifier field: {exc}") from exc
existing_models = model.objects.filter(query)
serializer_kwargs = {}
model_instance = existing_models.first()
@ -226,12 +213,7 @@ class Importer:
always_merger.merge(full_data, updated_identifiers)
serializer_kwargs["data"] = full_data
serializer: Serializer = model().serializer(
context={
SERIALIZER_CONTEXT_BLUEPRINT: entry,
},
**serializer_kwargs,
)
serializer: Serializer = model().serializer(**serializer_kwargs)
try:
serializer.is_valid(raise_exception=True)
except ValidationError as exc:
@ -249,7 +231,8 @@ class Importer:
raise IntegrityError
except IntegrityError:
return False
self.logger.debug("Committing changes")
else:
self.logger.debug("Committing changes")
return True
def _apply_models(self) -> bool:

View File

@ -3,4 +3,3 @@
LABEL_AUTHENTIK_SYSTEM = "blueprints.goauthentik.io/system"
LABEL_AUTHENTIK_INSTANTIATE = "blueprints.goauthentik.io/instantiate"
LABEL_AUTHENTIK_GENERATED = "blueprints.goauthentik.io/generated"
LABEL_AUTHENTIK_DESCRIPTION = "blueprints.goauthentik.io/description"

View File

@ -56,4 +56,5 @@ class MetaApplyBlueprint(BaseMetaModel):
return ApplyBlueprintMetaSerializer
class Meta:
abstract = True

View File

@ -14,6 +14,7 @@ class BaseMetaModel(Model):
raise NotImplementedError
class Meta:
abstract = True

View File

@ -1,98 +0,0 @@
"""OCI Client"""
from typing import Any
from urllib.parse import ParseResult, urlparse
from opencontainers.distribution.reggie import (
NewClient,
WithDebug,
WithDefaultName,
WithDigest,
WithReference,
WithUserAgent,
WithUsernamePassword,
)
from requests.exceptions import RequestException
from structlog import get_logger
from structlog.stdlib import BoundLogger
from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.http import authentik_user_agent
OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
class OCIException(SentryIgnoredException):
"""OCI-related errors"""
class BlueprintOCIClient:
"""Blueprint OCI Client"""
url: ParseResult
sanitized_url: str
logger: BoundLogger
ref: str
client: NewClient
def __init__(self, url: str) -> None:
self._parse_url(url)
self.logger = get_logger().bind(url=self.sanitized_url)
self.ref = "latest"
path = self.url.path[1:]
if ":" in self.url.path:
path, _, self.ref = path.partition(":")
self.client = NewClient(
f"https://{self.url.hostname}",
WithUserAgent(authentik_user_agent()),
WithUsernamePassword(self.url.username, self.url.password),
WithDefaultName(path),
WithDebug(True),
)
def _parse_url(self, url: str):
self.url = urlparse(url)
netloc = self.url.netloc
if "@" in netloc:
netloc = netloc[netloc.index("@") + 1 :]
self.sanitized_url = self.url._replace(netloc=netloc).geturl()
def fetch_manifests(self) -> dict[str, Any]:
"""Fetch manifests for ref"""
self.logger.info("Fetching OCI manifests for blueprint")
manifest_request = self.client.NewRequest(
"GET",
"/v2/<name>/manifests/<reference>",
WithReference(self.ref),
).SetHeader("Accept", "application/vnd.oci.image.manifest.v1+json")
try:
manifest_response = self.client.Do(manifest_request)
manifest_response.raise_for_status()
except RequestException as exc:
raise OCIException(exc) from exc
manifest = manifest_response.json()
if "errors" in manifest:
raise OCIException(manifest["errors"])
return manifest
def fetch_blobs(self, manifest: dict[str, Any]):
"""Fetch blob based on manifest info"""
blob = None
for layer in manifest.get("layers", []):
if layer.get("mediaType", "") == OCI_MEDIA_TYPE:
blob = layer.get("digest")
self.logger.debug("Found layer with matching media type", blob=blob)
if not blob:
raise OCIException("Blob not found")
blob_request = self.client.NewRequest(
"GET",
"/v2/<name>/blobs/<digest>",
WithDigest(blob),
)
try:
blob_response = self.client.Do(blob_request)
blob_response.raise_for_status()
return blob_response.text
except RequestException as exc:
raise OCIException(exc) from exc

View File

@ -76,7 +76,7 @@ class BlueprintEventHandler(FileSystemEventHandler):
return
if isinstance(event, FileCreatedEvent):
LOGGER.debug("new blueprint file created, starting discovery")
blueprints_discovery.delay()
blueprints_discover.delay()
if isinstance(event, FileModifiedEvent):
path = Path(event.src_path)
root = Path(CONFIG.y("blueprints_dir")).absolute()
@ -122,7 +122,7 @@ def blueprints_find():
)
blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
blueprints.append(blueprint)
LOGGER.debug(
LOGGER.info(
"parsed & loaded blueprint",
hash=file_hash,
path=str(path),
@ -134,7 +134,7 @@ def blueprints_find():
throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
)
@prefill_task
def blueprints_discovery(self: MonitoredTask):
def blueprints_discover(self: MonitoredTask):
"""Find blueprints and check if they need to be created in the database"""
count = 0
for blueprint in blueprints_find():
@ -219,14 +219,3 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
finally:
if instance:
instance.save()
@CELERY_APP.task()
def clear_failed_blueprints():
"""Remove blueprints which couldn't be fetched"""
# Exclude OCI blueprints as those might be temporarily unavailable
for blueprint in BlueprintInstance.objects.exclude(path__startswith="oci://"):
try:
blueprint.retrieve()
except BlueprintRetrievalFailed:
blueprint.delete()

View File

@ -1,10 +1,8 @@
"""Application API Views"""
from datetime import timedelta
from typing import Optional
from django.core.cache import cache
from django.db.models import QuerySet
from django.db.models.functions import ExtractHour
from django.http.response import HttpResponseBadRequest
from django.shortcuts import get_object_or_404
from drf_spectacular.types import OpenApiTypes
@ -37,6 +35,7 @@ from authentik.lib.utils.file import (
from authentik.policies.api.exec import PolicyTestResultSerializer
from authentik.policies.engine import PolicyEngine
from authentik.policies.types import PolicyResult
from authentik.stages.user_login.stage import USER_LOGIN_AUTHENTICATED
LOGGER = get_logger()
@ -62,6 +61,7 @@ class ApplicationSerializer(ModelSerializer):
return app.get_launch_url(user)
class Meta:
model = Application
fields = [
"pk",
@ -185,6 +185,10 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
if superuser_full_list and request.user.is_superuser:
return super().list(request)
# To prevent the user from having to double login when prompt is set to login
# and the user has just signed it. This session variable is set in the UserLoginStage
# and is (quite hackily) removed from the session in applications's API's List method
self.request.session.pop(USER_LOGIN_AUTHENTICATED, None)
queryset = self._filter_queryset_for_list(self.get_queryset())
self.paginate_queryset(queryset)
@ -221,6 +225,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
methods=["POST"],
parser_classes=(MultiPartParser,),
)
# pylint: disable=unused-argument
def set_icon(self, request: Request, slug: str):
"""Set application icon"""
app: Application = self.get_object()
@ -240,6 +245,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
filter_backends=[],
methods=["POST"],
)
# pylint: disable=unused-argument
def set_icon_url(self, request: Request, slug: str):
"""Set application icon (as URL)"""
app: Application = self.get_object()
@ -248,14 +254,15 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
@permission_required("authentik_core.view_application", ["authentik_events.view_event"])
@extend_schema(responses={200: CoordinateSerializer(many=True)})
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=unused-argument
def metrics(self, request: Request, slug: str):
"""Metrics for application logins"""
app = self.get_object()
return Response(
get_objects_for_user(request.user, "authentik_events.view_event").filter(
get_objects_for_user(request.user, "authentik_events.view_event")
.filter(
action=EventAction.AUTHORIZE_APPLICATION,
context__authorized_application__pk=app.pk.hex,
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
.get_events_per_hour()
)
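The "3 data points per day, so 8 hour spans" comment above implies a small calculation; spelled out, assuming the metric window really is the last seven days:

```python
# Assumed reading of the comment: a 7-day window split into 8-hour buckets.
from datetime import timedelta

window = timedelta(days=7)
points = 7 * 3            # three buckets per day over seven days
bucket = window / points  # width of each bucket
print(points, bucket)     # 21 8:00:00
```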

View File

@ -74,6 +74,7 @@ class AuthenticatedSessionSerializer(ModelSerializer):
return GEOIP_READER.city_dict(instance.last_ip)
class Meta:
model = AuthenticatedSession
fields = [
"uuid",

View File

@ -24,10 +24,12 @@ from authentik.core.models import Group, User
class GroupMemberSerializer(ModelSerializer):
"""Stripped down user serializer to show relevant users for groups"""
avatar = CharField(read_only=True)
attributes = JSONField(validators=[is_dict], required=False)
uid = CharField(read_only=True)
class Meta:
model = User
fields = [
"pk",
@ -36,6 +38,7 @@ class GroupMemberSerializer(ModelSerializer):
"is_active",
"last_login",
"email",
"avatar",
"attributes",
"uid",
]
@ -53,6 +56,7 @@ class GroupSerializer(ModelSerializer):
num_pk = IntegerField(read_only=True)
class Meta:
model = Group
fields = [
"pk",
@ -92,6 +96,7 @@ class GroupFilter(FilterSet):
queryset=User.objects.all(),
)
# pylint: disable=unused-argument
def filter_attributes(self, queryset, name, value):
"""Filter attributes by query args"""
try:
@ -110,6 +115,7 @@ class GroupFilter(FilterSet):
return queryset
class Meta:
model = Group
fields = ["name", "is_superuser", "members_by_pk", "attributes", "members_by_username"]
@ -151,6 +157,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
},
)
@action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
# pylint: disable=unused-argument, invalid-name
def add_user(self, request: Request, pk: str) -> Response:
"""Add user to group"""
group: Group = self.get_object()
@ -175,6 +182,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
},
)
@action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
# pylint: disable=unused-argument, invalid-name
def remove_user(self, request: Request, pk: str) -> Response:
    """Remove user from group"""
group: Group = self.get_object()

View File

@ -49,6 +49,7 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri
return expression
class Meta:
model = PropertyMapping
fields = [
"pk",
@ -116,6 +117,7 @@ class PropertyMappingViewSet(
],
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
# pylint: disable=unused-argument, invalid-name
def test(self, request: Request, pk: str) -> Response:
"""Test Property Mapping"""
mapping: PropertyMapping = self.get_object()

View File

@ -31,11 +31,11 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
return obj.component
class Meta:
model = Provider
fields = [
"pk",
"name",
"authentication_flow",
"authorization_flow",
"property_mappings",
"component",
@ -45,9 +45,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
"verbose_name_plural",
"meta_model_name",
]
extra_kwargs = {
"authorization_flow": {"required": True, "allow_null": False},
}
class ProviderViewSet(

View File

@ -46,6 +46,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
return obj.component
class Meta:
model = Source
fields = [
"pk",
@ -101,6 +102,7 @@ class SourceViewSet(
methods=["POST"],
parser_classes=(MultiPartParser,),
)
# pylint: disable=unused-argument
def set_icon(self, request: Request, slug: str):
"""Set source icon"""
source: Source = self.get_object()
@ -120,6 +122,7 @@ class SourceViewSet(
filter_backends=[],
methods=["POST"],
)
# pylint: disable=unused-argument
def set_icon_url(self, request: Request, slug: str):
"""Set source icon (as URL)"""
source: Source = self.get_object()
@ -206,6 +209,5 @@ class UserSourceConnectionViewSet(
queryset = UserSourceConnection.objects.all()
serializer_class = UserSourceConnectionSerializer
permission_classes = [OwnerSuperuserPermissions]
filterset_fields = ["user"]
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
ordering = ["pk"]

View File

@ -16,7 +16,6 @@ from rest_framework.viewsets import ModelViewSet
from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.api.decorators import permission_required
from authentik.blueprints.api import ManagedSerializer
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.users import UserSerializer
from authentik.core.api.utils import PassiveSerializer
@ -30,27 +29,17 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):
user_obj = UserSerializer(required=False, source="user", read_only=True)
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
self.fields["key"] = CharField()
def validate(self, attrs: dict[Any, str]) -> dict[Any, str]:
"""Ensure only API or App password tokens are created."""
request: Request = self.context.get("request")
if not request:
if "user" not in attrs:
raise ValidationError("Missing user")
if "intent" not in attrs:
raise ValidationError("Missing intent")
else:
attrs.setdefault("user", request.user)
request: Request = self.context["request"]
attrs.setdefault("user", request.user)
attrs.setdefault("intent", TokenIntents.INTENT_API)
if attrs.get("intent") not in [TokenIntents.INTENT_API, TokenIntents.INTENT_APP_PASSWORD]:
raise ValidationError(f"Invalid intent {attrs.get('intent')}")
return attrs
class Meta:
model = Token
fields = [
"pk",
@ -123,6 +112,7 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
}
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["GET"])
# pylint: disable=unused-argument
def view_key(self, request: Request, identifier: str) -> Response:
"""Return token key and log access"""
token: Token = self.get_object()
@ -144,11 +134,11 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
},
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
# pylint: disable=unused-argument
def set_key(self, request: Request, identifier: str) -> Response:
"""Set token key. Action is logged as event. `authentik_core.set_token_key` permission
is required."""
"""Return token key and log access"""
token: Token = self.get_object()
key = request.data.get("key")
key = request.POST.get("key")
if not key:
return Response(status=400)
token.key = key
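
The hunk above drops the branch that let blueprint imports set a token's `key` directly: in the newer code the serializer only exposes a writable `key` field when it is instantiated with the blueprint importer's context. A minimal sketch of that conditional-field pattern, with illustrative names and assuming Django REST Framework is installed (this is not authentik's actual serializer):

```python
from rest_framework.fields import CharField
from rest_framework.serializers import Serializer

# Hypothetical context key for this sketch only
SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"

class ExampleTokenSerializer(Serializer):
    identifier = CharField()

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        # Only expose the raw key when the serializer is used by the importer
        if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
            self.fields["key"] = CharField()

# Regular API usage: the key field stays hidden
print("key" in ExampleTokenSerializer().fields)  # False
# Blueprint import usage: the key field becomes writable
print("key" in ExampleTokenSerializer(context={SERIALIZER_CONTEXT_BLUEPRINT: {}}).fields)  # True
```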

View File

@ -53,7 +53,7 @@ class UsedByMixin:
responses={200: UsedBySerializer(many=True)},
)
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=too-many-locals
# pylint: disable=invalid-name, unused-argument, too-many-locals
def used_by(self, request: Request, *args, **kwargs) -> Response:
"""Get a list of all objects that use this object"""
# pyright: reportGeneralTypeIssues=false

View File

@ -4,12 +4,10 @@ from json import loads
from typing import Any, Optional
from django.contrib.auth import update_session_auth_hash
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache
from django.db.models.functions import ExtractHour
from django.db.models.query import QuerySet
from django.db.transaction import atomic
from django.db.utils import IntegrityError
from django.urls import reverse_lazy
from django.utils.http import urlencode
from django.utils.text import slugify
from django.utils.timezone import now
@ -37,13 +35,11 @@ from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import (
BooleanField,
DateTimeField,
ListSerializer,
ModelSerializer,
PrimaryKeyRelatedField,
ValidationError,
)
from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter
from structlog.stdlib import get_logger
@ -60,23 +56,19 @@ from authentik.core.models import (
USER_ATTRIBUTE_SA,
USER_ATTRIBUTE_TOKEN_EXPIRING,
USER_PATH_SERVICE_ACCOUNT,
AuthenticatedSession,
Group,
Token,
TokenIntents,
User,
)
from authentik.events.models import EventAction
from authentik.flows.exceptions import FlowNonApplicableException
from authentik.flows.models import FlowToken
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner
from authentik.flows.views.executor import QS_KEY_TOKEN
from authentik.interfaces.models import InterfaceType
from authentik.interfaces.views import reverse_interface
from authentik.stages.email.models import EmailStage
from authentik.stages.email.tasks import send_mails
from authentik.stages.email.utils import TemplateEmailMessage
from authentik.tenants.utils import get_tenant
from authentik.tenants.models import Tenant
LOGGER = get_logger()
@ -88,6 +80,7 @@ class UserGroupSerializer(ModelSerializer):
parent_name = CharField(source="parent.name", read_only=True)
class Meta:
model = Group
fields = [
"pk",
@ -111,7 +104,7 @@ class UserSerializer(ModelSerializer):
)
groups_obj = ListSerializer(child=UserGroupSerializer(), read_only=True, source="ak_groups")
uid = CharField(read_only=True)
username = CharField(max_length=150, validators=[UniqueValidator(queryset=User.objects.all())])
username = CharField(max_length=150)
def validate_path(self, path: str) -> str:
"""Validate path"""
@ -123,6 +116,7 @@ class UserSerializer(ModelSerializer):
return path
class Meta:
model = User
fields = [
"pk",
@ -174,6 +168,7 @@ class UserSelfSerializer(ModelSerializer):
return user.group_attributes(self._context["request"]).get("settings", {})
class Meta:
model = User
fields = [
"pk",
@ -204,47 +199,38 @@ class SessionUserSerializer(PassiveSerializer):
class UserMetricsSerializer(PassiveSerializer):
"""User Metrics"""
logins = SerializerMethodField()
logins_failed = SerializerMethodField()
authorizations = SerializerMethodField()
logins_per_1h = SerializerMethodField()
logins_failed_per_1h = SerializerMethodField()
authorizations_per_1h = SerializerMethodField()
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins(self, _):
"""Get successful logins per 8 hours for the last 7 days"""
def get_logins_per_1h(self, _):
"""Get successful logins per hour for the last 24 hours"""
user = self.context["user"]
request = self.context["request"]
return (
get_objects_for_user(request.user, "authentik_events.view_event").filter(
action=EventAction.LOGIN, user__pk=user.pk
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.LOGIN, user__pk=user.pk)
.get_events_per_hour()
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins_failed(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
def get_logins_failed_per_1h(self, _):
"""Get failed logins per hour for the last 24 hours"""
user = self.context["user"]
request = self.context["request"]
return (
get_objects_for_user(request.user, "authentik_events.view_event").filter(
action=EventAction.LOGIN_FAILED, context__username=user.username
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.LOGIN_FAILED, context__username=user.username)
.get_events_per_hour()
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_authorizations(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
def get_authorizations_per_1h(self, _):
"""Get failed logins per hour for the last 24 hours"""
user = self.context["user"]
request = self.context["request"]
return (
get_objects_for_user(request.user, "authentik_events.view_event").filter(
action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk)
.get_events_per_hour()
)
@ -276,6 +262,7 @@ class UsersFilter(FilterSet):
queryset=Group.objects.all(),
)
# pylint: disable=unused-argument
def filter_attributes(self, queryset, name, value):
"""Filter attributes by query args"""
try:
@ -322,7 +309,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
def _create_recovery_link(self) -> tuple[Optional[str], Optional[Token]]:
"""Create a recovery link (when the current tenant has a recovery flow set),
that can either be shown to an admin or sent to the user directly"""
tenant = get_tenant(self.request)
tenant: Tenant = self.request._request.tenant
# Check that there is a recovery flow, if not return an error
flow = tenant.flow_recovery
if not flow:
@ -331,16 +318,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):
user: User = self.get_object()
planner = FlowPlanner(flow)
planner.allow_empty_flows = True
try:
plan = planner.plan(
self.request._request,
{
PLAN_CONTEXT_PENDING_USER: user,
},
)
except FlowNonApplicableException:
LOGGER.warning("Recovery flow not applicable to user")
return None, None
plan = planner.plan(
self.request._request,
{
PLAN_CONTEXT_PENDING_USER: user,
},
)
token, __ = FlowToken.objects.update_or_create(
identifier=f"{user.uid}-password-reset",
defaults={
@ -351,12 +334,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
)
querystring = urlencode({QS_KEY_TOKEN: token.key})
link = self.request.build_absolute_uri(
reverse_interface(
self.request,
InterfaceType.FLOW,
flow_slug=flow.slug,
),
+f"?{querystring}",
reverse_lazy("authentik_core:if-flow", kwargs={"flow_slug": flow.slug})
+ f"?{querystring}"
)
return link, token
@ -367,11 +346,6 @@ class UserViewSet(UsedByMixin, ModelViewSet):
{
"name": CharField(required=True),
"create_group": BooleanField(default=False),
"expiring": BooleanField(default=True),
"expires": DateTimeField(
required=False,
help_text="If not provided, valid for 360 days",
),
},
),
responses={
@ -392,20 +366,14 @@ class UserViewSet(UsedByMixin, ModelViewSet):
"""Create a new user account that is marked as a service account"""
username = request.data.get("name")
create_group = request.data.get("create_group", False)
expiring = request.data.get("expiring", True)
expires = request.data.get("expires", now() + timedelta(days=360))
with atomic():
try:
user: User = User.objects.create(
user = User.objects.create(
username=username,
name=username,
attributes={USER_ATTRIBUTE_SA: True, USER_ATTRIBUTE_TOKEN_EXPIRING: expiring},
attributes={USER_ATTRIBUTE_SA: True, USER_ATTRIBUTE_TOKEN_EXPIRING: False},
path=USER_PATH_SERVICE_ACCOUNT,
)
user.set_unusable_password()
user.save()
response = {
"username": user.username,
"user_uid": user.uid,
@ -421,17 +389,17 @@ class UserViewSet(UsedByMixin, ModelViewSet):
identifier=slugify(f"service-account-{username}-password"),
intent=TokenIntents.INTENT_APP_PASSWORD,
user=user,
expires=expires,
expiring=expiring,
expires=now() + timedelta(days=360),
)
response["token"] = token.key
return Response(response)
except IntegrityError as exc:
except (IntegrityError) as exc:
return Response(data={"non_field_errors": [str(exc)]}, status=400)
@extend_schema(responses={200: SessionUserSerializer(many=False)})
@action(url_path="me", url_name="me", detail=False, pagination_class=None, filter_backends=[])
def user_me(self, request: Request) -> Response:
@action(detail=False, pagination_class=None, filter_backends=[])
# pylint: disable=invalid-name
def me(self, request: Request) -> Response:
"""Get information about current user"""
context = {"request": request}
serializer = SessionUserSerializer(
@ -459,6 +427,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
},
)
@action(detail=True, methods=["POST"])
# pylint: disable=invalid-name, unused-argument
def set_password(self, request: Request, pk: int) -> Response:
"""Set password for user"""
user: User = self.get_object()
@ -476,12 +445,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):
@permission_required("authentik_core.view_user", ["authentik_events.view_event"])
@extend_schema(responses={200: UserMetricsSerializer(many=False)})
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=invalid-name, unused-argument
def metrics(self, request: Request, pk: int) -> Response:
"""User metrics per 1h"""
user: User = self.get_object()
serializer = UserMetricsSerializer(instance={})
serializer = UserMetricsSerializer(True)
serializer.context["user"] = user
serializer.context["request"] = request
return Response(serializer.data)
@permission_required("authentik_core.reset_user_password")
@ -492,6 +461,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
},
)
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=invalid-name, unused-argument
def recovery(self, request: Request, pk: int) -> Response:
"""Create a temporary link that a user can use to recover their accounts"""
link, _ = self._create_recovery_link()
@ -516,6 +486,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
},
)
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=invalid-name, unused-argument
def recovery_email(self, request: Request, pk: int) -> Response:
"""Create a temporary link that a user can use to recover their accounts"""
for_user: User = self.get_object()
@ -589,14 +560,3 @@ class UserViewSet(UsedByMixin, ModelViewSet):
)
}
)
def partial_update(self, request: Request, *args, **kwargs) -> Response:
response = super().partial_update(request, *args, **kwargs)
instance: User = self.get_object()
if not instance.is_active:
sessions = AuthenticatedSession.objects.filter(user=instance)
session_ids = sessions.values_list("session_key", flat=True)
cache.delete_many(f"{KEY_PREFIX}{session}" for session in session_ids)
sessions.delete()
LOGGER.debug("Deleted user's sessions", user=instance.username)
return response
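
In the metrics hunks above, the newer serializer returns seven days of data at three points per day, hence the `7 * 3` argument and the "8 hour spans" comment, while the 2022.12.3 code returns one point per hour over the last 24 hours. A quick sketch of that bucket arithmetic, assuming `get_events_per` splits the given window into the requested number of equal slices:

```python
from datetime import timedelta

# Newer branch: 7-day window split into 3 points per day
window = timedelta(days=7)
data_points = 7 * 3
print(window / data_points)  # 8:00:00 -> one data point per 8 hours

# 2022.12.3 branch: 24-hour window, one point per hour
print(timedelta(hours=24) / 24)  # 1:00:00
```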

View File

@ -11,7 +11,6 @@ class AuthentikCoreConfig(ManagedAppConfig):
label = "authentik_core"
verbose_name = "authentik Core"
mountpoint = ""
ws_mountpoint = "authentik.core.urls"
default = True
def reconcile_load_core_signals(self):

View File

@ -1,9 +1,8 @@
"""Property Mapping Evaluator"""
from typing import Any, Optional
from typing import Optional
from django.db.models import Model
from django.http import HttpRequest
from prometheus_client import Histogram
from authentik.core.models import User
from authentik.events.models import Event, EventAction
@ -11,24 +10,15 @@ from authentik.lib.expression.evaluator import BaseEvaluator
from authentik.lib.utils.errors import exception_to_string
from authentik.policies.types import PolicyRequest
PROPERTY_MAPPING_TIME = Histogram(
"authentik_property_mapping_execution_time",
"Evaluation time of property mappings",
["mapping_name"],
)
class PropertyMappingEvaluator(BaseEvaluator):
"""Custom Evaluator that adds some different context variables."""
dry_run: bool
def __init__(
self,
model: Model,
user: Optional[User] = None,
request: Optional[HttpRequest] = None,
dry_run: Optional[bool] = False,
**kwargs,
):
if hasattr(model, "name"):
@ -45,13 +35,9 @@ class PropertyMappingEvaluator(BaseEvaluator):
req.http_request = request
self._context["request"] = req
self._context.update(**kwargs)
self.dry_run = dry_run
def handle_error(self, exc: Exception, expression_source: str):
"""Exception Handler"""
# For dry-run requests we don't save exceptions
if self.dry_run:
return
error_string = exception_to_string(exc)
event = Event.new(
EventAction.PROPERTY_MAPPING_EXCEPTION,
@ -63,7 +49,3 @@ class PropertyMappingEvaluator(BaseEvaluator):
event.from_http(req.http_request, req.user)
return
event.save()
def evaluate(self, *args, **kwargs) -> Any:
with PROPERTY_MAPPING_TIME.labels(mapping_name=self._filename).time():
return super().evaluate(*args, **kwargs)
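
The evaluator diff above also removes the Prometheus histogram that the newer code wraps around every expression evaluation. The timing pattern itself is just `Histogram.labels(...).time()` used as a context manager; a small sketch with illustrative metric names, assuming `prometheus_client` is installed:

```python
from time import sleep
from prometheus_client import Histogram

# Illustrative metric, not authentik's: one histogram, labelled per mapping
EVALUATION_TIME = Histogram(
    "example_mapping_execution_time",
    "Evaluation time of example mappings",
    ["mapping_name"],
)

def timed_evaluate(mapping_name: str, func, *args, **kwargs):
    # .time() observes the elapsed wall-clock time when the block exits
    with EVALUATION_TIME.labels(mapping_name=mapping_name).time():
        return func(*args, **kwargs)

timed_evaluate("demo", sleep, 0.01)
```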

View File

@ -49,6 +49,7 @@ class Command(BaseCommand):
return namespace
@staticmethod
# pylint: disable=unused-argument
def post_save_handler(sender, instance: Model, created: bool, **_):
"""Signal handler for all object's post_save"""
if not should_log_model(instance):
@ -64,6 +65,7 @@ class Command(BaseCommand):
).save()
@staticmethod
# pylint: disable=unused-argument
def pre_delete_handler(sender, instance: Model, **_):
"""Signal handler for all object's pre_delete"""
if not should_log_model(instance): # pragma: no cover

View File

@ -14,6 +14,7 @@ import authentik.core.models
class Migration(migrations.Migration):
initial = True
dependencies = [
@ -43,10 +44,7 @@ class Migration(migrations.Migration):
"is_superuser",
models.BooleanField(
default=False,
help_text=(
"Designates that this user has all permissions without explicitly"
" assigning them."
),
help_text="Designates that this user has all permissions without explicitly assigning them.",
verbose_name="superuser status",
),
),
@ -54,9 +52,7 @@ class Migration(migrations.Migration):
"username",
models.CharField(
error_messages={"unique": "A user with that username already exists."},
help_text=(
"Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only."
),
help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
max_length=150,
unique=True,
validators=[django.contrib.auth.validators.UnicodeUsernameValidator()],
@ -87,10 +83,7 @@ class Migration(migrations.Migration):
"is_active",
models.BooleanField(
default=True,
help_text=(
"Designates whether this user should be treated as active. Unselect"
" this instead of deleting accounts."
),
help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
verbose_name="active",
),
),

View File

@ -18,13 +18,13 @@ def create_default_user(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
db_alias = schema_editor.connection.alias
akadmin, _ = User.objects.using(db_alias).get_or_create(
username="akadmin",
email=environ.get("AUTHENTIK_BOOTSTRAP_EMAIL", "root@localhost"),
name="authentik Default Admin",
username="akadmin", email="root@localhost", name="authentik Default Admin"
)
password = None
if "TF_BUILD" in environ or settings.TEST:
password = "akadmin" # noqa # nosec
if "AK_ADMIN_PASS" in environ:
password = environ["AK_ADMIN_PASS"]
if "AUTHENTIK_BOOTSTRAP_PASSWORD" in environ:
password = environ["AUTHENTIK_BOOTSTRAP_PASSWORD"]
if password:
@ -51,6 +51,7 @@ def create_default_admin_group(apps: Apps, schema_editor: BaseDatabaseSchemaEdit
class Migration(migrations.Migration):
replaces = [
("authentik_core", "0002_auto_20200523_1133"),
("authentik_core", "0003_default_user"),
@ -171,10 +172,7 @@ class Migration(migrations.Migration):
name="groups",
field=models.ManyToManyField(
blank=True,
help_text=(
"The groups this user belongs to. A user will get all permissions granted to"
" each of their groups."
),
help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
related_name="user_set",
related_query_name="user",
to="auth.Group",

View File

@ -17,6 +17,7 @@ def set_default_token_key(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
class Migration(migrations.Migration):
replaces = [
("authentik_core", "0012_auto_20201003_1737"),
("authentik_core", "0013_auto_20201003_2132"),

View File

@ -4,6 +4,7 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0016_auto_20201202_2234"),
]
@ -14,12 +15,7 @@ class Migration(migrations.Migration):
name="managed",
field=models.TextField(
default=None,
help_text=(
"Objects which are managed by authentik. These objects are created and updated"
" automatically. This is flag only indicates that an object can be overwritten"
" by migrations. You can still modify the objects via the API, but expect"
" changes to be overwritten in a later update."
),
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
null=True,
verbose_name="Managed by authentik",
unique=True,
@ -30,12 +26,7 @@ class Migration(migrations.Migration):
name="managed",
field=models.TextField(
default=None,
help_text=(
"Objects which are managed by authentik. These objects are created and updated"
" automatically. This is flag only indicates that an object can be overwritten"
" by migrations. You can still modify the objects via the API, but expect"
" changes to be overwritten in a later update."
),
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
null=True,
verbose_name="Managed by authentik",
unique=True,

View File

@ -46,9 +46,13 @@ def create_default_user_token(apps: Apps, schema_editor: BaseDatabaseSchemaEdito
akadmin = User.objects.using(db_alias).filter(username="akadmin")
if not akadmin.exists():
return
if "AUTHENTIK_BOOTSTRAP_TOKEN" not in environ:
key = None
if "AK_ADMIN_TOKEN" in environ:
key = environ["AK_ADMIN_TOKEN"]
if "AUTHENTIK_BOOTSTRAP_TOKEN" in environ:
key = environ["AUTHENTIK_BOOTSTRAP_TOKEN"]
if not key:
return
key = environ["AUTHENTIK_BOOTSTRAP_TOKEN"]
Token.objects.using(db_alias).create(
identifier="authentik-bootstrap-token",
user=akadmin.first(),
@ -59,6 +63,7 @@ def create_default_user_token(apps: Apps, schema_editor: BaseDatabaseSchemaEdito
class Migration(migrations.Migration):
replaces = [
("authentik_core", "0018_auto_20210330_1345"),
("authentik_core", "0019_source_managed"),
@ -91,12 +96,7 @@ class Migration(migrations.Migration):
name="managed",
field=models.TextField(
default=None,
help_text=(
"Objects which are managed by authentik. These objects are created and updated"
" automatically. This is flag only indicates that an object can be overwritten"
" by migrations. You can still modify the objects via the API, but expect"
" changes to be overwritten in a later update."
),
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
null=True,
unique=True,
verbose_name="Managed by authentik",
@ -110,38 +110,23 @@ class Migration(migrations.Migration):
("identifier", "Use the source-specific identifier"),
(
"email_link",
(
"Link to a user with identical email address. Can have security"
" implications when a source doesn't validate email addresses."
),
"Link to a user with identical email address. Can have security implications when a source doesn't validate email addresses.",
),
(
"email_deny",
(
"Use the user's email address, but deny enrollment when the email"
" address already exists."
),
"Use the user's email address, but deny enrollment when the email address already exists.",
),
(
"username_link",
(
"Link to a user with identical username. Can have security implications"
" when a username is used with another source."
),
"Link to a user with identical username. Can have security implications when a username is used with another source.",
),
(
"username_deny",
(
"Use the user's username, but deny enrollment when the username already"
" exists."
),
"Use the user's username, but deny enrollment when the username already exists.",
),
],
default="identifier",
help_text=(
"How the source determines if an existing user should be authenticated or a new"
" user enrolled."
),
help_text="How the source determines if an existing user should be authenticated or a new user enrolled.",
),
),
migrations.AlterField(
@ -182,9 +167,7 @@ class Migration(migrations.Migration):
model_name="application",
name="meta_launch_url",
field=models.TextField(
blank=True,
default="",
validators=[authentik.lib.models.DomainlessFormattedURLValidator()],
blank=True, default="", validators=[authentik.lib.models.DomainlessURLValidator()]
),
),
migrations.RunPython(

View File

@ -4,6 +4,7 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0018_auto_20210330_1345_squashed_0028_alter_token_intent"),
]

View File

@ -4,6 +4,7 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0019_application_group"),
]

View File

@ -4,6 +4,7 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0020_application_open_in_new_tab"),
]

View File

@ -5,6 +5,7 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0021_source_user_path_user_path"),
]

View File

@ -4,6 +4,7 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0022_alter_group_parent"),
]

View File

@ -4,6 +4,7 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0023_source_authentik_c_slug_ccb2e5_idx_and_more"),
]

View File

@ -1,25 +0,0 @@
# Generated by Django 4.1.7 on 2023-03-02 21:32
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_flows", "0025_alter_flowstagebinding_evaluate_on_plan_and_more"),
("authentik_core", "0024_source_icon"),
]
operations = [
migrations.AlterField(
model_name="provider",
name="authorization_flow",
field=models.ForeignKey(
help_text="Flow used when authorizing this provider.",
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="provider_authorization",
to="authentik_flows.flow",
),
),
]

View File

@ -1,26 +0,0 @@
# Generated by Django 4.1.7 on 2023-03-07 13:41
from django.db import migrations, models
from authentik.lib.migrations import fallback_names
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0025_alter_provider_authorization_flow"),
]
operations = [
migrations.RunPython(fallback_names("authentik_core", "propertymapping", "name")),
migrations.RunPython(fallback_names("authentik_core", "provider", "name")),
migrations.AlterField(
model_name="propertymapping",
name="name",
field=models.TextField(unique=True),
),
migrations.AlterField(
model_name="provider",
name="name",
field=models.TextField(unique=True),
),
]

View File

@ -1,19 +0,0 @@
# Generated by Django 4.1.7 on 2023-03-19 21:57
import uuid
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0026_alter_propertymapping_name_alter_provider_name"),
]
operations = [
migrations.AlterField(
model_name="user",
name="uuid",
field=models.UUIDField(default=uuid.uuid4, editable=False, unique=True),
),
]

View File

@ -1,25 +0,0 @@
# Generated by Django 4.1.7 on 2023-03-23 21:44
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_flows", "0025_alter_flowstagebinding_evaluate_on_plan_and_more"),
("authentik_core", "0027_alter_user_uuid"),
]
operations = [
migrations.AddField(
model_name="provider",
name="authentication_flow",
field=models.ForeignKey(
help_text="Flow used for authentication when the associated application is accessed by an un-authenticated user.",
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="provider_authentication",
to="authentik_flows.flow",
),
),
]

View File

@ -1,7 +1,8 @@
"""authentik core models"""
from datetime import timedelta
from hashlib import sha256
from hashlib import md5, sha256
from typing import Any, Optional
from urllib.parse import urlencode
from uuid import uuid4
from deepmerge import always_merger
@ -12,7 +13,9 @@ from django.contrib.auth.models import UserManager as DjangoUserManager
from django.db import models
from django.db.models import Q, QuerySet, options
from django.http import HttpRequest
from django.templatetags.static import static
from django.utils.functional import SimpleLazyObject, cached_property
from django.utils.html import escape
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from guardian.mixins import GuardianUserMixin
@ -22,18 +25,13 @@ from structlog.stdlib import get_logger
from authentik.blueprints.models import ManagedModel
from authentik.core.exceptions import PropertyMappingExpressionException
from authentik.core.signals import password_changed
from authentik.core.types import UILoginButton, UserSettingSerializer
from authentik.lib.avatars import get_avatar
from authentik.lib.config import CONFIG
from authentik.lib.config import CONFIG, get_path_from_dict
from authentik.lib.generators import generate_id
from authentik.lib.models import (
CreatedUpdatedModel,
DomainlessFormattedURLValidator,
SerializerModel,
)
from authentik.lib.models import CreatedUpdatedModel, DomainlessURLValidator, SerializerModel
from authentik.lib.utils.http import get_client_ip
from authentik.policies.models import PolicyBindingModel
from authentik.tenants.utils import get_tenant
LOGGER = get_logger()
USER_ATTRIBUTE_DEBUG = "goauthentik.io/user/debug"
@ -51,6 +49,9 @@ USER_ATTRIBUTE_CAN_OVERRIDE_IP = "goauthentik.io/user/override-ips"
USER_PATH_SYSTEM_PREFIX = "goauthentik.io"
USER_PATH_SERVICE_ACCOUNT = USER_PATH_SYSTEM_PREFIX + "/service-accounts"
GRAVATAR_URL = "https://secure.gravatar.com"
DEFAULT_AVATAR = static("dist/assets/images/user_default.png")
options.DEFAULT_NAMES = options.DEFAULT_NAMES + ("authentik_used_by_shadows",)
@ -128,6 +129,7 @@ class Group(SerializerModel):
return f"Group {self.name}"
class Meta:
unique_together = (
(
"name",
@ -147,7 +149,7 @@ class UserManager(DjangoUserManager):
class User(SerializerModel, GuardianUserMixin, AbstractUser):
"""Custom User model to allow easier adding of user-based settings"""
uuid = models.UUIDField(default=uuid4, editable=False, unique=True)
uuid = models.UUIDField(default=uuid4, editable=False)
name = models.TextField(help_text=_("User's display name."))
path = models.TextField(default="users")
@ -169,7 +171,7 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
including the users attributes"""
final_attributes = {}
if request and hasattr(request, "tenant"):
always_merger.merge(final_attributes, get_tenant(request).attributes)
always_merger.merge(final_attributes, request.tenant.attributes)
for group in self.ak_groups.all().order_by("name"):
always_merger.merge(final_attributes, group.attributes)
always_merger.merge(final_attributes, self.attributes)
@ -193,8 +195,6 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
def set_password(self, raw_password, signal=True):
if self.pk and signal:
from authentik.core.signals import password_changed
password_changed.send(sender=self, user=self, password=raw_password)
self.password_change_date = now()
return super().set_password(raw_password)
@ -228,15 +228,34 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
except Exception as exc:
LOGGER.warning("Failed to get default locale", exc=exc)
if request:
return get_tenant(request).default_locale
return request.tenant.locale
return ""
@property
def avatar(self) -> str:
"""Get avatar, depending on authentik.avatar setting"""
return get_avatar(self)
mode: str = CONFIG.y("avatars", "none")
if mode == "none":
return DEFAULT_AVATAR
if mode.startswith("attributes."):
return get_path_from_dict(self.attributes, mode[11:], default=DEFAULT_AVATAR)
# gravatar uses md5 for their URLs, so md5 can't be avoided
mail_hash = md5(self.email.lower().encode("utf-8")).hexdigest() # nosec
if mode == "gravatar":
parameters = [
("s", "158"),
("r", "g"),
]
gravatar_url = f"{GRAVATAR_URL}/avatar/{mail_hash}?{urlencode(parameters, doseq=True)}"
return escape(gravatar_url)
return mode % {
"username": self.username,
"mail_hash": mail_hash,
"upn": self.attributes.get("upn", ""),
}
class Meta:
permissions = (
("reset_user_password", "Reset Password"),
("impersonate", "Can impersonate other users"),
@ -248,23 +267,11 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
class Provider(SerializerModel):
"""Application-independent Provider instance. For example SAML2 Remote, OAuth2 Application"""
name = models.TextField(unique=True)
authentication_flow = models.ForeignKey(
"authentik_flows.Flow",
null=True,
on_delete=models.SET_NULL,
help_text=_(
"Flow used for authentication when the associated application is accessed by an "
"un-authenticated user."
),
related_name="provider_authentication",
)
name = models.TextField()
authorization_flow = models.ForeignKey(
"authentik_flows.Flow",
on_delete=models.CASCADE,
null=True,
help_text=_("Flow used when authorizing this provider."),
related_name="provider_authorization",
)
@ -307,7 +314,7 @@ class Application(SerializerModel, PolicyBindingModel):
)
meta_launch_url = models.TextField(
default="", blank=True, validators=[DomainlessFormattedURLValidator()]
default="", blank=True, validators=[DomainlessURLValidator()]
)
open_in_new_tab = models.BooleanField(
@ -375,6 +382,7 @@ class Application(SerializerModel, PolicyBindingModel):
return str(self.name)
class Meta:
verbose_name = _("Application")
verbose_name_plural = _("Applications")
@ -384,15 +392,19 @@ class SourceUserMatchingModes(models.TextChoices):
IDENTIFIER = "identifier", _("Use the source-specific identifier")
EMAIL_LINK = "email_link", _(
"Link to a user with identical email address. Can have security implications "
"when a source doesn't validate email addresses."
(
"Link to a user with identical email address. Can have security implications "
"when a source doesn't validate email addresses."
)
)
EMAIL_DENY = "email_deny", _(
"Use the user's email address, but deny enrollment when the email address already exists."
)
USERNAME_LINK = "username_link", _(
"Link to a user with identical username. Can have security implications "
"when a username is used with another source."
(
"Link to a user with identical username. Can have security implications "
"when a username is used with another source."
)
)
USERNAME_DENY = "username_deny", _(
"Use the user's username, but deny enrollment when the username already exists."
@ -439,8 +451,10 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
choices=SourceUserMatchingModes.choices,
default=SourceUserMatchingModes.IDENTIFIER,
help_text=_(
"How the source determines if an existing user should be authenticated or "
"a new user enrolled."
(
"How the source determines if an existing user should be authenticated or "
"a new user enrolled."
)
),
)
@ -486,6 +500,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
return str(self.name)
class Meta:
indexes = [
models.Index(
fields=[
@ -514,6 +529,7 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel):
raise NotImplementedError
class Meta:
unique_together = (("user", "source"),)
@ -546,6 +562,7 @@ class ExpiringModel(models.Model):
return now() > self.expires
class Meta:
abstract = True
@ -611,6 +628,7 @@ class Token(SerializerModel, ManagedModel, ExpiringModel):
return description
class Meta:
verbose_name = _("Token")
verbose_name_plural = _("Tokens")
indexes = [
@ -624,7 +642,7 @@ class PropertyMapping(SerializerModel, ManagedModel):
"""User-defined key -> x mapping which can be used by providers to expose extra data."""
pm_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
name = models.TextField(unique=True)
name = models.TextField()
expression = models.TextField()
objects = InheritanceManager()
@ -647,12 +665,13 @@ class PropertyMapping(SerializerModel, ManagedModel):
try:
return evaluator.evaluate(self.expression)
except Exception as exc:
raise PropertyMappingExpressionException(exc) from exc
raise PropertyMappingExpressionException(str(exc)) from exc
def __str__(self):
return f"Property Mapping {self.name}"
class Meta:
verbose_name = _("Property Mapping")
verbose_name_plural = _("Property Mappings")
@ -689,5 +708,6 @@ class AuthenticatedSession(ExpiringModel):
)
class Meta:
verbose_name = _("Authenticated Session")
verbose_name_plural = _("Authenticated Sessions")
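
A large part of the `models.py` diff is the avatar handling: the newer code delegates to `authentik.lib.avatars.get_avatar`, while 2022.12.3 builds the URL inline from the configured mode. The gravatar branch shown above reduces to an md5 of the lower-cased email address appended to the gravatar base URL with a fixed size and rating; a standalone sketch of just that construction:

```python
from hashlib import md5
from urllib.parse import urlencode

GRAVATAR_URL = "https://secure.gravatar.com"

def gravatar_url(email: str) -> str:
    # gravatar mandates md5 of the lower-cased address, so md5 can't be avoided here
    mail_hash = md5(email.lower().encode("utf-8")).hexdigest()  # nosec
    params = urlencode([("s", "158"), ("r", "g")], doseq=True)
    return f"{GRAVATAR_URL}/avatar/{mail_hash}?{params}"

print(gravatar_url("jens@goauthentik.io"))
```

The original property additionally escapes the resulting URL before returning it.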

View File

@ -10,33 +10,36 @@ from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.http.request import HttpRequest
from authentik.core.models import Application, AuthenticatedSession
# Arguments: user: User, password: str
password_changed = Signal()
# Arguments: credentials: dict[str, any], request: HttpRequest, stage: Stage
login_failed = Signal()
if TYPE_CHECKING:
from authentik.core.models import User
from authentik.core.models import AuthenticatedSession, User
@receiver(post_save, sender=Application)
@receiver(post_save)
# pylint: disable=unused-argument
def post_save_application(sender: type[Model], instance, created: bool, **_):
"""Clear user's application cache upon application creation"""
from authentik.core.api.applications import user_app_cache_key
from authentik.core.models import Application
if sender != Application:
return
if not created: # pragma: no cover
return
# Also delete user application cache
keys = cache.keys(user_app_cache_key("*"))
cache.delete_many(keys)
@receiver(user_logged_in)
# pylint: disable=unused-argument
def user_logged_in_session(sender, request: HttpRequest, user: "User", **_):
"""Create an AuthenticatedSession from request"""
from authentik.core.models import AuthenticatedSession
session = AuthenticatedSession.from_request(request, user)
if session:
@ -44,13 +47,21 @@ def user_logged_in_session(sender, request: HttpRequest, user: "User", **_):
@receiver(user_logged_out)
# pylint: disable=unused-argument
def user_logged_out_session(sender, request: HttpRequest, user: "User", **_):
"""Delete AuthenticatedSession if it exists"""
from authentik.core.models import AuthenticatedSession
AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete()
@receiver(pre_delete, sender=AuthenticatedSession)
@receiver(pre_delete)
def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_):
"""Delete session when authenticated session is deleted"""
from authentik.core.models import AuthenticatedSession
if sender != AuthenticatedSession:
return
cache_key = f"{KEY_PREFIX}{instance.session_key}"
cache.delete(cache_key)
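
Both sides of the signals diff keep the same invalidation step: when an `AuthenticatedSession` is deleted, the matching entry in Django's cache-backed session store is dropped by prefixed key. A minimal sketch of that lookup, only meaningful inside a configured Django project using the cache session backend:

```python
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache

def drop_cached_session(session_key: str) -> None:
    # The cache session backend stores sessions under KEY_PREFIX + session_key
    cache.delete(f"{KEY_PREFIX}{session_key}")
```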

Some files were not shown because too many files have changed in this diff.