Compare commits
3 Commits
stages/ema ... version-20

| Author | SHA1 | Date |
| --- | --- | --- |
|  | b0e543a498 |  |
|  | 2fc3db2365 |  |
|  | ecf9c8fcdd |  |

@@ -1,5 +1,5 @@
[bumpversion]
current_version = 2023.4.1
current_version = 2023.1.3
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)

@@ -6,4 +6,3 @@ dist/**
build/**
build_docs/**
Dockerfile
authentik/enterprise

@@ -7,14 +7,8 @@ charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[*.html]
[html]
indent_size = 2

[*.{yaml,yml}]
[yaml]
indent_size = 2

[*.go]
indent_style = tab

[Makefile]
indent_style = tab

.github/ISSUE_TEMPLATE/bug_report.md (11 changes, vendored)
@@ -1,9 +1,10 @@
---
name: Bug report
about: Create a report to help us improve
title: ""
title: ''
labels: bug
assignees: ""
assignees: ''

---

**Describe the bug**
@@ -11,7 +12,6 @@ A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:

1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
@@ -27,9 +27,8 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively

**Version and Deployment (please complete the following information):**

- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]

**Additional context**
Add any other context about the problem here.

.github/ISSUE_TEMPLATE/feature_request.md (5 changes, vendored)
@@ -1,9 +1,10 @@
---
name: Feature request
about: Suggest an idea for this project
title: ""
title: ''
labels: enhancement
assignees: ""
assignees: ''

---

**Is your feature request related to a problem? Please describe.**

.github/ISSUE_TEMPLATE/question.md (10 changes, vendored)
@@ -1,9 +1,10 @@
---
name: Question
about: Ask a question about a feature or specific configuration
title: ""
title: ''
labels: question
assignees: ""
assignees: ''

---

**Describe your question/**
@@ -19,9 +20,8 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively

**Version and Deployment (please complete the following information):**

- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]

**Additional context**
Add any other context about the problem here.

@@ -1,5 +1,5 @@
name: "Comment usage instructions on PRs"
description: "Comment usage instructions on PRs"
name: 'Comment usage instructions on PRs'
description: 'Comment usage instructions on PRs'

inputs:
tag:
@@ -17,7 +17,7 @@ runs:
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: "github-actions[bot]"
comment-author: 'github-actions[bot]'
body-includes: authentik PR Installation instructions
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v2

.github/actions/docker-push-variables/action.yml (10 changes, vendored)
@@ -1,5 +1,5 @@
name: "Prepare docker environment variables"
description: "Prepare docker environment variables"
name: 'Prepare docker environment variables'
description: 'Prepare docker environment variables'

outputs:
shouldBuild:
@@ -51,14 +51,12 @@ runs:
version_family = ".".join(version.split(".")[:-1])
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")

sha = os.environ["GITHUB_SHA"] if not "${{ github.event.pull_request.head.sha }}" else "${{ github.event.pull_request.head.sha }}"

with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print("branchName=%s" % branch_name, file=_output)
print("branchNameContainer=%s" % safe_branch_name, file=_output)
print("timestamp=%s" % int(time()), file=_output)
print("sha=%s" % sha, file=_output)
print("shortHash=%s" % sha[:7], file=_output)
print("sha=%s" % os.environ["GITHUB_SHA"], file=_output)
print("shortHash=%s" % os.environ["GITHUB_SHA"][:7], file=_output)
print("shouldBuild=%s" % should_build, file=_output)
print("version=%s" % version, file=_output)
print("versionFamily=%s" % version_family, file=_output)

.github/actions/setup/action.yml (20 changes, vendored)
@@ -1,10 +1,5 @@
name: "Setup authentik testing environment"
description: "Setup authentik testing environment"

inputs:
postgresql_tag:
description: "Optional postgresql image tag"
default: "12"
name: 'Setup authentik testing environment'
description: 'Setup authentik testing environment'

runs:
using: "composite"
@@ -18,18 +13,17 @@ runs:
- name: Setup python and restore poetry
uses: actions/setup-python@v3
with:
python-version: "3.11"
cache: "poetry"
python-version: '3.11'
cache: 'poetry'
- name: Setup node
uses: actions/setup-node@v3
uses: actions/setup-node@v3.1.0
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Setup dependencies
shell: bash
run: |
export PSQL_TAG=${{ inputs.postgresql_tag }}
docker-compose -f .github/actions/setup/docker-compose.yml up -d
poetry env use python3.11
poetry install

.github/actions/setup/docker-compose.yml (14 changes, vendored)
@@ -1,21 +1,23 @@
version: "3.7"
version: '3.7'

services:
postgresql:
image: docker.io/library/postgres:${PSQL_TAG:-12}
container_name: postgres
image: library/postgres:12
volumes:
- db-data:/var/lib/postgresql/data
- db-data:/var/lib/postgresql/data
environment:
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
POSTGRES_DB: authentik
ports:
- 5432:5432
- 5432:5432
restart: always
redis:
image: docker.io/library/redis
container_name: redis
image: library/redis
ports:
- 6379:6379
- 6379:6379
restart: always

volumes:

.github/codecov.yml (11 changes, vendored)
@@ -1,10 +1,3 @@
coverage:
status:
project:
default:
target: auto
# adjust accordingly based on how flaky your tests are
# this allows a 1% drop from the previous base commit coverage
threshold: 1%
notify:
after_n_builds: 3
precision: 2
round: up

.github/codespell-dictionary.txt (1 change, vendored)
@@ -1 +0,0 @@
authentic->authentik

.github/dependabot.yml (120 changes, vendored)
@@ -1,62 +1,62 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "ci:"
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: npm
directory: "/web"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "web:"
- package-ecosystem: npm
directory: "/website"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "website:"
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: docker
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "ci:"
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: npm
directory: "/web"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "web:"
- package-ecosystem: npm
directory: "/website"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "website:"
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: docker
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"

.github/pull_request_template.md (32 changes, vendored)
@@ -5,35 +5,15 @@ Please check the [Contributing guidelines](https://github.com/goauthentik/authen
-->

# Details

- **Does this resolve an issue?**
Resolves #
* **Does this resolve an issue?**
Resolves #

## Changes

### New Features

- Adds feature which does x, y, and z.
* Adds feature which does x, y, and z.

### Breaking Changes
* Adds breaking change which causes \<issue\>.

- Adds breaking change which causes \<issue\>.

## Checklist

- [ ] Local tests pass (`ak test authentik/`)
- [ ] The code has been formatted (`make lint-fix`)

If an API change has been made

- [ ] The API schema has been updated (`make gen-build`)

If changes to the frontend have been made

- [ ] The code has been formatted (`make web`)
- [ ] The translation files have been updated (`make i18n-extract`)

If applicable

- [ ] The documentation has been updated
- [ ] The documentation has been formatted (`make website`)
## Additional
Any further notes or comments you want to make.

.github/stale.yml (1 change, vendored)
@@ -16,4 +16,3 @@ markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
only: issues

.github/transifex.yml (4 changes, vendored)
@@ -6,11 +6,11 @@ git:
source_language: en
source_file: web/src/locales/en.po
# path expression to translation files, must contain <lang> placeholder
translation_files_expression: "web/src/locales/<lang>.po"
translation_files_expression: 'web/src/locales/<lang>.po'
- filter_type: file
# all supported i18n types: https://docs.transifex.com/formats
file_format: PO
source_language: en
source_file: locale/en/LC_MESSAGES/django.po
# path expression to translation files, must contain <lang> placeholder
translation_files_expression: "locale/<lang>/LC_MESSAGES/django.po"
translation_files_expression: 'locale/<lang>/LC_MESSAGES/django.po'

.github/workflows/ci-main.yml (36 changes, vendored)
@@ -23,14 +23,12 @@ jobs:
fail-fast: false
matrix:
job:
- bandit
- black
- codespell
- isort
- pending-migrations
- pylint
- black
- isort
- bandit
- pyright
- ruff
- pending-migrations
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@@ -61,7 +59,7 @@ jobs:
cp authentik/lib/default.yml local.env.yml
cp -R .github ..
cp -R scripts ..
git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
git checkout $(git describe --abbrev=0 --match 'version/*')
rm -rf .github/ scripts/
mv ../.github ../scripts .
- name: Setup authentik env (ensure stable deps are installed)
@@ -81,21 +79,11 @@ jobs:
- name: migrate to latest
run: poetry run python -m lifecycle.migrate
test-unittest:
name: test-unittest - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
psql:
- 11-alpine
- 12-alpine
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
with:
postgresql_tag: ${{ matrix.psql }}
- name: run unittest
run: |
poetry run make test
@@ -106,7 +94,6 @@ jobs:
flags: unit
test-integration:
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
@@ -124,7 +111,6 @@ jobs:
test-e2e:
name: test-e2e (${{ matrix.job.name }})
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
@@ -139,8 +125,6 @@ jobs:
glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml*
- name: ldap
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
- name: radius
glob: tests/e2e/test_provider_radius*
- name: flows
glob: tests/e2e/test_flows*
steps:
@@ -187,8 +171,6 @@ jobs:
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
@@ -206,7 +188,7 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v4
uses: docker/build-push-action@v3
with:
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
@@ -214,7 +196,6 @@ jobs:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
@@ -231,8 +212,6 @@ jobs:
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
@@ -250,7 +229,7 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v4
uses: docker/build-push-action@v3
with:
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
@@ -258,7 +237,6 @@ jobs:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-arm64
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}-arm64
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}

.github/workflows/ci-outpost.yml (38 changes, vendored)
@@ -15,9 +15,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
- uses: actions/setup-go@v3
with:
go-version-file: "go.mod"
go-version: "^1.17"
- name: Prepare and generate API
run: |
# Create folder structure for go embeds
@@ -30,14 +30,13 @@ jobs:
uses: golangci/golangci-lint-action@v3
with:
args: --timeout 5000s
skip-pkg-cache: true
test-unittest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
- uses: actions/setup-go@v3
with:
go-version-file: "go.mod"
go-version: "^1.17"
- name: Generate API
run: make gen-client-go
- name: Go unittests
@@ -50,7 +49,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- run: echo mark
build-container:
build:
timeout-minutes: 120
needs:
- ci-outpost-mark
@@ -60,12 +59,11 @@ jobs:
type:
- proxy
- ldap
- radius
arch:
- 'linux/amd64'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
@@ -85,7 +83,7 @@ jobs:
- name: Generate API
run: make gen-client-go
- name: Build Docker Image
uses: docker/build-push-action@v4
uses: docker/build-push-action@v3
with:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
@@ -95,9 +93,8 @@ jobs:
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: linux/amd64,linux/arm64
context: .
build-binary:
platforms: ${{ matrix.arch }}
build-outpost-binary:
timeout-minutes: 120
needs:
- ci-outpost-mark
@@ -108,20 +105,17 @@ jobs:
type:
- proxy
- ldap
- radius
goos: [linux]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-go@v4
with:
go-version-file: "go.mod"
go-version: "^1.17"
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Generate API
run: make gen-client-go
@@ -136,3 +130,7 @@ jobs:
export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }}
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- uses: actions/upload-artifact@v3
with:
name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
path: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}

.github/workflows/ci-web.yml (20 changes, vendored)
@@ -17,8 +17,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@@ -33,8 +33,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@@ -49,8 +49,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@@ -65,8 +65,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: |
@@ -97,8 +97,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci

.github/workflows/ci-website.yml (41 changes, vendored)
@@ -17,54 +17,17 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: prettier
working-directory: website/
run: npm run prettier-check
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: test
working-directory: website/
run: npm test
build:
runs-on: ubuntu-latest
name: ${{ matrix.job }}
strategy:
fail-fast: false
matrix:
job:
- build
- build-docs-only
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
- name: build
working-directory: website/
run: npm run ${{ matrix.job }}
ci-website-mark:
needs:
- lint-prettier
- test
- build
runs-on: ubuntu-latest
steps:
- run: echo mark

.github/workflows/codeql-analysis.yml (56 changes, vendored)
@@ -2,11 +2,12 @@ name: "CodeQL"

on:
push:
branches: [main, "*", next, version*]
branches: [ main, '*', next, version* ]
pull_request:
branches: [main]
# The branches below must be a subset of the branches above
branches: [ main ]
schedule:
- cron: "30 6 * * 5"
- cron: '30 6 * * 5'

jobs:
analyze:
@@ -20,17 +21,40 @@ jobs:
strategy:
fail-fast: false
matrix:
language: ["go", "javascript", "python"]
language: [ 'go', 'javascript', 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more:
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed

steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
- name: Autobuild
uses: github/codeql-action/autobuild@v2
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
- name: Checkout repository
uses: actions/checkout@v3

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main

# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2

# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl

# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language

#- run: |
# make bootstrap
# make release

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2

.github/workflows/ghcr-retention.yml (4 changes, vendored)
@@ -2,7 +2,7 @@ name: ghcr-retention

on:
schedule:
- cron: "0 0 * * *" # every day at midnight
- cron: '0 0 * * *' # every day at midnight
workflow_dispatch:

jobs:
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Delete 'dev' containers older than a week
uses: snok/container-retention-policy@v2
uses: snok/container-retention-policy@v1
with:
image-names: dev-server,dev-ldap,dev-proxy
cut-off: One week ago UTC

.github/workflows/release-publish.yml (22 changes, vendored)
@@ -28,7 +28,7 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v4
uses: docker/build-push-action@v3
with:
push: ${{ github.event_name == 'release' }}
secrets: |
@@ -52,12 +52,11 @@ jobs:
type:
- proxy
- ldap
- radius
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
- uses: actions/setup-go@v3
with:
go-version-file: "go.mod"
go-version: "^1.17"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
@@ -77,7 +76,7 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v4
uses: docker/build-push-action@v3
with:
push: ${{ github.event_name == 'release' }}
tags: |
@@ -100,18 +99,17 @@ jobs:
type:
- proxy
- ldap
- radius
goos: [linux, darwin]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
- uses: actions/setup-go@v3
with:
go-version-file: "go.mod"
go-version: "^1.17"
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '16'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Build web
working-directory: web/
@@ -173,5 +171,5 @@ jobs:
SENTRY_PROJECT: authentik
with:
version: authentik@${{ steps.ev.outputs.version }}
sourcemaps: "./web/dist"
url_prefix: "~/static/dist"
sourcemaps: './web/dist'
url_prefix: '~/static/dist'

.github/workflows/release-tag.yml (2 changes, vendored)
@@ -3,7 +3,7 @@ name: authentik-on-tag
on:
push:
tags:
- "version/*"
- 'version/*'

jobs:
build:

.github/workflows/translation-advice.yml (34 changes, vendored)
@@ -1,34 +0,0 @@
name: authentik-translation-advice

on:
pull_request:
branches:
- main
paths:
- "!**"
- "locale/**"
- "web/src/locales/**"

jobs:
post-comment:
runs-on: ubuntu-latest
steps:
- name: Find Comment
uses: peter-evans/find-comment@v2
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: "github-actions[bot]"
body-includes: authentik translations instructions
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v2
with:
comment-id: ${{ steps.fc.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
### authentik translations instructions

Thanks for your pull request!

authentik translations are handled using [Transifex](https://explore.transifex.com/authentik/authentik/). Please edit translations over there and they'll be included automatically.

.github/workflows/translation-compile.yml (11 changes, vendored)
@@ -1,9 +1,12 @@
name: authentik-backend-translate-compile
on:
push:
branches: [main]
branches: [ main ]
paths:
- "locale/**"
- '/locale/'
pull_request:
paths:
- '/locale/'
workflow_dispatch:

env:
@@ -21,9 +24,9 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run compile
run: poetry run ak compilemessages
run: poetry run ./manage.py compilemessages
- name: Create Pull Request
uses: peter-evans/create-pull-request@v5
uses: peter-evans/create-pull-request@v4
id: cpr
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}

.github/workflows/web-api-publish.yml (12 changes, vendored)
@@ -1,9 +1,9 @@
name: authentik-web-api-publish
on:
push:
branches: [main]
branches: [ main ]
paths:
- "schema.yml"
- 'schema.yml'
workflow_dispatch:
jobs:
build:
@@ -14,8 +14,8 @@ jobs:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
registry-url: "https://registry.npmjs.org"
node-version: '16'
registry-url: 'https://registry.npmjs.org'
- name: Generate API Client
run: make gen-client-ts
- name: Publish package
@@ -30,7 +30,7 @@ jobs:
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- uses: peter-evans/create-pull-request@v5
- uses: peter-evans/create-pull-request@v4
id: cpr
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
@@ -42,7 +42,7 @@ jobs:
signoff: true
team-reviewers: "@goauthentik/core"
author: authentik bot <github-bot@goauthentik.io>
- uses: peter-evans/enable-pull-request-automerge@v3
- uses: peter-evans/enable-pull-request-automerge@v2
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}

.gitignore (3 changes, vendored)
@@ -200,6 +200,3 @@ media/
.idea/
/gen-*/
data/

# Local Netlify folder
.netlify

.vscode/extensions.json (20 changes, vendored)
@@ -1,20 +0,0 @@
{
"recommendations": [
"EditorConfig.EditorConfig",
"bashmish.es6-string-css",
"bpruitt-goddard.mermaid-markdown-syntax-highlighting",
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"golang.go",
"Gruntfuggly.todo-tree",
"mechatroner.rainbow-csv",
"ms-python.black-formatter",
"ms-python.isort",
"ms-python.pylint",
"ms-python.python",
"ms-python.vscode-pylance",
"redhat.vscode-yaml",
"Tobermory.es6-string-html",
"unifiedjs.vscode-mdx"
]
}

.vscode/settings.json (6 changes, vendored)
@@ -16,8 +16,7 @@
"passwordless",
"kubernetes",
"sso",
"slo",
"scim",
"slo"
],
"python.linting.pylintEnabled": true,
"todo-tree.tree.showCountsInTree": true,
@@ -47,6 +46,5 @@
"url": "https://github.com/goauthentik/authentik/issues/<num>",
"ignoreCase": false
}
],
"go.testFlags": ["-count=1"]
]
}

@@ -20,7 +20,6 @@ The following is a set of guidelines for contributing to authentik and its compo
- [Reporting Bugs](#reporting-bugs)
- [Suggesting Enhancements](#suggesting-enhancements)
- [Your First Code Contribution](#your-first-code-contribution)
- [Help with the Docs](#help-with-the-docs)
- [Pull Requests](#pull-requests)

[Styleguides](#styleguides)
@@ -136,9 +135,6 @@ authentik can be run locally, all though depending on which part you want to wor

This is documented in the [developer docs](https://goauthentik.io/developer-docs/?utm_source=github)

### Help with the Docs
Contributions to the technical documentation are greatly appreciated. Open a PR if you have improvements to make or new content to add. If you have questions or suggestions about the documentation, open an Issue. No contribution is too small.

### Pull Requests

The process described here has several goals:
@@ -158,19 +154,12 @@ While the prerequisites above must be satisfied prior to having your pull reques

## Styleguides

### PR naming

- Use the format of `<package>: <verb> <description>`
- See [here](#authentik-packages) for `package`
- Example: `providers/saml2: fix parsing of requests`

### Git Commit Messages

- Use the format of `<package>: <verb> <description>`
- See [here](#authentik-packages) for `package`
- Example: `providers/saml2: fix parsing of requests`
- Reference issues and pull requests liberally after the first line
- Naming of commits within a PR does not need to adhere to the guidelines as we squash merge PRs

### Python Styleguide

Dockerfile (18 changes)
@@ -1,5 +1,5 @@
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/node:20 as website-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as website-builder

COPY ./website /work/website/
COPY ./blueprints /work/blueprints/
@@ -10,7 +10,7 @@ WORKDIR /work/website
RUN npm ci && npm run build-docs-only

# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/node:20 as web-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as web-builder

COPY ./web /work/web/
COPY ./website /work/website/
@@ -20,7 +20,7 @@ WORKDIR /work/web
RUN npm ci && npm run build

# Stage 3: Poetry to requirements.txt export
FROM docker.io/python:3.11.3-slim-bullseye AS poetry-locker
FROM docker.io/python:3.11.1-slim-bullseye AS poetry-locker

WORKDIR /work
COPY ./pyproject.toml /work
@@ -31,7 +31,7 @@ RUN pip install --no-cache-dir poetry && \
poetry export -f requirements.txt --dev --output requirements-dev.txt

# Stage 4: Build go proxy
FROM docker.io/golang:1.20.4-bullseye AS go-builder
FROM docker.io/golang:1.19.5-bullseye AS go-builder

WORKDIR /work

@@ -47,7 +47,7 @@ COPY ./go.sum /work/go.sum
RUN go build -o /work/authentik ./cmd/server/

# Stage 5: MaxMind GeoIP
FROM docker.io/maxmindinc/geoipupdate:v5.0 as geoip
FROM docker.io/maxmindinc/geoipupdate:v4.10 as geoip

ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
ENV GEOIPUPDATE_VERBOSE="true"
@@ -62,7 +62,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
"

# Stage 6: Run
FROM docker.io/python:3.11.3-slim-bullseye AS final-image
FROM docker.io/python:3.11.1-slim-bullseye AS final-image

LABEL org.opencontainers.image.url https://goauthentik.io
LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
@@ -83,7 +83,7 @@ RUN apt-get update && \
# Required for runtime
apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
# Required for bootstrap & healtcheck
apt-get install -y --no-install-recommends runit && \
apt-get install -y --no-install-recommends curl runit && \
pip install --no-cache-dir -r /requirements.txt && \
apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev && \
apt-get autoremove --purge -y && \
@@ -96,13 +96,13 @@ RUN apt-get update && \

COPY ./authentik/ /authentik
COPY ./pyproject.toml /
COPY ./schemas /schemas
COPY ./xml /xml
COPY ./locale /locale
COPY ./tests /tests
COPY ./manage.py /
COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle
COPY --from=go-builder /work/authentik /bin/authentik
COPY --from=go-builder /work/authentik /authentik-proxy
COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=website-builder /work/website/help/ /website/help/

LICENSE (9 changes)
@@ -1,11 +1,6 @@
Copyright (c) 2023 Jens Langhammer
MIT License

Portions of this software are licensed as follows:
* All content residing under the "website/" directory of this repository is licensed under "Creative Commons: CC BY-SA 4.0 license".
* All content that resides under the "authentik/enterprise/" directory of this repository, if that directory exists, is licensed under the license defined in "authentik/enterprise/LICENSE".
* All client-side JavaScript (when served directly or after being compiled, arranged, augmented, or combined), is licensed under the "MIT Expat" license.
* All third party components incorporated into the authentik are licensed under the original license provided by the owner of the applicable component.
* Content outside of the above mentioned directories or restrictions above is available under the "MIT" license as defined below.
Copyright (c) 2022 Jens Langhammer

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

Makefile (71 changes)
@@ -3,21 +3,6 @@ PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.npm_version)
PY_SOURCES = authentik tests scripts lifecycle

CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
-I .github/codespell-words.txt \
-S 'web/src/locales/**' \
authentik \
internal \
cmd \
web/src \
website/src \
website/blog \
website/developer-docs \
website/docs \
website/integrations \
website/src

all: lint-fix lint test gen web

@@ -39,19 +24,28 @@ test:
coverage report

lint-fix:
isort authentik $(PY_SOURCES)
black authentik $(PY_SOURCES)
ruff authentik $(PY_SOURCES)
codespell -w $(CODESPELL_ARGS)
isort authentik tests scripts lifecycle
black authentik tests scripts lifecycle
codespell -I .github/codespell-words.txt -S 'web/src/locales/**' -w \
authentik \
internal \
cmd \
web/src \
website/src \
website/docs \
website/developer-docs

lint:
pylint $(PY_SOURCES)
bandit -r $(PY_SOURCES) -x node_modules
pylint authentik tests lifecycle
bandit -r authentik tests lifecycle -x node_modules
golangci-lint run -v

migrate:
python -m lifecycle.migrate

run:
go run -v ./cmd/server/

i18n-extract: i18n-extract-core web-extract

i18n-extract-core:
@@ -65,20 +59,15 @@ gen-build:
AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
AUTHENTIK_DEBUG=true ak spectacular --file schema.yml

gen-changelog:
git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
npx prettier --write changelog.md

gen-diff:
git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > old_schema.yml
git show $(shell git describe --abbrev=0):schema.yml > old_schema.yml
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-diff:2.1.0-beta.6 \
docker.io/openapitools/openapi-diff:2.1.0-beta.3 \
--markdown /local/diff.md \
/local/old_schema.yml /local/schema.yml
rm old_schema.yml
npx prettier --write diff.md

gen-clean:
rm -rf web/api/src/
@@ -88,7 +77,7 @@ gen-client-ts:
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/gen-ts-api \
@@ -101,21 +90,20 @@ gen-client-ts:
\cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api

gen-client-go:
mkdir -p ./gen-go-api ./gen-go-api/templates
wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./gen-go-api/config.yaml
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./gen-go-api/templates/README.mustache
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./gen-go-api/templates/go.mod.mustache
cp schema.yml ./gen-go-api/
wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O config.yaml
mkdir -p templates
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O templates/README.mustache
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O templates/go.mod.mustache
docker run \
--rm -v ${PWD}/gen-go-api:/local \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
-i /local/schema.yml \
-g go \
-o /local/ \
-o /local/gen-go-api \
-c /local/config.yaml
go mod edit -replace goauthentik.io/api/v3=./gen-go-api
rm -rf ./gen-go-api/config.yaml ./gen-go-api/templates/
rm -rf config.yaml ./templates/

gen-dev-config:
python -m scripts.generate_config
@@ -173,6 +161,7 @@ website-watch:

# These targets are use by GitHub actions to allow usage of matrix
# which makes the YAML File a lot smaller
PY_SOURCES=authentik tests lifecycle
ci--meta-debug:
python -V
node --version
@@ -183,12 +172,6 @@ ci-pylint: ci--meta-debug
ci-black: ci--meta-debug
black --check $(PY_SOURCES)

ci-ruff: ci--meta-debug
ruff check $(PY_SOURCES)

ci-codespell: ci--meta-debug
codespell $(CODESPELL_ARGS) -s

ci-isort: ci--meta-debug
isort --check $(PY_SOURCES)

README.md (20 changes)
@@ -15,13 +15,13 @@

## What is authentik?

Authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. Authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them.
authentik is an open-source Identity Provider focused on flexibility and versatility. You can use authentik in an existing environment to add support for new protocols. authentik is also a great solution for implementing signup/recovery/etc in your application, so you don't have to deal with it.

## Installation

For small/test setups it is recommended to use Docker Compose; refer to the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github).
For small/test setups it is recommended to use docker-compose, see the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github)

For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github).
For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github)

## Screenshots

@@ -32,16 +32,12 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h

## Development

See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github)
See [Development Documentation](https://goauthentik.io/developer-docs/?utm_source=github)

## Security

See [SECURITY.md](SECURITY.md)

## Adoption and Contributions

Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).

## Sponsors

This project is proudly sponsored by:
@@ -53,3 +49,11 @@ This project is proudly sponsored by:
</p>

DigitalOcean provides development and testing resources for authentik.

<p>
<a href="https://www.netlify.com">
<img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" />
</a>
</p>

Netlify hosts the [goauthentik.io](https://goauthentik.io) site.

@@ -6,8 +6,8 @@ Authentik takes security very seriously. We follow the rules of [responsible dis

| Version | Supported |
| --------- | ------------------ |
| 2023.2.x | :white_check_mark: |
| 2023.3.x | :white_check_mark: |
| 2022.12.x | :white_check_mark: |
| 2023.1.x | :white_check_mark: |

## Reporting a Vulnerability

@@ -2,7 +2,7 @@
from os import environ
from typing import Optional

__version__ = "2023.4.1"
__version__ = "2023.1.3"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

@@ -97,14 +97,8 @@ class SystemView(APIView):
permission_classes = [IsAdminUser]
pagination_class = None
filter_backends = []
serializer_class = SystemSerializer

@extend_schema(responses={200: SystemSerializer(many=False)})
def get(self, request: Request) -> Response:
"""Get system information."""
return Response(SystemSerializer(request).data)

@extend_schema(responses={200: SystemSerializer(many=False)})
def post(self, request: Request) -> Response:
"""Get system information."""
return Response(SystemSerializer(request).data)

@@ -50,8 +50,7 @@ class TaskSerializer(PassiveSerializer):
are pickled in cache. In that case, just delete the info"""
try:
return super().to_representation(instance)
# pylint: disable=broad-except
except Exception: # pragma: no cover
except AttributeError: # pragma: no cover
if isinstance(self.instance, list):
for inst in self.instance:
inst.delete()

@@ -18,4 +18,4 @@ def monitoring_set_workers(sender, **kwargs):
def monitoring_set_tasks(sender, **kwargs):
"""Set task gauges"""
for task in TaskInfo.all().values():
task.update_metrics()
task.set_prom_metrics()

@@ -9,7 +9,6 @@ from authentik.blueprints.tests import reconcile_app
from authentik.core.models import Group, User
from authentik.core.tasks import clean_expired_models
from authentik.events.monitored_tasks import TaskResultStatus
from authentik.lib.generators import generate_id


class TestAdminAPI(TestCase):
@@ -17,8 +16,8 @@ class TestAdminAPI(TestCase):

def setUp(self) -> None:
super().setUp()
self.user = User.objects.create(username=generate_id())
self.group = Group.objects.create(name=generate_id(), is_superuser=True)
self.user = User.objects.create(username="test-user")
self.group = Group.objects.create(name="superusers", is_superuser=True)
self.group.users.add(self.user)
self.group.save()
self.client.force_login(self.user)

@@ -32,17 +32,7 @@ def validate_auth(header: bytes) -> Optional[str]:

def bearer_auth(raw_header: bytes) -> Optional[User]:
"""raw_header in the Format of `Bearer ....`"""
user = auth_user_lookup(raw_header)
if not user:
return None
if not user.is_active:
raise AuthenticationFailed("Token invalid/expired")
return user


def auth_user_lookup(raw_header: bytes) -> Optional[User]:
"""raw_header in the Format of `Bearer ....`"""
from authentik.providers.oauth2.models import AccessToken
from authentik.providers.oauth2.models import RefreshToken

auth_credentials = validate_auth(raw_header)
if not auth_credentials:
@@ -55,8 +45,8 @@ def auth_user_lookup(raw_header: bytes) -> Optional[User]:
CTX_AUTH_VIA.set("api_token")
return key_token.user
# then try to auth via JWT
jwt_token = AccessToken.filter_not_expired(
token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
jwt_token = RefreshToken.filter_not_expired(
refresh_token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
).first()
if jwt_token:
# Double-check scopes, since they are saved in a single string
@ -7,13 +7,82 @@ API Browser - {{ tenant.branding_title }}
|
||||
{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
<script src="{% static 'dist/standalone/api-browser/index.js' %}?version={{ version }}" type="module"></script>
|
||||
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
|
||||
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
|
||||
<link rel="icon" href="{{ tenant.branding_favicon }}">
|
||||
<link rel="shortcut icon" href="{{ tenant.branding_favicon }}">
|
||||
<script type="module" src="{% static 'dist/rapidoc-min.js' %}"></script>
|
||||
<script>
|
||||
function getCookie(name) {
|
||||
let cookieValue = "";
|
||||
if (document.cookie && document.cookie !== "") {
|
||||
const cookies = document.cookie.split(";");
|
||||
for (let i = 0; i < cookies.length; i++) {
|
||||
const cookie = cookies[i].trim();
|
||||
// Does this cookie string begin with the name we want?
|
||||
if (cookie.substring(0, name.length + 1) === name + "=") {
|
||||
cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return cookieValue;
|
||||
}
|
||||
window.addEventListener('DOMContentLoaded', (event) => {
|
||||
const rapidocEl = document.querySelector('rapi-doc');
|
||||
rapidocEl.addEventListener('before-try', (e) => {
|
||||
e.detail.request.headers.append('X-authentik-CSRF', getCookie("authentik_csrf"));
|
||||
});
|
||||
});
|
||||
</script>
|
||||
<style>
|
||||
img.logo {
|
||||
width: 100%;
|
||||
padding: 1rem 0.5rem 1.5rem 0.5rem;
|
||||
min-height: 48px;
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
<ak-api-browser schemaPath="{{ path }}"></ak-api-browser>
|
||||
<rapi-doc
|
||||
spec-url="{{ path }}"
|
||||
heading-text=""
|
||||
theme="light"
|
||||
render-style="read"
|
||||
default-schema-tab="schema"
|
||||
primary-color="#fd4b2d"
|
||||
nav-bg-color="#212427"
|
||||
bg-color="#000000"
|
||||
text-color="#000000"
|
||||
nav-text-color="#ffffff"
|
||||
nav-hover-bg-color="#3c3f42"
|
||||
nav-accent-color="#4f5255"
|
||||
nav-hover-text-color="#ffffff"
|
||||
use-path-in-nav-bar="true"
|
||||
nav-item-spacing="relaxed"
|
||||
allow-server-selection="false"
|
||||
show-header="false"
|
||||
allow-spec-url-load="false"
|
||||
allow-spec-file-load="false">
|
||||
<div slot="nav-logo">
|
||||
<img alt="authentik Logo" class="logo" src="{% static 'dist/assets/icons/icon_left_brand.png' %}" />
|
||||
</div>
|
||||
</rapi-doc>
|
||||
<script>
|
||||
const rapidoc = document.querySelector("rapi-doc");
|
||||
const matcher = window.matchMedia("(prefers-color-scheme: light)");
|
||||
const changer = (ev) => {
|
||||
const style = getComputedStyle(document.documentElement);
|
||||
let bg, text = "";
|
||||
if (matcher.matches) {
|
||||
bg = style.getPropertyValue('--pf-global--BackgroundColor--light-300');
|
||||
text = style.getPropertyValue('--pf-global--Color--300');
|
||||
} else {
|
||||
bg = style.getPropertyValue('--ak-dark-background');
|
||||
text = style.getPropertyValue('--ak-dark-foreground');
|
||||
}
|
||||
rapidoc.attributes.getNamedItem("bg-color").value = bg.trim();
|
||||
rapidoc.attributes.getNamedItem("text-color").value = text.trim();
|
||||
rapidoc.requestUpdate();
|
||||
};
|
||||
matcher.addEventListener("change", changer);
|
||||
window.addEventListener("load", changer);
|
||||
</script>
|
||||
{% endblock %}
|
||||
|
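The template above wires a before-try hook into RapiDoc that copies the authentik_csrf cookie into an X-authentik-CSRF header. A rough Python equivalent of that behaviour, assuming an already authenticated requests session (the host and URL are placeholders):

import requests

session = requests.Session()
# ... log the session in first; it then carries the "authentik_csrf" cookie ...
csrf = session.cookies.get("authentik_csrf", "")
resp = session.get(
    "https://authentik.example.com/api/v3/",  # placeholder URL
    headers={"X-authentik-CSRF": csrf},       # same header the template sets
    timeout=10,
)
print(resp.status_code)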
@ -1,19 +1,18 @@
|
||||
"""Test API Authentication"""
|
||||
import json
|
||||
from base64 import b64encode
|
||||
|
||||
from django.conf import settings
|
||||
from django.test import TestCase
|
||||
from django.utils import timezone
|
||||
from guardian.shortcuts import get_anonymous_user
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
|
||||
from authentik.api.authentication import bearer_auth
|
||||
from authentik.blueprints.tests import reconcile_app
|
||||
from authentik.core.models import USER_ATTRIBUTE_SA, Token, TokenIntents
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
|
||||
from authentik.core.tests.utils import create_test_flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
|
||||
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
|
||||
from authentik.providers.oauth2.models import OAuth2Provider, RefreshToken
|
||||
|
||||
|
||||
class TestAPIAuth(TestCase):
|
||||
@ -37,18 +36,9 @@ class TestAPIAuth(TestCase):
|
||||
|
||||
def test_bearer_valid(self):
|
||||
"""Test valid token"""
|
||||
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=create_test_admin_user())
|
||||
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=get_anonymous_user())
|
||||
self.assertEqual(bearer_auth(f"Bearer {token.key}".encode()), token.user)
|
||||
|
||||
def test_bearer_valid_deactivated(self):
|
||||
"""Test valid token"""
|
||||
user = create_test_admin_user()
|
||||
user.is_active = False
|
||||
user.save()
|
||||
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=user)
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
bearer_auth(f"Bearer {token.key}".encode())
|
||||
|
||||
def test_managed_outpost(self):
|
||||
"""Test managed outpost"""
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
@ -65,28 +55,24 @@ class TestAPIAuth(TestCase):
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
|
||||
)
|
||||
refresh = AccessToken.objects.create(
|
||||
user=create_test_admin_user(),
|
||||
refresh = RefreshToken.objects.create(
|
||||
user=get_anonymous_user(),
|
||||
provider=provider,
|
||||
token=generate_id(),
|
||||
auth_time=timezone.now(),
|
||||
refresh_token=generate_id(),
|
||||
_scope=SCOPE_AUTHENTIK_API,
|
||||
_id_token=json.dumps({}),
|
||||
)
|
||||
self.assertEqual(bearer_auth(f"Bearer {refresh.token}".encode()), refresh.user)
|
||||
self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)
|
||||
|
||||
def test_jwt_missing_scope(self):
|
||||
"""Test valid JWT"""
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
|
||||
)
|
||||
refresh = AccessToken.objects.create(
|
||||
user=create_test_admin_user(),
|
||||
refresh = RefreshToken.objects.create(
|
||||
user=get_anonymous_user(),
|
||||
provider=provider,
|
||||
token=generate_id(),
|
||||
auth_time=timezone.now(),
|
||||
refresh_token=generate_id(),
|
||||
_scope="",
|
||||
_id_token=json.dumps({}),
|
||||
)
|
||||
with self.assertRaises(AuthenticationFailed):
|
||||
self.assertEqual(bearer_auth(f"Bearer {refresh.token}".encode()), refresh.user)
|
||||
self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)
|
||||
|
@ -4,7 +4,6 @@ from guardian.shortcuts import assign_perm
from rest_framework.test import APITestCase

from authentik.core.models import Application, User
from authentik.lib.generators import generate_id


class TestAPIDecorators(APITestCase):
@ -17,7 +16,7 @@ class TestAPIDecorators(APITestCase):
def test_obj_perm_denied(self):
"""Test object perm denied"""
self.client.force_login(self.user)
app = Application.objects.create(name=generate_id(), slug=generate_id())
app = Application.objects.create(name="denied", slug="denied")
response = self.client.get(
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
)
@ -26,7 +25,7 @@ class TestAPIDecorators(APITestCase):
def test_other_perm_denied(self):
"""Test other perm denied"""
self.client.force_login(self.user)
app = Application.objects.create(name=generate_id(), slug=generate_id())
app = Application.objects.create(name="denied", slug="denied")
assign_perm("authentik_core.view_application", self.user, app)
response = self.client.get(
reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
@ -29,7 +29,6 @@ class Capabilities(models.TextChoices):
CAN_GEO_IP = "can_geo_ip"
CAN_IMPERSONATE = "can_impersonate"
CAN_DEBUG = "can_debug"
IS_ENTERPRISE = "is_enterprise"


class ErrorReportingConfigSerializer(PassiveSerializer):
@ -71,8 +70,6 @@ class ConfigView(APIView):
caps.append(Capabilities.CAN_IMPERSONATE)
if settings.DEBUG: # pragma: no cover
caps.append(Capabilities.CAN_DEBUG)
if "authentik.enterprise" in settings.INSTALLED_APPS:
caps.append(Capabilities.IS_ENTERPRISE)
return caps

def get_config(self) -> ConfigSerializer:
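The hunk above toggles capability flags based on Django settings. A hedged sketch of how a client would read those flags; the /api/v3/root/config/ path is an assumption based on a default authentik deployment, not taken from this diff:

import requests

resp = requests.get(
    "https://authentik.example.com/api/v3/root/config/",  # assumed path
    headers={"Authorization": "Bearer <api-token>"},       # placeholder token
    timeout=10,
)
capabilities = resp.json().get("capabilities", [])
if "can_impersonate" in capabilities:
    print("impersonation is enabled on this install")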
@ -50,17 +50,10 @@ from authentik.policies.reputation.api import ReputationPolicyViewSet, Reputatio
|
||||
from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet
|
||||
from authentik.providers.oauth2.api.providers import OAuth2ProviderViewSet
|
||||
from authentik.providers.oauth2.api.scopes import ScopeMappingViewSet
|
||||
from authentik.providers.oauth2.api.tokens import (
|
||||
AccessTokenViewSet,
|
||||
AuthorizationCodeViewSet,
|
||||
RefreshTokenViewSet,
|
||||
)
|
||||
from authentik.providers.oauth2.api.tokens import AuthorizationCodeViewSet, RefreshTokenViewSet
|
||||
from authentik.providers.proxy.api import ProxyOutpostConfigViewSet, ProxyProviderViewSet
|
||||
from authentik.providers.radius.api import RadiusOutpostConfigViewSet, RadiusProviderViewSet
|
||||
from authentik.providers.saml.api.property_mapping import SAMLPropertyMappingViewSet
|
||||
from authentik.providers.saml.api.providers import SAMLProviderViewSet
|
||||
from authentik.providers.scim.api.property_mapping import SCIMMappingViewSet
|
||||
from authentik.providers.scim.api.providers import SCIMProviderViewSet
|
||||
from authentik.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
|
||||
from authentik.sources.oauth.api.source import OAuthSourceViewSet
|
||||
from authentik.sources.oauth.api.source_connection import UserOAuthSourceConnectionViewSet
|
||||
@ -129,7 +122,6 @@ router.register("outposts/service_connections/docker", DockerServiceConnectionVi
|
||||
router.register("outposts/service_connections/kubernetes", KubernetesServiceConnectionViewSet)
|
||||
router.register("outposts/proxy", ProxyOutpostConfigViewSet)
|
||||
router.register("outposts/ldap", LDAPOutpostConfigViewSet)
|
||||
router.register("outposts/radius", RadiusOutpostConfigViewSet)
|
||||
|
||||
router.register("flows/instances", FlowViewSet)
|
||||
router.register("flows/bindings", FlowStageBindingViewSet)
|
||||
@ -167,19 +159,15 @@ router.register("providers/ldap", LDAPProviderViewSet)
|
||||
router.register("providers/proxy", ProxyProviderViewSet)
|
||||
router.register("providers/oauth2", OAuth2ProviderViewSet)
|
||||
router.register("providers/saml", SAMLProviderViewSet)
|
||||
router.register("providers/scim", SCIMProviderViewSet)
|
||||
router.register("providers/radius", RadiusProviderViewSet)
|
||||
|
||||
router.register("oauth2/authorization_codes", AuthorizationCodeViewSet)
|
||||
router.register("oauth2/refresh_tokens", RefreshTokenViewSet)
|
||||
router.register("oauth2/access_tokens", AccessTokenViewSet)
|
||||
|
||||
router.register("propertymappings/all", PropertyMappingViewSet)
|
||||
router.register("propertymappings/ldap", LDAPPropertyMappingViewSet)
|
||||
router.register("propertymappings/saml", SAMLPropertyMappingViewSet)
|
||||
router.register("propertymappings/scope", ScopeMappingViewSet)
|
||||
router.register("propertymappings/notification", NotificationWebhookMappingViewSet)
|
||||
router.register("propertymappings/scim", SCIMMappingViewSet)
|
||||
|
||||
router.register("authenticators/all", DeviceViewSet, basename="device")
|
||||
router.register("authenticators/duo", DuoDeviceViewSet)
|
||||
|
@ -49,8 +49,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
context = self.instance.context if self.instance else {}
valid, logs = Importer(content, context).validate()
if not valid:
text_logs = "\n".join([x["event"] for x in logs])
raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs}))
raise ValidationError(_("Failed to validate blueprint"), *[x["msg"] for x in logs])
return content

def validate(self, attrs: dict) -> dict:
@ -59,6 +58,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
return super().validate(attrs)

class Meta:

model = BlueprintInstance
fields = [
"pk",
@ -55,12 +55,11 @@ class AuthentikBlueprintsConfig(ManagedAppConfig):
"""Load v1 tasks"""
self.import_module("authentik.blueprints.v1.tasks")

def reconcile_blueprints_discovery(self):
def reconcile_blueprints_discover(self):
"""Run blueprint discovery"""
from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints
from authentik.blueprints.v1.tasks import blueprints_discover

blueprints_discovery.delay()
clear_failed_blueprints.delay()
blueprints_discover.delay()

def import_models(self):
super().import_models()
@ -19,8 +19,10 @@ class Command(BaseCommand):
for blueprint_path in options.get("blueprints", []):
content = BlueprintInstance(path=blueprint_path).retrieve()
importer = Importer(content)
valid, _ = importer.validate()
valid, logs = importer.validate()
if not valid:
for log in logs:
getattr(LOGGER, log.pop("log_level"))(**log)
self.stderr.write("blueprint invalid")
sys_exit(1)
importer.apply()
@ -1,17 +1,12 @@
|
||||
"""Generate JSON Schema for blueprints"""
|
||||
from json import dumps
|
||||
from typing import Any
|
||||
from json import dumps, loads
|
||||
from pathlib import Path
|
||||
|
||||
from django.core.management.base import BaseCommand, no_translations
|
||||
from django.db.models import Model
|
||||
from drf_jsonschema_serializer.convert import field_to_converter
|
||||
from rest_framework.fields import Field, JSONField, UUIDField
|
||||
from rest_framework.serializers import Serializer
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.blueprints.v1.importer import is_model_allowed
|
||||
from authentik.blueprints.v1.meta.registry import registry
|
||||
from authentik.lib.models import SerializerModel
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
@ -21,138 +16,21 @@ class Command(BaseCommand):
|
||||
|
||||
schema: dict
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.schema = {
|
||||
"$schema": "http://json-schema.org/draft-07/schema",
|
||||
"$id": "https://goauthentik.io/blueprints/schema.json",
|
||||
"type": "object",
|
||||
"title": "authentik Blueprint schema",
|
||||
"required": ["version", "entries"],
|
||||
"properties": {
|
||||
"version": {
|
||||
"$id": "#/properties/version",
|
||||
"type": "integer",
|
||||
"title": "Blueprint version",
|
||||
"default": 1,
|
||||
},
|
||||
"metadata": {
|
||||
"$id": "#/properties/metadata",
|
||||
"type": "object",
|
||||
"required": ["name"],
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"labels": {"type": "object", "additionalProperties": {"type": "string"}},
|
||||
},
|
||||
},
|
||||
"context": {
|
||||
"$id": "#/properties/context",
|
||||
"type": "object",
|
||||
"additionalProperties": True,
|
||||
},
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"oneOf": [],
|
||||
},
|
||||
},
|
||||
},
|
||||
"$defs": {},
|
||||
}
|
||||
|
||||
@no_translations
|
||||
def handle(self, *args, **options):
|
||||
"""Generate JSON Schema for blueprints"""
|
||||
self.build()
|
||||
self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default))
|
||||
path = Path(__file__).parent.joinpath("./schema_template.json")
|
||||
with open(path, "r", encoding="utf-8") as _template_file:
|
||||
self.schema = loads(_template_file.read())
|
||||
self.set_model_allowed()
|
||||
self.stdout.write(dumps(self.schema, indent=4))
|
||||
|
||||
@staticmethod
|
||||
def json_default(value: Any) -> Any:
|
||||
"""Helper that handles gettext_lazy strings that JSON doesn't handle"""
|
||||
return str(value)
|
||||
|
||||
def build(self):
|
||||
"""Build all models into the schema"""
|
||||
def set_model_allowed(self):
|
||||
"""Set model enum"""
|
||||
model_names = []
|
||||
for model in registry.get_models():
|
||||
if model._meta.abstract:
|
||||
continue
|
||||
if not is_model_allowed(model):
|
||||
continue
|
||||
model_instance: Model = model()
|
||||
if not isinstance(model_instance, SerializerModel):
|
||||
continue
|
||||
serializer = model_instance.serializer()
|
||||
model_path = f"{model._meta.app_label}.{model._meta.model_name}"
|
||||
self.schema["properties"]["entries"]["items"]["oneOf"].append(
|
||||
self.template_entry(model_path, serializer)
|
||||
)
|
||||
|
||||
def template_entry(self, model_path: str, serializer: Serializer) -> dict:
|
||||
"""Template entry for a single model"""
|
||||
model_schema = self.to_jsonschema(serializer)
|
||||
model_schema["required"] = []
|
||||
def_name = f"model_{model_path}"
|
||||
def_path = f"#/$defs/{def_name}"
|
||||
self.schema["$defs"][def_name] = model_schema
|
||||
return {
|
||||
"type": "object",
|
||||
"required": ["model", "attrs"],
|
||||
"properties": {
|
||||
"model": {"const": model_path},
|
||||
"id": {"type": "string"},
|
||||
"state": {
|
||||
"type": "string",
|
||||
"enum": ["absent", "present", "created"],
|
||||
"default": "present",
|
||||
},
|
||||
"conditions": {"type": "array", "items": {"type": "boolean"}},
|
||||
"attrs": {"$ref": def_path},
|
||||
"identifiers": {"$ref": def_path},
|
||||
},
|
||||
}
|
||||
|
||||
def field_to_jsonschema(self, field: Field) -> dict:
|
||||
"""Convert a single field to json schema"""
|
||||
if isinstance(field, Serializer):
|
||||
result = self.to_jsonschema(field)
|
||||
else:
|
||||
try:
|
||||
converter = field_to_converter[field]
|
||||
result = converter.convert(field)
|
||||
except KeyError:
|
||||
if isinstance(field, JSONField):
|
||||
result = {"type": "object", "additionalProperties": True}
|
||||
elif isinstance(field, UUIDField):
|
||||
result = {"type": "string", "format": "uuid"}
|
||||
else:
|
||||
raise
|
||||
if field.label:
|
||||
result["title"] = field.label
|
||||
if field.help_text:
|
||||
result["description"] = field.help_text
|
||||
return self.clean_result(result)
|
||||
|
||||
def clean_result(self, result: dict) -> dict:
|
||||
"""Remove enumNames from result, recursively"""
|
||||
result.pop("enumNames", None)
|
||||
for key, value in result.items():
|
||||
if isinstance(value, dict):
|
||||
result[key] = self.clean_result(value)
|
||||
return result
|
||||
|
||||
def to_jsonschema(self, serializer: Serializer) -> dict:
|
||||
"""Convert serializer to json schema"""
|
||||
properties = {}
|
||||
required = []
|
||||
for name, field in serializer.fields.items():
|
||||
if field.read_only:
|
||||
continue
|
||||
sub_schema = self.field_to_jsonschema(field)
|
||||
if field.required:
|
||||
required.append(name)
|
||||
properties[name] = sub_schema
|
||||
|
||||
result = {"type": "object", "properties": properties}
|
||||
if required:
|
||||
result["required"] = required
|
||||
return result
|
||||
model_names.append(f"{model._meta.app_label}.{model._meta.model_name}")
|
||||
model_names.sort()
|
||||
self.schema["properties"]["entries"]["items"]["properties"]["model"]["enum"] = model_names
|
||||
|
105
authentik/blueprints/management/commands/schema_template.json
Normal file
@ -0,0 +1,105 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema",
|
||||
"$id": "http://example.com/example.json",
|
||||
"type": "object",
|
||||
"title": "authentik Blueprint schema",
|
||||
"default": {},
|
||||
"required": [
|
||||
"version",
|
||||
"entries"
|
||||
],
|
||||
"properties": {
|
||||
"version": {
|
||||
"$id": "#/properties/version",
|
||||
"type": "integer",
|
||||
"title": "Blueprint version",
|
||||
"default": 1
|
||||
},
|
||||
"metadata": {
|
||||
"$id": "#/properties/metadata",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name"
|
||||
],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"labels": {
|
||||
"type": "object"
|
||||
}
|
||||
}
|
||||
},
|
||||
"context": {
|
||||
"$id": "#/properties/context",
|
||||
"type": "object",
|
||||
"additionalProperties": true
|
||||
},
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$id": "#entry",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"model"
|
||||
],
|
||||
"properties": {
|
||||
"model": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"placeholder"
|
||||
]
|
||||
},
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"state": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"absent",
|
||||
"present",
|
||||
"created"
|
||||
],
|
||||
"default": "present"
|
||||
},
|
||||
"conditions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"attrs": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "Commonly available field, may not exist on all models"
|
||||
}
|
||||
},
|
||||
"default": {},
|
||||
"additionalProperties": true
|
||||
},
|
||||
"identifiers": {
|
||||
"type": "object",
|
||||
"default": {},
|
||||
"properties": {
|
||||
"pk": {
|
||||
"description": "Commonly available field, may not exist on all models",
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "number"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
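The schema_template.json added above is the static skeleton that the management command fills in (set_model_allowed replaces the "placeholder" model enum). As an illustration only, a generated schema can be checked against a minimal blueprint with the third-party jsonschema package, which this diff does not itself use:

import json

from jsonschema import validate  # external package, illustration only

with open("schema.json", "r", encoding="utf-8") as schema_file:
    schema = json.load(schema_file)

blueprint = {
    "version": 1,
    "metadata": {"name": "example"},
    "entries": [
        # model values must match the enum filled in by set_model_allowed,
        # i.e. lowercase "app_label.model_name"
        {"model": "authentik_core.group", "attrs": {"name": "example-group"}},
    ],
}
validate(instance=blueprint, schema=schema)  # raises ValidationError on mismatch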
@ -6,6 +6,7 @@ from pathlib import Path
|
||||
import django.contrib.postgres.fields
|
||||
from dacite.core import from_dict
|
||||
from django.apps.registry import Apps
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
from yaml import load
|
||||
@ -14,7 +15,7 @@ from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM
|
||||
from authentik.lib.config import CONFIG
|
||||
|
||||
|
||||
def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
|
||||
def check_blueprint_v1_file(BlueprintInstance: type["BlueprintInstance"], path: Path):
|
||||
"""Check if blueprint should be imported"""
|
||||
from authentik.blueprints.models import BlueprintInstanceStatus
|
||||
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
|
||||
@ -70,6 +71,7 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEdit
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [("authentik_flows", "0001_initial")]
|
||||
@ -84,12 +86,7 @@ class Migration(migrations.Migration):
|
||||
"managed",
|
||||
models.TextField(
|
||||
default=None,
|
||||
help_text=(
|
||||
"Objects which are managed by authentik. These objects are created and"
|
||||
" updated automatically. This is flag only indicates that an object can"
|
||||
" be overwritten by migrations. You can still modify the objects via"
|
||||
" the API, but expect changes to be overwritten in a later update."
|
||||
),
|
||||
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
|
||||
null=True,
|
||||
unique=True,
|
||||
verbose_name="Managed by authentik",
|
||||
|
@ -4,6 +4,7 @@ from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("authentik_blueprints", "0001_initial"),
]
@ -1,20 +0,0 @@
# Generated by Django 4.1.7 on 2023-04-28 10:49

from django.db import migrations, models

from authentik.lib.migrations import fallback_names


class Migration(migrations.Migration):
dependencies = [
("authentik_blueprints", "0002_blueprintinstance_content"),
]

operations = [
migrations.RunPython(fallback_names("authentik_blueprints", "blueprintinstance", "name")),
migrations.AlterField(
model_name="blueprintinstance",
name="name",
field=models.TextField(unique=True),
),
]
@ -29,15 +29,18 @@ class ManagedModel(models.Model):
|
||||
null=True,
|
||||
verbose_name=_("Managed by authentik"),
|
||||
help_text=_(
|
||||
"Objects which are managed by authentik. These objects are created and updated "
|
||||
"automatically. This is flag only indicates that an object can be overwritten by "
|
||||
"migrations. You can still modify the objects via the API, but expect changes "
|
||||
"to be overwritten in a later update."
|
||||
(
|
||||
"Objects which are managed by authentik. These objects are created and updated "
|
||||
"automatically. This is flag only indicates that an object can be overwritten by "
|
||||
"migrations. You can still modify the objects via the API, but expect changes "
|
||||
"to be overwritten in a later update."
|
||||
)
|
||||
),
|
||||
unique=True,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
|
||||
abstract = True
|
||||
|
||||
|
||||
@ -57,7 +60,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
|
||||
instance_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
|
||||
|
||||
name = models.TextField(unique=True)
|
||||
name = models.TextField()
|
||||
metadata = models.JSONField(default=dict)
|
||||
path = models.TextField(default="", blank=True)
|
||||
content = models.TextField(default="", blank=True)
|
||||
@ -106,6 +109,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
return f"Blueprint Instance {self.name}"
|
||||
|
||||
class Meta:
|
||||
|
||||
verbose_name = _("Blueprint Instance")
|
||||
verbose_name_plural = _("Blueprint Instances")
|
||||
unique_together = (
|
||||
|
@ -5,13 +5,8 @@ from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
"blueprints_v1_discover": {
"task": "authentik.blueprints.v1.tasks.blueprints_discovery",
"task": "authentik.blueprints.v1.tasks.blueprints_discover",
"schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"),
"options": {"queue": "authentik_scheduled"},
},
"blueprints_v1_cleanup": {
"task": "authentik.blueprints.v1.tasks.clear_failed_blueprints",
"schedule": crontab(minute=fqdn_rand("blueprints_v1_cleanup"), hour="*"),
"options": {"queue": "authentik_scheduled"},
},
}
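The schedule above uses fqdn_rand so that each install runs the hourly blueprint tasks at a host-specific minute instead of every deployment firing at once. A rough sketch of the idea only (the real helper lives in authentik.lib.utils.time and may be implemented differently):

from hashlib import sha256
from socket import getfqdn


def fqdn_rand_sketch(seed: str, stop: int = 60) -> int:
    """Deterministic pseudo-random value in [0, stop) derived from FQDN + seed."""
    digest = sha256(f"{getfqdn()}-{seed}".encode()).hexdigest()
    return int(digest, 16) % stop


print(fqdn_rand_sketch("blueprints_v1_discover"))  # stable per host, e.g. 37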
@ -1,5 +1,6 @@
"""Blueprint helpers"""
from functools import wraps
from pathlib import Path
from typing import Callable

from django.apps import apps
@ -44,3 +45,13 @@ def reconcile_app(app_name: str):
return wrapper

return wrapper_outer


def load_yaml_fixture(path: str, **kwargs) -> str:
"""Load yaml fixture, optionally formatting it with kwargs"""
with open(Path(__file__).resolve().parent / Path(path), "r", encoding="utf-8") as _fixture:
fixture = _fixture.read()
try:
return fixture % kwargs
except TypeError:
return fixture
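The load_yaml_fixture helper added above fills %-style placeholders in a fixture before returning it. Its intended use matches the test changes further down in this diff, roughly:

# Mirrors how the tests below call the helper (requires a Django test context).
from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.importer import Importer
from authentik.lib.generators import generate_id

flow_slug = generate_id()
import_yaml = load_yaml_fixture("fixtures/state_present.yaml", id=flow_slug)
importer = Importer(import_yaml)
assert importer.validate()[0]
assert importer.apply()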
@ -4,7 +4,6 @@ entries:
pk: cb954fd4-65a5-4ad9-b1ee-180ee9559cf4
model: authentik_stages_prompt.prompt
attrs:
name: qwerweqrq
field_key: username
label: Username
type: username
@ -13,7 +13,7 @@ from authentik.tenants.models import Tenant
class TestPackaged(TransactionTestCase):
"""Empty class, test methods are added dynamically"""

@apply_blueprint("default/default-tenant.yaml")
@apply_blueprint("default/90-default-tenant.yaml")
def test_decorator_static(self):
"""Test @apply_blueprint decorator"""
self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())
@ -3,12 +3,12 @@ from os import environ
|
||||
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from authentik.blueprints.tests import load_yaml_fixture
|
||||
from authentik.blueprints.v1.exporter import FlowExporter
|
||||
from authentik.blueprints.v1.importer import Importer, transaction_rollback
|
||||
from authentik.core.models import Group
|
||||
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
from authentik.policies.expression.models import ExpressionPolicy
|
||||
from authentik.policies.models import PolicyBinding
|
||||
from authentik.sources.oauth.models import OAuthSource
|
||||
@ -24,14 +24,18 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
importer = Importer('{"version": 3}')
|
||||
self.assertFalse(importer.validate()[0])
|
||||
importer = Importer(
|
||||
'{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
|
||||
'"model": "authentik_core.User"}]}'
|
||||
(
|
||||
'{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
|
||||
'"model": "authentik_core.User"}]}'
|
||||
)
|
||||
)
|
||||
self.assertFalse(importer.validate()[0])
|
||||
importer = Importer(
|
||||
'{"version": 1, "entries": [{"attrs": {"name": "test"}, '
|
||||
'"identifiers": {}, '
|
||||
'"model": "authentik_core.Group"}]}'
|
||||
(
|
||||
'{"version": 1, "entries": [{"attrs": {"name": "test"}, '
|
||||
'"identifiers": {}, '
|
||||
'"model": "authentik_core.Group"}]}'
|
||||
)
|
||||
)
|
||||
self.assertFalse(importer.validate()[0])
|
||||
|
||||
@ -55,9 +59,11 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
)
|
||||
|
||||
importer = Importer(
|
||||
'{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
|
||||
'{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
|
||||
'["other_value"]}}, "model": "authentik_core.Group"}]}'
|
||||
(
|
||||
'{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
|
||||
'{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
|
||||
'["other_value"]}}, "model": "authentik_core.Group"}]}'
|
||||
)
|
||||
)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
@ -113,14 +119,14 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
"""Test export and import it twice"""
|
||||
count_initial = Prompt.objects.filter(field_key="username").count()
|
||||
|
||||
importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
|
||||
importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
|
||||
count_before = Prompt.objects.filter(field_key="username").count()
|
||||
self.assertEqual(count_initial + 1, count_before)
|
||||
|
||||
importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
|
||||
importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
|
||||
self.assertTrue(importer.apply())
|
||||
|
||||
self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)
|
||||
@ -130,7 +136,7 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
|
||||
Group.objects.filter(name="test").delete()
|
||||
environ["foo"] = generate_id()
|
||||
importer = Importer(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
|
||||
importer = Importer(load_yaml_fixture("fixtures/tags.yaml"), {"bar": "baz"})
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()
|
||||
@ -256,21 +262,15 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
with transaction_rollback():
|
||||
# First stage fields
|
||||
username_prompt = Prompt.objects.create(
|
||||
name=generate_id(),
|
||||
field_key="username",
|
||||
label="Username",
|
||||
order=0,
|
||||
type=FieldTypes.TEXT,
|
||||
field_key="username", label="Username", order=0, type=FieldTypes.TEXT
|
||||
)
|
||||
password = Prompt.objects.create(
|
||||
name=generate_id(),
|
||||
field_key="password",
|
||||
label="Password",
|
||||
order=1,
|
||||
type=FieldTypes.PASSWORD,
|
||||
)
|
||||
password_repeat = Prompt.objects.create(
|
||||
name=generate_id(),
|
||||
field_key="password_repeat",
|
||||
label="Password (repeat)",
|
||||
order=2,
|
||||
|
@ -67,7 +67,4 @@ class TestBlueprintsV1API(APITestCase):
},
)
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(
res.content.decode(),
{"content": ["Failed to validate blueprint: Invalid blueprint version"]},
)
self.assertJSONEqual(res.content.decode(), {"content": ["Failed to validate blueprint"]})
@ -1,10 +1,10 @@
|
||||
"""Test blueprints v1"""
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from authentik.blueprints.tests import load_yaml_fixture
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
|
||||
|
||||
class TestBlueprintsV1Conditions(TransactionTestCase):
|
||||
@ -14,7 +14,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
|
||||
"""Test conditions fulfilled"""
|
||||
flow_slug1 = generate_id()
|
||||
flow_slug2 = generate_id()
|
||||
import_yaml = load_fixture(
|
||||
import_yaml = load_yaml_fixture(
|
||||
"fixtures/conditions_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
|
||||
)
|
||||
|
||||
@ -31,7 +31,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
|
||||
"""Test conditions not fulfilled"""
|
||||
flow_slug1 = generate_id()
|
||||
flow_slug2 = generate_id()
|
||||
import_yaml = load_fixture(
|
||||
import_yaml = load_yaml_fixture(
|
||||
"fixtures/conditions_not_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
|
||||
)
|
||||
|
||||
|
@ -1,10 +1,10 @@
|
||||
"""Test blueprints v1"""
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
from authentik.blueprints.tests import load_yaml_fixture
|
||||
from authentik.blueprints.v1.importer import Importer
|
||||
from authentik.flows.models import Flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.tests.utils import load_fixture
|
||||
|
||||
|
||||
class TestBlueprintsV1State(TransactionTestCase):
|
||||
@ -13,7 +13,7 @@ class TestBlueprintsV1State(TransactionTestCase):
|
||||
def test_state_present(self):
|
||||
"""Test state present"""
|
||||
flow_slug = generate_id()
|
||||
import_yaml = load_fixture("fixtures/state_present.yaml", id=flow_slug)
|
||||
import_yaml = load_yaml_fixture("fixtures/state_present.yaml", id=flow_slug)
|
||||
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
@ -39,7 +39,7 @@ class TestBlueprintsV1State(TransactionTestCase):
|
||||
def test_state_created(self):
|
||||
"""Test state created"""
|
||||
flow_slug = generate_id()
|
||||
import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)
|
||||
import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)
|
||||
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
@ -65,7 +65,7 @@ class TestBlueprintsV1State(TransactionTestCase):
|
||||
def test_state_absent(self):
|
||||
"""Test state absent"""
|
||||
flow_slug = generate_id()
|
||||
import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)
|
||||
import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)
|
||||
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
@ -74,7 +74,7 @@ class TestBlueprintsV1State(TransactionTestCase):
|
||||
flow: Flow = Flow.objects.filter(slug=flow_slug).first()
|
||||
self.assertEqual(flow.slug, flow_slug)
|
||||
|
||||
import_yaml = load_fixture("fixtures/state_absent.yaml", id=flow_slug)
|
||||
import_yaml = load_yaml_fixture("fixtures/state_absent.yaml", id=flow_slug)
|
||||
importer = Importer(import_yaml)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
|
@ -6,7 +6,7 @@ from django.test import TransactionTestCase
|
||||
from yaml import dump
|
||||
|
||||
from authentik.blueprints.models import BlueprintInstance, BlueprintInstanceStatus
|
||||
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discovery, blueprints_find
|
||||
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discover, blueprints_find
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
@ -53,7 +53,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
|
||||
file.seek(0)
|
||||
file_hash = sha512(file.read().encode()).hexdigest()
|
||||
file.flush()
|
||||
blueprints_discovery() # pylint: disable=no-value-for-parameter
|
||||
blueprints_discover() # pylint: disable=no-value-for-parameter
|
||||
instance = BlueprintInstance.objects.filter(name=blueprint_id).first()
|
||||
self.assertEqual(instance.last_applied_hash, file_hash)
|
||||
self.assertEqual(
|
||||
@ -81,7 +81,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
|
||||
)
|
||||
)
|
||||
file.flush()
|
||||
blueprints_discovery() # pylint: disable=no-value-for-parameter
|
||||
blueprints_discover() # pylint: disable=no-value-for-parameter
|
||||
blueprint = BlueprintInstance.objects.filter(name="foo").first()
|
||||
self.assertEqual(
|
||||
blueprint.last_applied_hash,
|
||||
@ -106,7 +106,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
|
||||
)
|
||||
)
|
||||
file.flush()
|
||||
blueprints_discovery() # pylint: disable=no-value-for-parameter
|
||||
blueprints_discover() # pylint: disable=no-value-for-parameter
|
||||
blueprint.refresh_from_db()
|
||||
self.assertEqual(
|
||||
blueprint.last_applied_hash,
|
||||
|
@ -7,7 +7,6 @@ from dacite.config import Config
|
||||
from dacite.core import from_dict
|
||||
from dacite.exceptions import DaciteError
|
||||
from deepmerge import always_merger
|
||||
from django.core.exceptions import FieldError
|
||||
from django.db import transaction
|
||||
from django.db.models import Model
|
||||
from django.db.models.query_utils import Q
|
||||
@ -40,10 +39,6 @@ from authentik.lib.models import SerializerModel
|
||||
from authentik.outposts.models import OutpostServiceConnection
|
||||
from authentik.policies.models import Policy, PolicyBindingModel
|
||||
|
||||
# Context set when the serializer is created in a blueprint context
|
||||
# Update website/developer-docs/blueprints/v1/models.md when used
|
||||
SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"
|
||||
|
||||
|
||||
def is_model_allowed(model: type[Model]) -> bool:
|
||||
"""Check if model is allowed"""
|
||||
@ -162,12 +157,7 @@ class Importer:
|
||||
raise EntryInvalidError(f"Model {model} not allowed")
|
||||
if issubclass(model, BaseMetaModel):
|
||||
serializer_class: type[Serializer] = model.serializer()
|
||||
serializer = serializer_class(
|
||||
data=entry.get_attrs(self.__import),
|
||||
context={
|
||||
SERIALIZER_CONTEXT_BLUEPRINT: entry,
|
||||
},
|
||||
)
|
||||
serializer = serializer_class(data=entry.get_attrs(self.__import))
|
||||
try:
|
||||
serializer.is_valid(raise_exception=True)
|
||||
except ValidationError as exc:
|
||||
@ -191,10 +181,7 @@ class Importer:
|
||||
if not query:
|
||||
raise EntryInvalidError("No or invalid identifiers")
|
||||
|
||||
try:
|
||||
existing_models = model.objects.filter(query)
|
||||
except FieldError as exc:
|
||||
raise EntryInvalidError(f"Invalid identifier field: {exc}") from exc
|
||||
existing_models = model.objects.filter(query)
|
||||
|
||||
serializer_kwargs = {}
|
||||
model_instance = existing_models.first()
|
||||
@ -226,12 +213,7 @@ class Importer:
|
||||
always_merger.merge(full_data, updated_identifiers)
|
||||
serializer_kwargs["data"] = full_data
|
||||
|
||||
serializer: Serializer = model().serializer(
|
||||
context={
|
||||
SERIALIZER_CONTEXT_BLUEPRINT: entry,
|
||||
},
|
||||
**serializer_kwargs,
|
||||
)
|
||||
serializer: Serializer = model().serializer(**serializer_kwargs)
|
||||
try:
|
||||
serializer.is_valid(raise_exception=True)
|
||||
except ValidationError as exc:
|
||||
@ -249,7 +231,8 @@ class Importer:
|
||||
raise IntegrityError
|
||||
except IntegrityError:
|
||||
return False
|
||||
self.logger.debug("Committing changes")
|
||||
else:
|
||||
self.logger.debug("Committing changes")
|
||||
return True
|
||||
|
||||
def _apply_models(self) -> bool:
|
||||
@ -299,7 +282,7 @@ class Importer:
|
||||
orig_import = deepcopy(self.__import)
|
||||
if self.__import.version != 1:
|
||||
self.logger.warning("Invalid blueprint version")
|
||||
return False, [{"event": "Invalid blueprint version"}]
|
||||
return False, []
|
||||
with (
|
||||
transaction_rollback(),
|
||||
capture_logs() as logs,
|
||||
|
@ -3,4 +3,3 @@
LABEL_AUTHENTIK_SYSTEM = "blueprints.goauthentik.io/system"
LABEL_AUTHENTIK_INSTANTIATE = "blueprints.goauthentik.io/instantiate"
LABEL_AUTHENTIK_GENERATED = "blueprints.goauthentik.io/generated"
LABEL_AUTHENTIK_DESCRIPTION = "blueprints.goauthentik.io/description"
@ -56,4 +56,5 @@ class MetaApplyBlueprint(BaseMetaModel):
return ApplyBlueprintMetaSerializer

class Meta:

abstract = True
@ -14,6 +14,7 @@ class BaseMetaModel(Model):
raise NotImplementedError

class Meta:

abstract = True
@ -76,7 +76,7 @@ class BlueprintEventHandler(FileSystemEventHandler):
|
||||
return
|
||||
if isinstance(event, FileCreatedEvent):
|
||||
LOGGER.debug("new blueprint file created, starting discovery")
|
||||
blueprints_discovery.delay()
|
||||
blueprints_discover.delay()
|
||||
if isinstance(event, FileModifiedEvent):
|
||||
path = Path(event.src_path)
|
||||
root = Path(CONFIG.y("blueprints_dir")).absolute()
|
||||
@ -101,10 +101,7 @@ def blueprints_find():
|
||||
"""Find blueprints and return valid ones"""
|
||||
blueprints = []
|
||||
root = Path(CONFIG.y("blueprints_dir"))
|
||||
for path in root.rglob("**/*.yaml"):
|
||||
# Check if any part in the path starts with a dot and assume a hidden file
|
||||
if any(part for part in path.parts if part.startswith(".")):
|
||||
continue
|
||||
for path in root.glob("**/*.yaml"):
|
||||
LOGGER.debug("found blueprint", path=str(path))
|
||||
with open(path, "r", encoding="utf-8") as blueprint_file:
|
||||
try:
|
||||
@ -125,7 +122,7 @@ def blueprints_find():
|
||||
)
|
||||
blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
|
||||
blueprints.append(blueprint)
|
||||
LOGGER.debug(
|
||||
LOGGER.info(
|
||||
"parsed & loaded blueprint",
|
||||
hash=file_hash,
|
||||
path=str(path),
|
||||
@ -137,7 +134,7 @@ def blueprints_find():
|
||||
throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
|
||||
)
|
||||
@prefill_task
|
||||
def blueprints_discovery(self: MonitoredTask):
|
||||
def blueprints_discover(self: MonitoredTask):
|
||||
"""Find blueprints and check if they need to be created in the database"""
|
||||
count = 0
|
||||
for blueprint in blueprints_find():
|
||||
@ -222,14 +219,3 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
|
||||
finally:
|
||||
if instance:
|
||||
instance.save()
|
||||
|
||||
|
||||
@CELERY_APP.task()
|
||||
def clear_failed_blueprints():
|
||||
"""Remove blueprints which couldn't be fetched"""
|
||||
# Exclude OCI blueprints as those might be temporarily unavailable
|
||||
for blueprint in BlueprintInstance.objects.exclude(path__startswith="oci://"):
|
||||
try:
|
||||
blueprint.retrieve()
|
||||
except BlueprintRetrievalFailed:
|
||||
blueprint.delete()
|
||||
|
@ -37,6 +37,7 @@ from authentik.lib.utils.file import (
|
||||
from authentik.policies.api.exec import PolicyTestResultSerializer
|
||||
from authentik.policies.engine import PolicyEngine
|
||||
from authentik.policies.types import PolicyResult
|
||||
from authentik.stages.user_login.stage import USER_LOGIN_AUTHENTICATED
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
@ -62,6 +63,7 @@ class ApplicationSerializer(ModelSerializer):
|
||||
return app.get_launch_url(user)
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Application
|
||||
fields = [
|
||||
"pk",
|
||||
@ -185,6 +187,10 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
|
||||
if superuser_full_list and request.user.is_superuser:
|
||||
return super().list(request)
|
||||
|
||||
# To prevent the user from having to double login when prompt is set to login
|
||||
# and the user has just signed it. This session variable is set in the UserLoginStage
|
||||
# and is (quite hackily) removed from the session in applications's API's List method
|
||||
self.request.session.pop(USER_LOGIN_AUTHENTICATED, None)
|
||||
queryset = self._filter_queryset_for_list(self.get_queryset())
|
||||
self.paginate_queryset(queryset)
|
||||
|
||||
|
@ -74,6 +74,7 @@ class AuthenticatedSessionSerializer(ModelSerializer):
return GEOIP_READER.city_dict(instance.last_ip)

class Meta:

model = AuthenticatedSession
fields = [
"uuid",
@ -24,10 +24,12 @@ from authentik.core.models import Group, User
|
||||
class GroupMemberSerializer(ModelSerializer):
|
||||
"""Stripped down user serializer to show relevant users for groups"""
|
||||
|
||||
avatar = CharField(read_only=True)
|
||||
attributes = JSONField(validators=[is_dict], required=False)
|
||||
uid = CharField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
|
||||
model = User
|
||||
fields = [
|
||||
"pk",
|
||||
@ -36,6 +38,7 @@ class GroupMemberSerializer(ModelSerializer):
|
||||
"is_active",
|
||||
"last_login",
|
||||
"email",
|
||||
"avatar",
|
||||
"attributes",
|
||||
"uid",
|
||||
]
|
||||
@ -53,6 +56,7 @@ class GroupSerializer(ModelSerializer):
|
||||
num_pk = IntegerField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Group
|
||||
fields = [
|
||||
"pk",
|
||||
@ -110,6 +114,7 @@ class GroupFilter(FilterSet):
|
||||
return queryset
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Group
|
||||
fields = ["name", "is_superuser", "members_by_pk", "attributes", "members_by_username"]
|
||||
|
||||
|
@ -49,6 +49,7 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri
|
||||
return expression
|
||||
|
||||
class Meta:
|
||||
|
||||
model = PropertyMapping
|
||||
fields = [
|
||||
"pk",
|
||||
@ -93,6 +94,7 @@ class PropertyMappingViewSet(
|
||||
{
|
||||
"name": subclass._meta.verbose_name,
|
||||
"description": subclass.__doc__,
|
||||
# pyright: reportGeneralTypeIssues=false
|
||||
"component": subclass().component,
|
||||
"model_name": subclass._meta.model_name,
|
||||
}
|
||||
|
@ -25,16 +25,17 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
|
||||
|
||||
def get_component(self, obj: Provider) -> str: # pragma: no cover
|
||||
"""Get object component so that we know how to edit the object"""
|
||||
# pyright: reportGeneralTypeIssues=false
|
||||
if obj.__class__ == Provider:
|
||||
return ""
|
||||
return obj.component
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Provider
|
||||
fields = [
|
||||
"pk",
|
||||
"name",
|
||||
"authentication_flow",
|
||||
"authorization_flow",
|
||||
"property_mappings",
|
||||
"component",
|
||||
@ -44,9 +45,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
|
||||
"verbose_name_plural",
|
||||
"meta_model_name",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"authorization_flow": {"required": True, "allow_null": False},
|
||||
}
|
||||
|
||||
|
||||
class ProviderViewSet(
|
||||
|
@ -40,11 +40,13 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
|
||||
|
||||
def get_component(self, obj: Source) -> str:
|
||||
"""Get object component so that we know how to edit the object"""
|
||||
# pyright: reportGeneralTypeIssues=false
|
||||
if obj.__class__ == Source:
|
||||
return ""
|
||||
return obj.component
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Source
|
||||
fields = [
|
||||
"pk",
|
||||
@ -138,6 +140,7 @@ class SourceViewSet(
|
||||
component = subclass.__bases__[0]().component
|
||||
else:
|
||||
component = subclass().component
|
||||
# pyright: reportGeneralTypeIssues=false
|
||||
data.append(
|
||||
{
|
||||
"name": subclass._meta.verbose_name,
|
||||
@ -204,6 +207,5 @@ class UserSourceConnectionViewSet(
|
||||
queryset = UserSourceConnection.objects.all()
|
||||
serializer_class = UserSourceConnectionSerializer
|
||||
permission_classes = [OwnerSuperuserPermissions]
|
||||
filterset_fields = ["user"]
|
||||
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
|
||||
ordering = ["pk"]
|
||||
|
@ -16,7 +16,6 @@ from rest_framework.viewsets import ModelViewSet
|
||||
from authentik.api.authorization import OwnerSuperuserPermissions
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.blueprints.api import ManagedSerializer
|
||||
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.users import UserSerializer
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
@ -30,27 +29,17 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):
|
||||
|
||||
user_obj = UserSerializer(required=False, source="user", read_only=True)
|
||||
|
||||
def __init__(self, *args, **kwargs) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
|
||||
self.fields["key"] = CharField()
|
||||
|
||||
def validate(self, attrs: dict[Any, str]) -> dict[Any, str]:
|
||||
"""Ensure only API or App password tokens are created."""
|
||||
request: Request = self.context.get("request")
|
||||
if not request:
|
||||
if "user" not in attrs:
|
||||
raise ValidationError("Missing user")
|
||||
if "intent" not in attrs:
|
||||
raise ValidationError("Missing intent")
|
||||
else:
|
||||
attrs.setdefault("user", request.user)
|
||||
request: Request = self.context["request"]
|
||||
attrs.setdefault("user", request.user)
|
||||
attrs.setdefault("intent", TokenIntents.INTENT_API)
|
||||
if attrs.get("intent") not in [TokenIntents.INTENT_API, TokenIntents.INTENT_APP_PASSWORD]:
|
||||
raise ValidationError(f"Invalid intent {attrs.get('intent')}")
|
||||
return attrs
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Token
|
||||
fields = [
|
||||
"pk",
|
||||
@ -145,10 +134,9 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
|
||||
)
|
||||
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
|
||||
def set_key(self, request: Request, identifier: str) -> Response:
|
||||
"""Set token key. Action is logged as event. `authentik_core.set_token_key` permission
|
||||
is required."""
|
||||
"""Return token key and log access"""
|
||||
token: Token = self.get_object()
|
||||
key = request.data.get("key")
|
||||
key = request.POST.get("key")
|
||||
if not key:
|
||||
return Response(status=400)
|
||||
token.key = key
|
||||
|
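The hunk above changes the set_key action to read the new key from form data (request.POST). A hedged sketch of calling that action over HTTP; the /api/v3/core/tokens/<identifier>/set_key/ path and the admin token are assumptions, not taken from this diff:

import requests

resp = requests.post(
    "https://authentik.example.com/api/v3/core/tokens/my-token/set_key/",  # assumed path
    headers={"Authorization": "Bearer <admin-api-token>"},  # placeholder credential
    data={"key": "new-token-key-value"},  # sent as form data, matching request.POST
    timeout=10,
)
print(resp.status_code)  # the view above returns 400 when "key" is missing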
@ -56,6 +56,7 @@ class UsedByMixin:
# pylint: disable=too-many-locals
def used_by(self, request: Request, *args, **kwargs) -> Response:
"""Get a list of all objects that use this object"""
# pyright: reportGeneralTypeIssues=false
model: Model = self.get_object()
used_by = []
shadows = []
@ -4,8 +4,6 @@ from json import loads
|
||||
from typing import Any, Optional
|
||||
|
||||
from django.contrib.auth import update_session_auth_hash
|
||||
from django.contrib.sessions.backends.cache import KEY_PREFIX
|
||||
from django.core.cache import cache
|
||||
from django.db.models.functions import ExtractHour
|
||||
from django.db.models.query import QuerySet
|
||||
from django.db.transaction import atomic
|
||||
@ -38,13 +36,11 @@ from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import (
|
||||
BooleanField,
|
||||
DateTimeField,
|
||||
ListSerializer,
|
||||
ModelSerializer,
|
||||
PrimaryKeyRelatedField,
|
||||
ValidationError,
|
||||
)
|
||||
from rest_framework.validators import UniqueValidator
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
from rest_framework_guardian.filters import ObjectPermissionsFilter
|
||||
from structlog.stdlib import get_logger
|
||||
@ -61,14 +57,12 @@ from authentik.core.models import (
|
||||
USER_ATTRIBUTE_SA,
|
||||
USER_ATTRIBUTE_TOKEN_EXPIRING,
|
||||
USER_PATH_SERVICE_ACCOUNT,
|
||||
AuthenticatedSession,
|
||||
Group,
|
||||
Token,
|
||||
TokenIntents,
|
||||
User,
|
||||
)
|
||||
from authentik.events.models import EventAction
|
||||
from authentik.flows.exceptions import FlowNonApplicableException
|
||||
from authentik.flows.models import FlowToken
|
||||
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner
|
||||
from authentik.flows.views.executor import QS_KEY_TOKEN
|
||||
@ -87,6 +81,7 @@ class UserGroupSerializer(ModelSerializer):
|
||||
parent_name = CharField(source="parent.name", read_only=True)
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Group
|
||||
fields = [
|
||||
"pk",
|
||||
@ -110,7 +105,7 @@ class UserSerializer(ModelSerializer):
|
||||
)
|
||||
groups_obj = ListSerializer(child=UserGroupSerializer(), read_only=True, source="ak_groups")
|
||||
uid = CharField(read_only=True)
|
||||
username = CharField(max_length=150, validators=[UniqueValidator(queryset=User.objects.all())])
|
||||
username = CharField(max_length=150)
|
||||
|
||||
def validate_path(self, path: str) -> str:
|
||||
"""Validate path"""
|
||||
@ -122,6 +117,7 @@ class UserSerializer(ModelSerializer):
|
||||
return path
|
||||
|
||||
class Meta:
|
||||
|
||||
model = User
|
||||
fields = [
|
||||
"pk",
|
||||
@ -173,6 +169,7 @@ class UserSelfSerializer(ModelSerializer):
|
||||
return user.group_attributes(self._context["request"]).get("settings", {})
|
||||
|
||||
class Meta:
|
||||
|
||||
model = User
|
||||
fields = [
|
||||
"pk",
|
||||
@ -211,9 +208,8 @@ class UserMetricsSerializer(PassiveSerializer):
|
||||
def get_logins(self, _):
|
||||
"""Get successful logins per 8 hours for the last 7 days"""
|
||||
user = self.context["user"]
|
||||
request = self.context["request"]
|
||||
return (
|
||||
get_objects_for_user(request.user, "authentik_events.view_event").filter(
|
||||
get_objects_for_user(user, "authentik_events.view_event").filter(
|
||||
action=EventAction.LOGIN, user__pk=user.pk
|
||||
)
|
||||
# 3 data points per day, so 8 hour spans
|
||||
@ -224,9 +220,8 @@ class UserMetricsSerializer(PassiveSerializer):
|
||||
def get_logins_failed(self, _):
|
||||
"""Get failed logins per 8 hours for the last 7 days"""
|
||||
user = self.context["user"]
|
||||
request = self.context["request"]
|
||||
return (
|
||||
get_objects_for_user(request.user, "authentik_events.view_event").filter(
|
||||
get_objects_for_user(user, "authentik_events.view_event").filter(
|
||||
action=EventAction.LOGIN_FAILED, context__username=user.username
|
||||
)
|
||||
# 3 data points per day, so 8 hour spans
|
||||
@ -237,9 +232,8 @@ class UserMetricsSerializer(PassiveSerializer):
|
||||
def get_authorizations(self, _):
|
||||
"""Get failed logins per 8 hours for the last 7 days"""
|
||||
user = self.context["user"]
|
||||
request = self.context["request"]
|
||||
return (
|
||||
get_objects_for_user(request.user, "authentik_events.view_event").filter(
|
||||
get_objects_for_user(user, "authentik_events.view_event").filter(
|
||||
action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk
|
||||
)
|
||||
# 3 data points per day, so 8 hour spans
|
||||
@@ -330,16 +324,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):
user: User = self.get_object()
planner = FlowPlanner(flow)
planner.allow_empty_flows = True
try:
plan = planner.plan(
self.request._request,
{
PLAN_CONTEXT_PENDING_USER: user,
},
)
except FlowNonApplicableException:
LOGGER.warning("Recovery flow not applicable to user")
return None, None
plan = planner.plan(
self.request._request,
{
PLAN_CONTEXT_PENDING_USER: user,
},
)
token, __ = FlowToken.objects.update_or_create(
identifier=f"{user.uid}-password-reset",
defaults={
@@ -362,11 +352,6 @@ class UserViewSet(UsedByMixin, ModelViewSet):
{
"name": CharField(required=True),
"create_group": BooleanField(default=False),
"expiring": BooleanField(default=True),
"expires": DateTimeField(
required=False,
help_text="If not provided, valid for 360 days",
),
},
),
responses={
@@ -387,20 +372,14 @@ class UserViewSet(UsedByMixin, ModelViewSet):
"""Create a new user account that is marked as a service account"""
username = request.data.get("name")
create_group = request.data.get("create_group", False)
expiring = request.data.get("expiring", True)
expires = request.data.get("expires", now() + timedelta(days=360))

with atomic():
try:
user: User = User.objects.create(
user = User.objects.create(
username=username,
name=username,
attributes={USER_ATTRIBUTE_SA: True, USER_ATTRIBUTE_TOKEN_EXPIRING: expiring},
attributes={USER_ATTRIBUTE_SA: True, USER_ATTRIBUTE_TOKEN_EXPIRING: False},
path=USER_PATH_SERVICE_ACCOUNT,
)
user.set_unusable_password()
user.save()

response = {
"username": user.username,
"user_uid": user.uid,
@@ -416,12 +395,11 @@ class UserViewSet(UsedByMixin, ModelViewSet):
identifier=slugify(f"service-account-{username}-password"),
intent=TokenIntents.INTENT_APP_PASSWORD,
user=user,
expires=expires,
expiring=expiring,
expires=now() + timedelta(days=360),
)
response["token"] = token.key
return Response(response)
except IntegrityError as exc:
except (IntegrityError) as exc:
return Response(data={"non_field_errors": [str(exc)]}, status=400)

@extend_schema(responses={200: SessionUserSerializer(many=False)})
@@ -474,9 +452,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
def metrics(self, request: Request, pk: int) -> Response:
"""User metrics per 1h"""
user: User = self.get_object()
serializer = UserMetricsSerializer(instance={})
serializer = UserMetricsSerializer(True)
serializer.context["user"] = user
serializer.context["request"] = request
return Response(serializer.data)

@permission_required("authentik_core.reset_user_password")
@@ -584,14 +561,3 @@ class UserViewSet(UsedByMixin, ModelViewSet):
)
}
)

def partial_update(self, request: Request, *args, **kwargs) -> Response:
response = super().partial_update(request, *args, **kwargs)
instance: User = self.get_object()
if not instance.is_active:
sessions = AuthenticatedSession.objects.filter(user=instance)
session_ids = sessions.values_list("session_key", flat=True)
cache.delete_many(f"{KEY_PREFIX}{session}" for session in session_ids)
sessions.delete()
LOGGER.debug("Deleted user's sessions", user=instance.username)
return response
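Note: the service-account hunks above change which request fields are honoured (the newer side accepts "expiring"/"expires", the older side ignores them). The sketch below is only an illustration of calling that endpoint; the host, token, and the `/api/v3/core/users/service_account/` path are assumptions and may differ per deployment and version.

```python
# Minimal sketch: create a service account via the authentik API (illustrative only).
import requests

AUTHENTIK_URL = "https://authentik.example.com"  # hypothetical host
API_TOKEN = "<api-token>"  # an admin API token

resp = requests.post(
    f"{AUTHENTIK_URL}/api/v3/core/users/service_account/",
    headers={"Authorization": f"Bearer {API_TOKEN}"},
    json={
        "name": "backup-runner",
        "create_group": False,
        # "expiring"/"expires" are only honoured on the newer side of this diff
        "expiring": True,
    },
    timeout=10,
)
resp.raise_for_status()
data = resp.json()
# The response carries the generated app-password token for the new account.
print(data["username"], data["token"])
```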
@@ -11,7 +1,6 @@ class AuthentikCoreConfig(ManagedAppConfig):
label = "authentik_core"
verbose_name = "authentik Core"
mountpoint = ""
ws_mountpoint = "authentik.core.urls"
default = True

def reconcile_load_core_signals(self):

@@ -1,9 +1,8 @@
"""Property Mapping Evaluator"""
from typing import Any, Optional
from typing import Optional

from django.db.models import Model
from django.http import HttpRequest
from prometheus_client import Histogram

from authentik.core.models import User
from authentik.events.models import Event, EventAction
@@ -11,24 +10,15 @@ from authentik.lib.expression.evaluator import BaseEvaluator
from authentik.lib.utils.errors import exception_to_string
from authentik.policies.types import PolicyRequest

PROPERTY_MAPPING_TIME = Histogram(
"authentik_property_mapping_execution_time",
"Evaluation time of property mappings",
["mapping_name"],
)


class PropertyMappingEvaluator(BaseEvaluator):
"""Custom Evaluator that adds some different context variables."""

dry_run: bool

def __init__(
self,
model: Model,
user: Optional[User] = None,
request: Optional[HttpRequest] = None,
dry_run: Optional[bool] = False,
**kwargs,
):
if hasattr(model, "name"):
@@ -45,13 +35,9 @@ class PropertyMappingEvaluator(BaseEvaluator):
req.http_request = request
self._context["request"] = req
self._context.update(**kwargs)
self.dry_run = dry_run

def handle_error(self, exc: Exception, expression_source: str):
"""Exception Handler"""
# For dry-run requests we don't save exceptions
if self.dry_run:
return
error_string = exception_to_string(exc)
event = Event.new(
EventAction.PROPERTY_MAPPING_EXCEPTION,
@@ -63,7 +49,3 @@ class PropertyMappingEvaluator(BaseEvaluator):
event.from_http(req.http_request, req.user)
return
event.save()

def evaluate(self, *args, **kwargs) -> Any:
with PROPERTY_MAPPING_TIME.labels(mapping_name=self._filename).time():
return super().evaluate(*args, **kwargs)
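The `dry_run` flag introduced in the evaluator above is what allows an expression to be tested without persisting an error event. A rough usage sketch, assuming the constructor signature shown in this hunk and an import path that may differ between the two versions being compared:

```python
# Illustrative only: evaluate a property-mapping expression in dry-run mode,
# so handle_error() returns early instead of saving an Event.
from authentik.core.expression.evaluator import PropertyMappingEvaluator  # path assumed
from authentik.core.models import PropertyMapping, User

mapping = PropertyMapping.objects.first()
user = User.objects.get(username="akadmin")

evaluator = PropertyMappingEvaluator(mapping, user=user, request=None, dry_run=True)
try:
    result = evaluator.evaluate(mapping.expression)
    print("expression returned:", result)
except Exception as exc:  # broad catch is fine here: surfacing the raw error is the point
    print("expression failed:", exc)
```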
@@ -14,6 +14,7 @@ import authentik.core.models


class Migration(migrations.Migration):

initial = True

dependencies = [
@@ -43,10 +44,7 @@ class Migration(migrations.Migration):
"is_superuser",
models.BooleanField(
default=False,
help_text=(
"Designates that this user has all permissions without explicitly"
" assigning them."
),
help_text="Designates that this user has all permissions without explicitly assigning them.",
verbose_name="superuser status",
),
),
@@ -54,9 +52,7 @@ class Migration(migrations.Migration):
"username",
models.CharField(
error_messages={"unique": "A user with that username already exists."},
help_text=(
"Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only."
),
help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
max_length=150,
unique=True,
validators=[django.contrib.auth.validators.UnicodeUsernameValidator()],
@@ -87,10 +83,7 @@ class Migration(migrations.Migration):
"is_active",
models.BooleanField(
default=True,
help_text=(
"Designates whether this user should be treated as active. Unselect"
" this instead of deleting accounts."
),
help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
verbose_name="active",
),
),

@@ -18,13 +18,13 @@ def create_default_user(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
db_alias = schema_editor.connection.alias

akadmin, _ = User.objects.using(db_alias).get_or_create(
username="akadmin",
email=environ.get("AUTHENTIK_BOOTSTRAP_EMAIL", "root@localhost"),
name="authentik Default Admin",
username="akadmin", email="root@localhost", name="authentik Default Admin"
)
password = None
if "TF_BUILD" in environ or settings.TEST:
password = "akadmin" # noqa # nosec
if "AK_ADMIN_PASS" in environ:
password = environ["AK_ADMIN_PASS"]
if "AUTHENTIK_BOOTSTRAP_PASSWORD" in environ:
password = environ["AUTHENTIK_BOOTSTRAP_PASSWORD"]
if password:
@@ -51,6 +51,7 @@ def create_default_admin_group(apps: Apps, schema_editor: BaseDatabaseSchemaEdit


class Migration(migrations.Migration):

replaces = [
("authentik_core", "0002_auto_20200523_1133"),
("authentik_core", "0003_default_user"),
@@ -171,10 +172,7 @@ class Migration(migrations.Migration):
name="groups",
field=models.ManyToManyField(
blank=True,
help_text=(
"The groups this user belongs to. A user will get all permissions granted to"
" each of their groups."
),
help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
related_name="user_set",
related_query_name="user",
to="auth.Group",

@@ -17,6 +17,7 @@ def set_default_token_key(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):


class Migration(migrations.Migration):

replaces = [
("authentik_core", "0012_auto_20201003_1737"),
("authentik_core", "0013_auto_20201003_2132"),

@@ -4,6 +4,7 @@ from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("authentik_core", "0016_auto_20201202_2234"),
]
@@ -14,12 +15,7 @@ class Migration(migrations.Migration):
name="managed",
field=models.TextField(
default=None,
help_text=(
"Objects which are managed by authentik. These objects are created and updated"
" automatically. This is flag only indicates that an object can be overwritten"
" by migrations. You can still modify the objects via the API, but expect"
" changes to be overwritten in a later update."
),
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
null=True,
verbose_name="Managed by authentik",
unique=True,
@@ -30,12 +26,7 @@ class Migration(migrations.Migration):
name="managed",
field=models.TextField(
default=None,
help_text=(
"Objects which are managed by authentik. These objects are created and updated"
" automatically. This is flag only indicates that an object can be overwritten"
" by migrations. You can still modify the objects via the API, but expect"
" changes to be overwritten in a later update."
),
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
null=True,
verbose_name="Managed by authentik",
unique=True,

@@ -46,9 +46,13 @@ def create_default_user_token(apps: Apps, schema_editor: BaseDatabaseSchemaEdito
akadmin = User.objects.using(db_alias).filter(username="akadmin")
if not akadmin.exists():
return
if "AUTHENTIK_BOOTSTRAP_TOKEN" not in environ:
key = None
if "AK_ADMIN_TOKEN" in environ:
key = environ["AK_ADMIN_TOKEN"]
if "AUTHENTIK_BOOTSTRAP_TOKEN" in environ:
key = environ["AUTHENTIK_BOOTSTRAP_TOKEN"]
if not key:
return
key = environ["AUTHENTIK_BOOTSTRAP_TOKEN"]
Token.objects.using(db_alias).create(
identifier="authentik-bootstrap-token",
user=akadmin.first(),
@@ -59,6 +63,7 @@ def create_default_user_token(apps: Apps, schema_editor: BaseDatabaseSchemaEdito


class Migration(migrations.Migration):

replaces = [
("authentik_core", "0018_auto_20210330_1345"),
("authentik_core", "0019_source_managed"),
@@ -91,12 +96,7 @@ class Migration(migrations.Migration):
name="managed",
field=models.TextField(
default=None,
help_text=(
"Objects which are managed by authentik. These objects are created and updated"
" automatically. This is flag only indicates that an object can be overwritten"
" by migrations. You can still modify the objects via the API, but expect"
" changes to be overwritten in a later update."
),
help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
null=True,
unique=True,
verbose_name="Managed by authentik",
@@ -110,38 +110,23 @@ class Migration(migrations.Migration):
("identifier", "Use the source-specific identifier"),
(
"email_link",
(
"Link to a user with identical email address. Can have security"
" implications when a source doesn't validate email addresses."
),
"Link to a user with identical email address. Can have security implications when a source doesn't validate email addresses.",
),
(
"email_deny",
(
"Use the user's email address, but deny enrollment when the email"
" address already exists."
),
"Use the user's email address, but deny enrollment when the email address already exists.",
),
(
"username_link",
(
"Link to a user with identical username. Can have security implications"
" when a username is used with another source."
),
"Link to a user with identical username. Can have security implications when a username is used with another source.",
),
(
"username_deny",
(
"Use the user's username, but deny enrollment when the username already"
" exists."
),
"Use the user's username, but deny enrollment when the username already exists.",
),
],
default="identifier",
help_text=(
"How the source determines if an existing user should be authenticated or a new"
" user enrolled."
),
help_text="How the source determines if an existing user should be authenticated or a new user enrolled.",
),
),
migrations.AlterField(
@@ -182,9 +167,7 @@ class Migration(migrations.Migration):
model_name="application",
name="meta_launch_url",
field=models.TextField(
blank=True,
default="",
validators=[authentik.lib.models.DomainlessFormattedURLValidator()],
blank=True, default="", validators=[authentik.lib.models.DomainlessURLValidator()]
),
),
migrations.RunPython(
@@ -4,6 +4,7 @@ from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("authentik_core", "0018_auto_20210330_1345_squashed_0028_alter_token_intent"),
]

@@ -4,6 +4,7 @@ from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("authentik_core", "0019_application_group"),
]

@@ -4,6 +4,7 @@ from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("authentik_core", "0020_application_open_in_new_tab"),
]

@@ -5,6 +5,7 @@ from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("authentik_core", "0021_source_user_path_user_path"),
]

@@ -4,6 +4,7 @@ from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("authentik_core", "0022_alter_group_parent"),
]

@@ -4,6 +4,7 @@ from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("authentik_core", "0023_source_authentik_c_slug_ccb2e5_idx_and_more"),
]

@@ -1,25 +0,0 @@
# Generated by Django 4.1.7 on 2023-03-02 21:32

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):
dependencies = [
("authentik_flows", "0025_alter_flowstagebinding_evaluate_on_plan_and_more"),
("authentik_core", "0024_source_icon"),
]

operations = [
migrations.AlterField(
model_name="provider",
name="authorization_flow",
field=models.ForeignKey(
help_text="Flow used when authorizing this provider.",
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="provider_authorization",
to="authentik_flows.flow",
),
),
]

@@ -1,26 +0,0 @@
# Generated by Django 4.1.7 on 2023-03-07 13:41

from django.db import migrations, models

from authentik.lib.migrations import fallback_names


class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0025_alter_provider_authorization_flow"),
]

operations = [
migrations.RunPython(fallback_names("authentik_core", "propertymapping", "name")),
migrations.RunPython(fallback_names("authentik_core", "provider", "name")),
migrations.AlterField(
model_name="propertymapping",
name="name",
field=models.TextField(unique=True),
),
migrations.AlterField(
model_name="provider",
name="name",
field=models.TextField(unique=True),
),
]

@@ -1,19 +0,0 @@
# Generated by Django 4.1.7 on 2023-03-19 21:57

import uuid

from django.db import migrations, models


class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0026_alter_propertymapping_name_alter_provider_name"),
]

operations = [
migrations.AlterField(
model_name="user",
name="uuid",
field=models.UUIDField(default=uuid.uuid4, editable=False, unique=True),
),
]

@@ -1,25 +0,0 @@
# Generated by Django 4.1.7 on 2023-03-23 21:44

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):
dependencies = [
("authentik_flows", "0025_alter_flowstagebinding_evaluate_on_plan_and_more"),
("authentik_core", "0027_alter_user_uuid"),
]

operations = [
migrations.AddField(
model_name="provider",
name="authentication_flow",
field=models.ForeignKey(
help_text="Flow used for authentication when the associated application is accessed by an un-authenticated user.",
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="provider_authentication",
to="authentik_flows.flow",
),
),
]
@@ -1,7 +1,8 @@
"""authentik core models"""
from datetime import timedelta
from hashlib import sha256
from hashlib import md5, sha256
from typing import Any, Optional
from urllib.parse import urlencode
from uuid import uuid4

from deepmerge import always_merger
@@ -12,7 +13,9 @@ from django.contrib.auth.models import UserManager as DjangoUserManager
from django.db import models
from django.db.models import Q, QuerySet, options
from django.http import HttpRequest
from django.templatetags.static import static
from django.utils.functional import SimpleLazyObject, cached_property
from django.utils.html import escape
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from guardian.mixins import GuardianUserMixin
@@ -22,15 +25,11 @@ from structlog.stdlib import get_logger

from authentik.blueprints.models import ManagedModel
from authentik.core.exceptions import PropertyMappingExpressionException
from authentik.core.signals import password_changed
from authentik.core.types import UILoginButton, UserSettingSerializer
from authentik.lib.avatars import get_avatar
from authentik.lib.config import CONFIG
from authentik.lib.config import CONFIG, get_path_from_dict
from authentik.lib.generators import generate_id
from authentik.lib.models import (
CreatedUpdatedModel,
DomainlessFormattedURLValidator,
SerializerModel,
)
from authentik.lib.models import CreatedUpdatedModel, DomainlessURLValidator, SerializerModel
from authentik.lib.utils.http import get_client_ip
from authentik.policies.models import PolicyBindingModel

@@ -50,6 +49,9 @@ USER_ATTRIBUTE_CAN_OVERRIDE_IP = "goauthentik.io/user/override-ips"
USER_PATH_SYSTEM_PREFIX = "goauthentik.io"
USER_PATH_SERVICE_ACCOUNT = USER_PATH_SYSTEM_PREFIX + "/service-accounts"

GRAVATAR_URL = "https://secure.gravatar.com"
DEFAULT_AVATAR = static("dist/assets/images/user_default.png")


options.DEFAULT_NAMES = options.DEFAULT_NAMES + ("authentik_used_by_shadows",)

@@ -127,6 +129,7 @@ class Group(SerializerModel):
return f"Group {self.name}"

class Meta:

unique_together = (
(
"name",
@@ -146,7 +149,7 @@ class UserManager(DjangoUserManager):
class User(SerializerModel, GuardianUserMixin, AbstractUser):
"""Custom User model to allow easier adding of user-based settings"""

uuid = models.UUIDField(default=uuid4, editable=False, unique=True)
uuid = models.UUIDField(default=uuid4, editable=False)
name = models.TextField(help_text=_("User's display name."))
path = models.TextField(default="users")

@@ -192,8 +195,6 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):

def set_password(self, raw_password, signal=True):
if self.pk and signal:
from authentik.core.signals import password_changed

password_changed.send(sender=self, user=self, password=raw_password)
self.password_change_date = now()
return super().set_password(raw_password)
@@ -233,9 +234,28 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
@property
def avatar(self) -> str:
"""Get avatar, depending on authentik.avatar setting"""
return get_avatar(self)
mode: str = CONFIG.y("avatars", "none")
if mode == "none":
return DEFAULT_AVATAR
if mode.startswith("attributes."):
return get_path_from_dict(self.attributes, mode[11:], default=DEFAULT_AVATAR)
# gravatar uses md5 for their URLs, so md5 can't be avoided
mail_hash = md5(self.email.lower().encode("utf-8")).hexdigest() # nosec
if mode == "gravatar":
parameters = [
("s", "158"),
("r", "g"),
]
gravatar_url = f"{GRAVATAR_URL}/avatar/{mail_hash}?{urlencode(parameters, doseq=True)}"
return escape(gravatar_url)
return mode % {
"username": self.username,
"mail_hash": mail_hash,
"upn": self.attributes.get("upn", ""),
}

class Meta:

permissions = (
("reset_user_password", "Reset Password"),
("impersonate", "Can impersonate other users"),
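In the avatar hunk above, one side delegates to `get_avatar(self)` while the other builds the URL inline from the configured mode. The snippet below is a standalone re-statement of that inline logic for illustration only; the function name and defaults here are placeholders, not the authentik API.

```python
# Standalone sketch of the avatar-mode branches shown in the hunk above.
from hashlib import md5
from urllib.parse import urlencode

GRAVATAR_URL = "https://secure.gravatar.com"

def avatar_for(email: str, username: str, mode: str = "gravatar") -> str:
    # gravatar addresses avatars by the md5 of the lower-cased email
    mail_hash = md5(email.lower().encode("utf-8")).hexdigest()  # nosec
    if mode == "gravatar":
        params = urlencode([("s", "158"), ("r", "g")], doseq=True)
        return f"{GRAVATAR_URL}/avatar/{mail_hash}?{params}"
    # any other mode is treated as a %-format template
    return mode % {"username": username, "mail_hash": mail_hash}

print(avatar_for("jane@example.com", "jane"))
print(avatar_for("jane@example.com", "jane", mode="https://avatars.example.com/%(username)s.png"))
```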
@@ -247,23 +267,11 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
class Provider(SerializerModel):
"""Application-independent Provider instance. For example SAML2 Remote, OAuth2 Application"""

name = models.TextField(unique=True)

authentication_flow = models.ForeignKey(
"authentik_flows.Flow",
null=True,
on_delete=models.SET_NULL,
help_text=_(
"Flow used for authentication when the associated application is accessed by an "
"un-authenticated user."
),
related_name="provider_authentication",
)
name = models.TextField()

authorization_flow = models.ForeignKey(
"authentik_flows.Flow",
on_delete=models.CASCADE,
null=True,
help_text=_("Flow used when authorizing this provider."),
related_name="provider_authorization",
)
@@ -306,7 +314,7 @@ class Application(SerializerModel, PolicyBindingModel):
)

meta_launch_url = models.TextField(
default="", blank=True, validators=[DomainlessFormattedURLValidator()]
default="", blank=True, validators=[DomainlessURLValidator()]
)

open_in_new_tab = models.BooleanField(
@@ -374,6 +382,7 @@ class Application(SerializerModel, PolicyBindingModel):
return str(self.name)

class Meta:

verbose_name = _("Application")
verbose_name_plural = _("Applications")

@@ -383,15 +392,19 @@ class SourceUserMatchingModes(models.TextChoices):

IDENTIFIER = "identifier", _("Use the source-specific identifier")
EMAIL_LINK = "email_link", _(
"Link to a user with identical email address. Can have security implications "
"when a source doesn't validate email addresses."
(
"Link to a user with identical email address. Can have security implications "
"when a source doesn't validate email addresses."
)
)
EMAIL_DENY = "email_deny", _(
"Use the user's email address, but deny enrollment when the email address already exists."
)
USERNAME_LINK = "username_link", _(
"Link to a user with identical username. Can have security implications "
"when a username is used with another source."
(
"Link to a user with identical username. Can have security implications "
"when a username is used with another source."
)
)
USERNAME_DENY = "username_deny", _(
"Use the user's username, but deny enrollment when the username already exists."
@@ -438,8 +451,10 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
choices=SourceUserMatchingModes.choices,
default=SourceUserMatchingModes.IDENTIFIER,
help_text=_(
"How the source determines if an existing user should be authenticated or "
"a new user enrolled."
(
"How the source determines if an existing user should be authenticated or "
"a new user enrolled."
)
),
)

@@ -485,6 +500,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
return str(self.name)

class Meta:

indexes = [
models.Index(
fields=[
@@ -513,6 +529,7 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel):
raise NotImplementedError

class Meta:

unique_together = (("user", "source"),)


@@ -545,6 +562,7 @@ class ExpiringModel(models.Model):
return now() > self.expires

class Meta:

abstract = True


@@ -610,6 +628,7 @@ class Token(SerializerModel, ManagedModel, ExpiringModel):
return description

class Meta:

verbose_name = _("Token")
verbose_name_plural = _("Tokens")
indexes = [
@@ -623,7 +642,7 @@ class PropertyMapping(SerializerModel, ManagedModel):
"""User-defined key -> x mapping which can be used by providers to expose extra data."""

pm_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
name = models.TextField(unique=True)
name = models.TextField()
expression = models.TextField()

objects = InheritanceManager()
@@ -646,12 +665,13 @@ class PropertyMapping(SerializerModel, ManagedModel):
try:
return evaluator.evaluate(self.expression)
except Exception as exc:
raise PropertyMappingExpressionException(exc) from exc
raise PropertyMappingExpressionException(str(exc)) from exc

def __str__(self):
return f"Property Mapping {self.name}"

class Meta:

verbose_name = _("Property Mapping")
verbose_name_plural = _("Property Mappings")

@@ -688,5 +708,6 @@ class AuthenticatedSession(ExpiringModel):
)

class Meta:

verbose_name = _("Authenticated Session")
verbose_name_plural = _("Authenticated Sessions")
@@ -10,25 +10,25 @@ from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.http.request import HttpRequest

from authentik.core.models import Application, AuthenticatedSession

# Arguments: user: User, password: str
password_changed = Signal()
# Arguments: credentials: dict[str, any], request: HttpRequest, stage: Stage
login_failed = Signal()

if TYPE_CHECKING:
from authentik.core.models import User
from authentik.core.models import AuthenticatedSession, User


@receiver(post_save, sender=Application)
@receiver(post_save)
def post_save_application(sender: type[Model], instance, created: bool, **_):
"""Clear user's application cache upon application creation"""
from authentik.core.api.applications import user_app_cache_key
from authentik.core.models import Application

if sender != Application:
return
if not created: # pragma: no cover
return

# Also delete user application cache
keys = cache.keys(user_app_cache_key("*"))
cache.delete_many(keys)
@@ -37,6 +37,7 @@ def post_save_application(sender: type[Model], instance, created: bool, **_):
@receiver(user_logged_in)
def user_logged_in_session(sender, request: HttpRequest, user: "User", **_):
"""Create an AuthenticatedSession from request"""
from authentik.core.models import AuthenticatedSession

session = AuthenticatedSession.from_request(request, user)
if session:
@@ -46,11 +47,18 @@ def user_logged_in_session(sender, request: HttpRequest, user: "User", **_):
@receiver(user_logged_out)
def user_logged_out_session(sender, request: HttpRequest, user: "User", **_):
"""Delete AuthenticatedSession if it exists"""
from authentik.core.models import AuthenticatedSession

AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete()


@receiver(pre_delete, sender=AuthenticatedSession)
@receiver(pre_delete)
def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_):
"""Delete session when authenticated session is deleted"""
from authentik.core.models import AuthenticatedSession

if sender != AuthenticatedSession:
return

cache_key = f"{KEY_PREFIX}{instance.session_key}"
cache.delete(cache_key)
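One side of the signals hunks above registers receivers with an explicit `sender=` filter, while the other registers for all senders and filters inside the handler with a late import. A minimal, self-contained illustration of that trade-off; the "Article"/"myapp" names are placeholders and not part of authentik:

```python
# Minimal sketch of the two receiver-registration styles seen in the hunk above.
from django.db.models.signals import post_save
from django.dispatch import receiver


@receiver(post_save)
def on_anything_saved(sender, instance, created, **_):
    """Registered for every model: the handler imports and filters itself,
    which keeps this module importable before the app registry is ready."""
    from myapp.models import Article  # placeholder model

    if sender is not Article:
        return
    ...


# The alternative wires the filter into the registration instead and needs the
# model importable at module load time:
# @receiver(post_save, sender=Article)
# def on_article_saved(sender, instance, created, **_): ...
```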
@@ -190,8 +190,11 @@ class SourceFlowManager:
# Default case, assume deny
error = Exception(
_(
"Request to authenticate with %(source)s has been denied. Please authenticate "
"with the source you've previously signed up with." % {"source": self.source.name}
(
"Request to authenticate with %(source)s has been denied. Please authenticate "
"with the source you've previously signed up with."
)
% {"source": self.source.name}
),
)
return self.error_handler(error)
@@ -43,12 +43,7 @@ def clean_expired_models(self: MonitoredTask):
amount = 0
for session in AuthenticatedSession.objects.all():
cache_key = f"{KEY_PREFIX}{session.session_key}"
value = None
try:
value = cache.get(cache_key)
# pylint: disable=broad-except
except Exception as exc:
LOGGER.debug("Failed to get session from cache", exc=exc)
value = cache.get(cache_key)
if not value:
session.delete()
amount += 1
Some files were not shown because too many files have changed in this diff.