Compare commits
3 commits: interfaces...version-20

Commits:
- 4d791f4fef
- c03a069f02
- 040adb8ce7

@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2023.4.1
+current_version = 2022.12.3
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)

@@ -6,4 +6,3 @@ dist/**
 build/**
 build_docs/**
 Dockerfile
-authentik/enterprise

@@ -7,14 +7,8 @@ charset = utf-8
 trim_trailing_whitespace = true
 insert_final_newline = true
 
-[*.html]
+[html]
 indent_size = 2
 
-[*.{yaml,yml}]
+[yaml]
 indent_size = 2
-
-[*.go]
-indent_style = tab
-
-[Makefile]
-indent_style = tab

.github/ISSUE_TEMPLATE/bug_report.md (7 changes)

@@ -1,9 +1,10 @@
 ---
 name: Bug report
 about: Create a report to help us improve
-title: ""
+title: ''
 labels: bug
-assignees: ""
+assignees: ''
+
 ---
 
 **Describe the bug**
@@ -11,7 +12,6 @@ A clear and concise description of what the bug is.
 
 **To Reproduce**
 Steps to reproduce the behavior:
-
 1. Go to '...'
 2. Click on '....'
 3. Scroll down to '....'
@@ -27,7 +27,6 @@ If applicable, add screenshots to help explain your problem.
 Output of docker-compose logs or kubectl logs respectively
 
 **Version and Deployment (please complete the following information):**
-
 - authentik version: [e.g. 2021.8.5]
 - Deployment: [e.g. docker-compose, helm]
 

.github/ISSUE_TEMPLATE/feature_request.md (5 changes)

@@ -1,9 +1,10 @@
 ---
 name: Feature request
 about: Suggest an idea for this project
-title: ""
+title: ''
 labels: enhancement
-assignees: ""
+assignees: ''
+
 ---
 
 **Is your feature request related to a problem? Please describe.**

.github/ISSUE_TEMPLATE/question.md (6 changes)

@@ -1,9 +1,10 @@
 ---
 name: Question
 about: Ask a question about a feature or specific configuration
-title: ""
+title: ''
 labels: question
-assignees: ""
+assignees: ''
+
 ---
 
 **Describe your question/**
@@ -19,7 +20,6 @@ If applicable, add screenshots to help explain your problem.
 Output of docker-compose logs or kubectl logs respectively
 
 **Version and Deployment (please complete the following information):**
-
 - authentik version: [e.g. 2021.8.5]
 - Deployment: [e.g. docker-compose, helm]
 

@@ -1,5 +1,5 @@
-name: "Comment usage instructions on PRs"
-description: "Comment usage instructions on PRs"
+name: 'Comment usage instructions on PRs'
+description: 'Comment usage instructions on PRs'
 
 inputs:
   tag:
@@ -17,7 +17,7 @@ runs:
       id: fc
       with:
        issue-number: ${{ github.event.pull_request.number }}
-        comment-author: "github-actions[bot]"
+        comment-author: 'github-actions[bot]'
        body-includes: authentik PR Installation instructions
    - name: Create or update comment
      uses: peter-evans/create-or-update-comment@v2
@@ -38,14 +38,6 @@ runs:
          AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
          ```
 
-          For arm64, use these values:
-
-          ```shell
-          AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
-          AUTHENTIK_TAG=${{ inputs.tag }}-arm64
-          AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
-          ```
-
          Afterwards, run the upgrade commands from the latest release notes.
          </details>
          <details>
@@ -62,17 +54,6 @@ runs:
              tag: ${{ inputs.tag }}
          ```
 
-          For arm64, use these values:
-
-          ```yaml
-          authentik:
-            outposts:
-              container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
-            image:
-              repository: ghcr.io/goauthentik/dev-server
-              tag: ${{ inputs.tag }}-arm64
-          ```
-
          Afterwards, run the upgrade commands from the latest release notes.
          </details>
        edit-mode: replace

@@ -1,5 +1,5 @@
-name: "Prepare docker environment variables"
-description: "Prepare docker environment variables"
+name: 'Prepare docker environment variables'
+description: 'Prepare docker environment variables'
 
 outputs:
   shouldBuild:
@@ -17,9 +17,6 @@ outputs:
   sha:
     description: "sha"
     value: ${{ steps.ev.outputs.sha }}
-  shortHash:
-    description: "shortHash"
-    value: ${{ steps.ev.outputs.shortHash }}
   version:
     description: "version"
     value: ${{ steps.ev.outputs.version }}
@@ -56,7 +53,6 @@ runs:
        print("branchNameContainer=%s" % safe_branch_name, file=_output)
        print("timestamp=%s" % int(time()), file=_output)
        print("sha=%s" % os.environ["GITHUB_SHA"], file=_output)
-        print("shortHash=%s" % os.environ["GITHUB_SHA"][:7], file=_output)
        print("shouldBuild=%s" % should_build, file=_output)
        print("version=%s" % version, file=_output)
        print("versionFamily=%s" % version_family, file=_output)

.github/actions/setup/action.yml (18 changes)

@@ -1,10 +1,5 @@
-name: "Setup authentik testing environment"
-description: "Setup authentik testing environment"
+name: 'Setup authentik testing environment'
+description: 'Setup authentik testing environment'
 
-inputs:
-  postgresql_tag:
-    description: "Optional postgresql image tag"
-    default: "12"
-
 runs:
   using: "composite"
@@ -18,18 +13,17 @@ runs:
    - name: Setup python and restore poetry
      uses: actions/setup-python@v3
      with:
-        python-version: "3.11"
-        cache: "poetry"
+        python-version: '3.11'
+        cache: 'poetry'
    - name: Setup node
      uses: actions/setup-node@v3.1.0
      with:
-        node-version: "20"
-        cache: "npm"
+        node-version: '16'
+        cache: 'npm'
        cache-dependency-path: web/package-lock.json
    - name: Setup dependencies
      shell: bash
      run: |
-        export PSQL_TAG=${{ inputs.postgresql_tag }}
        docker-compose -f .github/actions/setup/docker-compose.yml up -d
        poetry env use python3.11
        poetry install

.github/actions/setup/docker-compose.yml (4 changes)

@@ -1,9 +1,9 @@
-version: "3.7"
+version: '3.7'
 
 services:
   postgresql:
     container_name: postgres
-    image: library/postgres:${PSQL_TAG:-12}
+    image: library/postgres:12
     volumes:
       - db-data:/var/lib/postgresql/data
     environment:

.github/codecov.yml (11 changes)

@@ -1,10 +1,3 @@
 coverage:
-  status:
-    project:
-      default:
-        target: auto
-        # adjust accordingly based on how flaky your tests are
-        # this allows a 1% drop from the previous base commit coverage
-        threshold: 1%
-notify:
-  after_n_builds: 3
+  precision: 2
+  round: up

.github/codespell-dictionary.txt (1 change)

@@ -1 +0,0 @@
-authentic->authentik

.github/pull_request_template.md (11 changes)

@@ -5,20 +5,15 @@ Please check the [Contributing guidelines](https://github.com/goauthentik/authen
 -->
 
 # Details
-
-- **Does this resolve an issue?**
+* **Does this resolve an issue?**
 Resolves #
 
 ## Changes
-
 ### New Features
-
-- Adds feature which does x, y, and z.
+* Adds feature which does x, y, and z.
 
 ### Breaking Changes
-
-- Adds breaking change which causes \<issue\>.
+* Adds breaking change which causes \<issue\>.
 
 ## Additional
-
 Any further notes or comments you want to make.

.github/stale.yml (1 change)

@@ -16,4 +16,3 @@ markComment: >
   This issue has been automatically marked as stale because it has not had
   recent activity. It will be closed if no further activity occurs. Thank you
   for your contributions.
-only: issues

.github/transifex.yml (4 changes)

@@ -6,11 +6,11 @@ git:
    source_language: en
    source_file: web/src/locales/en.po
    # path expression to translation files, must contain <lang> placeholder
-    translation_files_expression: "web/src/locales/<lang>.po"
+    translation_files_expression: 'web/src/locales/<lang>.po'
  - filter_type: file
    # all supported i18n types: https://docs.transifex.com/formats
    file_format: PO
    source_language: en
    source_file: locale/en/LC_MESSAGES/django.po
    # path expression to translation files, must contain <lang> placeholder
-    translation_files_expression: "locale/<lang>/LC_MESSAGES/django.po"
+    translation_files_expression: 'locale/<lang>/LC_MESSAGES/django.po'

.github/workflows/ci-main.yml (133 changes)

@@ -23,14 +23,12 @@ jobs:
       fail-fast: false
       matrix:
         job:
-          - bandit
-          - black
-          - codespell
-          - isort
-          - pending-migrations
           - pylint
+          - black
+          - isort
+          - bandit
           - pyright
-          - ruff
+          - pending-migrations
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
@@ -61,7 +59,7 @@ jobs:
           cp authentik/lib/default.yml local.env.yml
           cp -R .github ..
           cp -R scripts ..
-          git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
+          git checkout $(git describe --abbrev=0 --match 'version/*')
           rm -rf .github/ scripts/
           mv ../.github ../scripts .
       - name: Setup authentik env (ensure stable deps are installed)
@@ -81,21 +79,11 @@ jobs:
       - name: migrate to latest
         run: poetry run python -m lifecycle.migrate
   test-unittest:
-    name: test-unittest - PostgreSQL ${{ matrix.psql }}
     runs-on: ubuntu-latest
-    timeout-minutes: 30
-    strategy:
-      fail-fast: false
-      matrix:
-        psql:
-          - 11-alpine
-          - 12-alpine
     steps:
       - uses: actions/checkout@v3
       - name: Setup authentik env
         uses: ./.github/actions/setup
-        with:
-          postgresql_tag: ${{ matrix.psql }}
       - name: run unittest
         run: |
           poetry run make test
@@ -106,7 +94,6 @@ jobs:
           flags: unit
   test-integration:
     runs-on: ubuntu-latest
-    timeout-minutes: 30
     steps:
       - uses: actions/checkout@v3
       - name: Setup authentik env
@@ -115,34 +102,14 @@ jobs:
         uses: helm/kind-action@v1.5.0
       - name: run integration
         run: |
-          poetry run coverage run manage.py test tests/integration
+          poetry run make test-integration
           poetry run coverage xml
       - if: ${{ always() }}
         uses: codecov/codecov-action@v3
         with:
           flags: integration
-  test-e2e:
-    name: test-e2e (${{ matrix.job.name }})
+  test-e2e-provider:
     runs-on: ubuntu-latest
-    timeout-minutes: 30
-    strategy:
-      fail-fast: false
-      matrix:
-        job:
-          - name: proxy
-            glob: tests/e2e/test_provider_proxy*
-          - name: oauth
-            glob: tests/e2e/test_provider_oauth2* tests/e2e/test_source_oauth*
-          - name: oauth-oidc
-            glob: tests/e2e/test_provider_oidc*
-          - name: saml
-            glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml*
-          - name: ldap
-            glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
-          - name: radius
-            glob: tests/e2e/test_provider_radius*
-          - name: flows
-            glob: tests/e2e/test_flows*
     steps:
       - uses: actions/checkout@v3
       - name: Setup authentik env
@@ -164,7 +131,36 @@ jobs:
           npm run build
       - name: run e2e
         run: |
-          poetry run coverage run manage.py test ${{ matrix.job.glob }}
+          poetry run make test-e2e-provider
+          poetry run coverage xml
+      - if: ${{ always() }}
+        uses: codecov/codecov-action@v3
+        with:
+          flags: e2e
+  test-e2e-rest:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Setup authentik env
+        uses: ./.github/actions/setup
+      - name: Setup e2e env (chrome, etc)
+        run: |
+          docker-compose -f tests/e2e/docker-compose.yml up -d
+      - id: cache-web
+        uses: actions/cache@v3
+        with:
+          path: web/dist
+          key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
+      - name: prepare web ui
+        if: steps.cache-web.outputs.cache-hit != 'true'
+        working-directory: web/
+        run: |
+          npm ci
+          make -C .. gen-client-ts
+          npm run build
+      - name: run e2e
+        run: |
+          poetry run make test-e2e-rest
           poetry run coverage xml
       - if: ${{ always() }}
         uses: codecov/codecov-action@v3
@@ -177,7 +173,8 @@ jobs:
       - test-migrations-from-stable
       - test-unittest
       - test-integration
-      - test-e2e
+      - test-e2e-rest
+      - test-e2e-provider
     runs-on: ubuntu-latest
     steps:
       - run: echo mark
@@ -185,6 +182,11 @@ jobs:
     needs: ci-core-mark
     runs-on: ubuntu-latest
     timeout-minutes: 120
+    strategy:
+      fail-fast: false
+      matrix:
+        arch:
+          - 'linux/amd64'
     steps:
       - uses: actions/checkout@v3
       - name: Set up QEMU
@@ -203,8 +205,8 @@ jobs:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Build Docker Image
-        uses: docker/build-push-action@v4
+      - name: Building Docker Image
+        uses: docker/build-push-action@v3
        with:
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
@@ -212,51 +214,14 @@ jobs:
          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
          tags: |
            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}
-            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}
-            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
+            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}
          build-args: |
            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
+          platforms: ${{ matrix.arch }}
      - name: Comment on PR
        if: github.event_name == 'pull_request'
        continue-on-error: true
        uses: ./.github/actions/comment-pr-instructions
        with:
-          tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
-  build-arm64:
-    needs: ci-core-mark
-    runs-on: ubuntu-latest
-    timeout-minutes: 120
-    steps:
-      - uses: actions/checkout@v3
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2.1.0
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: prepare variables
-        uses: ./.github/actions/docker-push-variables
-        id: ev
-        env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
-      - name: Login to Container Registry
-        uses: docker/login-action@v2
-        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Build Docker Image
-        uses: docker/build-push-action@v4
-        with:
-          secrets: |
-            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
-            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
-          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
-          tags: |
-            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-arm64
-            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}-arm64
-            ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64
-          build-args: |
-            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
-            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
-          platforms: linux/arm64
+          tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}

.github/workflows/ci-outpost.yml (40 changes)

@@ -15,9 +15,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - uses: actions/setup-go@v4
+      - uses: actions/setup-go@v3
         with:
-          go-version-file: "go.mod"
+          go-version: "^1.17"
       - name: Prepare and generate API
         run: |
           # Create folder structure for go embeds
@@ -28,15 +28,13 @@ jobs:
         run: make gen-client-go
       - name: golangci-lint
         uses: golangci/golangci-lint-action@v3
-        with:
-          args: --timeout 5000s
   test-unittest:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - uses: actions/setup-go@v4
+      - uses: actions/setup-go@v3
         with:
-          go-version-file: "go.mod"
+          go-version: "^1.17"
       - name: Generate API
         run: make gen-client-go
       - name: Go unittests
@@ -49,7 +47,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - run: echo mark
-  build-container:
+  build:
     timeout-minutes: 120
     needs:
       - ci-outpost-mark
@@ -59,7 +57,8 @@ jobs:
         type:
          - proxy
          - ldap
-          - radius
+        arch:
+          - 'linux/amd64'
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
@@ -81,20 +80,20 @@ jobs:
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Generate API
        run: make gen-client-go
-      - name: Build Docker Image
-        uses: docker/build-push-action@v4
+      - name: Building Docker Image
+        uses: docker/build-push-action@v3
        with:
          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
          tags: |
            ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}
+            ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}
            ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
          file: ${{ matrix.type }}.Dockerfile
          build-args: |
            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
-          platforms: linux/amd64,linux/arm64
-          context: .
-  build-binary:
+          platforms: ${{ matrix.arch }}
+  build-outpost-binary:
     timeout-minutes: 120
     needs:
       - ci-outpost-mark
@@ -105,18 +104,17 @@ jobs:
        type:
          - proxy
          - ldap
-          - radius
        goos: [linux]
        goarch: [amd64, arm64]
    steps:
      - uses: actions/checkout@v3
-      - uses: actions/setup-go@v4
+      - uses: actions/setup-go@v3
        with:
-          go-version-file: "go.mod"
+          go-version: "^1.17"
-      - uses: actions/setup-node@v3.6.0
+      - uses: actions/setup-node@v3.5.1
        with:
-          node-version: "20"
-          cache: "npm"
+          node-version: '16'
+          cache: 'npm'
          cache-dependency-path: web/package-lock.json
      - name: Generate API
        run: make gen-client-go
@@ -131,3 +129,7 @@ jobs:
          export GOOS=${{ matrix.goos }}
          export GOARCH=${{ matrix.goarch }}
          go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
+      - uses: actions/upload-artifact@v3
+        with:
+          name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
+          path: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
.github/workflows/ci-web.yml
vendored
30
.github/workflows/ci-web.yml
vendored
@ -15,10 +15,10 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
- uses: actions/setup-node@v3.6.0
|
- uses: actions/setup-node@v3.5.1
|
||||||
with:
|
with:
|
||||||
node-version: "20"
|
node-version: '16'
|
||||||
cache: "npm"
|
cache: 'npm'
|
||||||
cache-dependency-path: web/package-lock.json
|
cache-dependency-path: web/package-lock.json
|
||||||
- working-directory: web/
|
- working-directory: web/
|
||||||
run: npm ci
|
run: npm ci
|
||||||
@ -31,10 +31,10 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
- uses: actions/setup-node@v3.6.0
|
- uses: actions/setup-node@v3.5.1
|
||||||
with:
|
with:
|
||||||
node-version: "20"
|
node-version: '16'
|
||||||
cache: "npm"
|
cache: 'npm'
|
||||||
cache-dependency-path: web/package-lock.json
|
cache-dependency-path: web/package-lock.json
|
||||||
- working-directory: web/
|
- working-directory: web/
|
||||||
run: npm ci
|
run: npm ci
|
||||||
@ -47,10 +47,10 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
- uses: actions/setup-node@v3.6.0
|
- uses: actions/setup-node@v3.5.1
|
||||||
with:
|
with:
|
||||||
node-version: "20"
|
node-version: '16'
|
||||||
cache: "npm"
|
cache: 'npm'
|
||||||
cache-dependency-path: web/package-lock.json
|
cache-dependency-path: web/package-lock.json
|
||||||
- working-directory: web/
|
- working-directory: web/
|
||||||
run: npm ci
|
run: npm ci
|
||||||
@ -63,10 +63,10 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
- uses: actions/setup-node@v3.6.0
|
- uses: actions/setup-node@v3.5.1
|
||||||
with:
|
with:
|
||||||
node-version: "20"
|
node-version: '16'
|
||||||
cache: "npm"
|
cache: 'npm'
|
||||||
cache-dependency-path: web/package-lock.json
|
cache-dependency-path: web/package-lock.json
|
||||||
- working-directory: web/
|
- working-directory: web/
|
||||||
run: |
|
run: |
|
||||||
@ -95,10 +95,10 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
- uses: actions/setup-node@v3.6.0
|
- uses: actions/setup-node@v3.5.1
|
||||||
with:
|
with:
|
||||||
node-version: "20"
|
node-version: '16'
|
||||||
cache: "npm"
|
cache: 'npm'
|
||||||
cache-dependency-path: web/package-lock.json
|
cache-dependency-path: web/package-lock.json
|
||||||
- working-directory: web/
|
- working-directory: web/
|
||||||
run: npm ci
|
run: npm ci
|
||||||
|

.github/workflows/ci-website.yml (43 changes)

@@ -15,56 +15,19 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - uses: actions/setup-node@v3.6.0
+      - uses: actions/setup-node@v3.5.1
         with:
-          node-version: "20"
-          cache: "npm"
+          node-version: '16'
+          cache: 'npm'
           cache-dependency-path: website/package-lock.json
       - working-directory: website/
         run: npm ci
       - name: prettier
         working-directory: website/
         run: npm run prettier-check
-  test:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-node@v3.6.0
-        with:
-          node-version: "20"
-          cache: "npm"
-          cache-dependency-path: website/package-lock.json
-      - working-directory: website/
-        run: npm ci
-      - name: test
-        working-directory: website/
-        run: npm test
-  build:
-    runs-on: ubuntu-latest
-    name: ${{ matrix.job }}
-    strategy:
-      fail-fast: false
-      matrix:
-        job:
-          - build
-          - build-docs-only
-    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-node@v3.6.0
-        with:
-          node-version: "20"
-          cache: "npm"
-          cache-dependency-path: website/package-lock.json
-      - working-directory: website/
-        run: npm ci
-      - name: build
-        working-directory: website/
-        run: npm run ${{ matrix.job }}
   ci-website-mark:
     needs:
       - lint-prettier
-      - test
-      - build
     runs-on: ubuntu-latest
     steps:
       - run: echo mark

.github/workflows/codeql-analysis.yml (6 changes)

@@ -2,12 +2,12 @@ name: "CodeQL"
 
 on:
   push:
-    branches: [main, "*", next, version*]
+    branches: [ main, '*', next, version* ]
   pull_request:
     # The branches below must be a subset of the branches above
     branches: [ main ]
   schedule:
-    - cron: "30 6 * * 5"
+    - cron: '30 6 * * 5'
 
 jobs:
   analyze:
@@ -21,7 +21,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        language: ["go", "javascript", "python"]
+        language: [ 'go', 'javascript', 'python' ]
         # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
         # Learn more:
         # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed

.github/workflows/ghcr-retention.yml (6 changes)

@@ -2,7 +2,7 @@ name: ghcr-retention
 
 on:
   schedule:
-    - cron: "0 0 * * *" # every day at midnight
+    - cron: '0 0 * * *' # every day at midnight
   workflow_dispatch:
 
 jobs:
@@ -11,12 +11,12 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Delete 'dev' containers older than a week
-        uses: snok/container-retention-policy@v2
+        uses: sondrelg/container-retention-policy@v1
         with:
           image-names: dev-server,dev-ldap,dev-proxy
           cut-off: One week ago UTC
           account-type: org
           org-name: goauthentik
           untagged-only: false
-          token: ${{ secrets.BOT_GITHUB_TOKEN }}
+          token: ${{ secrets.GHCR_CLEANUP_TOKEN }}
           skip-tags: gh-next,gh-main

.github/workflows/release-publish.yml (33 changes)

@@ -27,11 +27,11 @@ jobs:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Build Docker Image
-        uses: docker/build-push-action@v4
+      - name: Building Docker Image
+        uses: docker/build-push-action@v3
        with:
          push: ${{ github.event_name == 'release' }}
-          secrets: |
+          secrets:
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
          tags: |
@@ -52,12 +52,11 @@ jobs:
        type:
          - proxy
          - ldap
-          - radius
    steps:
      - uses: actions/checkout@v3
-      - uses: actions/setup-go@v4
+      - uses: actions/setup-go@v3
        with:
-          go-version-file: "go.mod"
+          go-version: "^1.17"
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2.1.0
      - name: Set up Docker Buildx
@@ -76,8 +75,8 @@ jobs:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Build Docker Image
-        uses: docker/build-push-action@v4
+      - name: Building Docker Image
+        uses: docker/build-push-action@v3
        with:
          push: ${{ github.event_name == 'release' }}
          tags: |
@@ -89,6 +88,9 @@ jobs:
            ghcr.io/goauthentik/${{ matrix.type }}:latest
          file: ${{ matrix.type }}.Dockerfile
          platforms: linux/amd64,linux/arm64
+          secrets: |
+            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
+            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
          build-args: |
            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
  build-outpost-binary:
@@ -100,18 +102,17 @@ jobs:
        type:
          - proxy
          - ldap
-          - radius
        goos: [linux, darwin]
        goarch: [amd64, arm64]
    steps:
      - uses: actions/checkout@v3
-      - uses: actions/setup-go@v4
+      - uses: actions/setup-go@v3
        with:
-          go-version-file: "go.mod"
+          go-version: "^1.17"
-      - uses: actions/setup-node@v3.6.0
+      - uses: actions/setup-node@v3.5.1
        with:
-          node-version: "20"
-          cache: "npm"
+          node-version: '16'
+          cache: 'npm'
          cache-dependency-path: web/package-lock.json
      - name: Build web
        working-directory: web/
@@ -173,5 +174,5 @@ jobs:
        SENTRY_PROJECT: authentik
      with:
        version: authentik@${{ steps.ev.outputs.version }}
-        sourcemaps: "./web/dist"
-        url_prefix: "~/static/dist"
+        sourcemaps: './web/dist'
+        url_prefix: '~/static/dist'

.github/workflows/release-tag.yml (6 changes)

@@ -3,7 +3,7 @@ name: authentik-on-tag
 on:
   push:
     tags:
-      - "version/*"
+      - 'version/*'
 
 jobs:
   build:
@@ -26,14 +26,14 @@ jobs:
        id: get_version
        uses: actions/github-script@v6
        with:
-          github-token: ${{ secrets.BOT_GITHUB_TOKEN }}
+          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            return context.payload.ref.replace(/\/refs\/tags\/version\//, '');
      - name: Create Release
        id: create_release
        uses: actions/create-release@v1.1.4
        env:
-          GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ github.ref }}
          release_name: Release ${{ steps.get_version.outputs.result }}

.github/workflows/translation-compile.yml (10 changes)

@@ -3,10 +3,10 @@ on:
   push:
     branches: [ main ]
     paths:
-      - "/locale/"
+      - '/locale/'
   pull_request:
     paths:
-      - "/locale/"
+      - '/locale/'
   workflow_dispatch:
 
 env:
@@ -19,17 +19,15 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-        with:
-          token: ${{ secrets.BOT_GITHUB_TOKEN }}
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: run compile
         run: poetry run ./manage.py compilemessages
       - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v5
+        uses: peter-evans/create-pull-request@v4
         id: cpr
         with:
-          token: ${{ secrets.BOT_GITHUB_TOKEN }}
+          token: ${{ secrets.GITHUB_TOKEN }}
           branch: compile-backend-translation
           commit-message: "core: compile backend translations"
           title: "core: compile backend translations"

.github/workflows/web-api-publish.yml (22 changes)

@@ -3,19 +3,17 @@ on:
   push:
     branches: [ main ]
     paths:
-      - "schema.yml"
+      - 'schema.yml'
   workflow_dispatch:
 jobs:
   build:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.5.1
         with:
-          token: ${{ secrets.BOT_GITHUB_TOKEN }}
-      - uses: actions/setup-node@v3.6.0
-        with:
-          node-version: "20"
-          registry-url: "https://registry.npmjs.org"
+          node-version: '16'
+          registry-url: 'https://registry.npmjs.org'
       - name: Generate API Client
         run: make gen-client-ts
       - name: Publish package
@@ -30,20 +28,14 @@ jobs:
         run: |
           export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
           npm i @goauthentik/api@$VERSION
-      - uses: peter-evans/create-pull-request@v5
+      - name: Create Pull Request
+        uses: peter-evans/create-pull-request@v4
         id: cpr
         with:
-          token: ${{ secrets.BOT_GITHUB_TOKEN }}
+          token: ${{ secrets.GITHUB_TOKEN }}
           branch: update-web-api-client
           commit-message: "web: bump API Client version"
           title: "web: bump API Client version"
           body: "web: bump API Client version"
           delete-branch: true
           signoff: true
-          team-reviewers: "@goauthentik/core"
-          author: authentik bot <github-bot@goauthentik.io>
-      - uses: peter-evans/enable-pull-request-automerge@v3
-        with:
-          token: ${{ secrets.BOT_GITHUB_TOKEN }}
-          pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
-          merge-method: squash

.gitignore (3 changes)

@@ -200,6 +200,3 @@ media/
 .idea/
 /gen-*/
 data/
-
-# Local Netlify folder
-.netlify

.vscode/extensions.json (20 changes)

@@ -1,20 +0,0 @@
-{
-    "recommendations": [
-        "EditorConfig.EditorConfig",
-        "bashmish.es6-string-css",
-        "bpruitt-goddard.mermaid-markdown-syntax-highlighting",
-        "dbaeumer.vscode-eslint",
-        "esbenp.prettier-vscode",
-        "golang.go",
-        "Gruntfuggly.todo-tree",
-        "mechatroner.rainbow-csv",
-        "ms-python.black-formatter",
-        "ms-python.isort",
-        "ms-python.pylint",
-        "ms-python.python",
-        "ms-python.vscode-pylance",
-        "redhat.vscode-yaml",
-        "Tobermory.es6-string-html",
-        "unifiedjs.vscode-mdx"
-    ]
-}

.vscode/settings.json (8 changes)

@@ -14,10 +14,7 @@
         "webauthn",
         "traefik",
         "passwordless",
-        "kubernetes",
-        "sso",
-        "slo",
-        "scim",
+        "kubernetes"
     ],
     "python.linting.pylintEnabled": true,
     "todo-tree.tree.showCountsInTree": true,
@@ -47,6 +44,5 @@
             "url": "https://github.com/goauthentik/authentik/issues/<num>",
             "ignoreCase": false
         }
-    ],
-    "go.testFlags": ["-count=1"]
+    ]
 }

@@ -20,7 +20,6 @@ The following is a set of guidelines for contributing to authentik and its compo
 - [Reporting Bugs](#reporting-bugs)
 - [Suggesting Enhancements](#suggesting-enhancements)
 - [Your First Code Contribution](#your-first-code-contribution)
-- [Help with the Docs](#help-with-the-docs)
 - [Pull Requests](#pull-requests)
 
 [Styleguides](#styleguides)
@@ -60,18 +59,19 @@ These are the current packages:
 authentik
 ├── admin - Administrative tasks and APIs, no models (Version updates, Metrics, system tasks)
 ├── api - General API Configuration (Routes, Schema and general API utilities)
-├── blueprints - Handle managed models and their state.
 ├── core - Core authentik functionality, central routes, core Models
 ├── crypto - Cryptography, currently used to generate and hold Certificates and Private Keys
 ├── events - Event Log, middleware and signals to generate signals
 ├── flows - Flows, the FlowPlanner and the FlowExecutor, used for all flows for authentication, authorization, etc
 ├── lib - Generic library of functions, few dependencies on other packages.
+├── managed - Handle managed models and their state.
 ├── outposts - Configure and deploy outposts on kubernetes and docker.
 ├── policies - General PolicyEngine
 │   ├── dummy - A Dummy policy used for testing
 │   ├── event_matcher - Match events based on different criteria
 │   ├── expiry - Check when a user's password was last set
 │   ├── expression - Execute any arbitrary python code
+│   ├── hibp - Check a password against HaveIBeenPwned
 │   ├── password - Check a password against several rules
 │   └── reputation - Check the user's/client's reputation
 ├── providers
@@ -136,9 +136,6 @@ authentik can be run locally, all though depending on which part you want to wor
 
 This is documented in the [developer docs](https://goauthentik.io/developer-docs/?utm_source=github)
 
-### Help with the Docs
-Contributions to the technical documentation are greatly appreciated. Open a PR if you have improvements to make or new content to add. If you have questions or suggestions about the documentation, open an Issue. No contribution is too small.
-
 ### Pull Requests
 
 The process described here has several goals:
@@ -158,19 +155,12 @@ While the prerequisites above must be satisfied prior to having your pull reques
 
 ## Styleguides
 
-### PR naming
-
-- Use the format of `<package>: <verb> <description>`
-- See [here](#authentik-packages) for `package`
-- Example: `providers/saml2: fix parsing of requests`
-
 ### Git Commit Messages
 
 - Use the format of `<package>: <verb> <description>`
 - See [here](#authentik-packages) for `package`
 - Example: `providers/saml2: fix parsing of requests`
 - Reference issues and pull requests liberally after the first line
-- Naming of commits within a PR does not need to adhere to the guidelines as we squash merge PRs
 
 ### Python Styleguide
 

Dockerfile (21 changes)

@@ -1,5 +1,5 @@
 # Stage 1: Build website
-FROM --platform=${BUILDPLATFORM} docker.io/node:20 as website-builder
+FROM --platform=${BUILDPLATFORM} docker.io/node:18 as website-builder
 
 COPY ./website /work/website/
 COPY ./blueprints /work/blueprints/
@@ -10,7 +10,7 @@ WORKDIR /work/website
 RUN npm ci && npm run build-docs-only
 
 # Stage 2: Build webui
-FROM --platform=${BUILDPLATFORM} docker.io/node:20 as web-builder
+FROM --platform=${BUILDPLATFORM} docker.io/node:18 as web-builder
 
 COPY ./web /work/web/
 COPY ./website /work/website/
@@ -20,7 +20,7 @@ WORKDIR /work/web
 RUN npm ci && npm run build
 
 # Stage 3: Poetry to requirements.txt export
-FROM docker.io/python:3.11.3-slim-bullseye AS poetry-locker
+FROM docker.io/python:3.11.1-slim-bullseye AS poetry-locker
 
 WORKDIR /work
 COPY ./pyproject.toml /work
@@ -31,7 +31,7 @@ RUN pip install --no-cache-dir poetry && \
     poetry export -f requirements.txt --dev --output requirements-dev.txt
 
 # Stage 4: Build go proxy
-FROM docker.io/golang:1.20.3-bullseye AS go-builder
+FROM docker.io/golang:1.19.4-bullseye AS go-builder
 
 WORKDIR /work
 
@@ -47,10 +47,9 @@ COPY ./go.sum /work/go.sum
 RUN go build -o /work/authentik ./cmd/server/
 
 # Stage 5: MaxMind GeoIP
-FROM docker.io/maxmindinc/geoipupdate:v5.0 as geoip
+FROM docker.io/maxmindinc/geoipupdate:v4.10 as geoip
 
 ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
-ENV GEOIPUPDATE_VERBOSE="true"
 
 RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
@@ -58,11 +57,11 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     /bin/sh -c "\
     export GEOIPUPDATE_ACCOUNT_ID=$(cat /run/secrets/GEOIPUPDATE_ACCOUNT_ID); \
     export GEOIPUPDATE_LICENSE_KEY=$(cat /run/secrets/GEOIPUPDATE_LICENSE_KEY); \
-    /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0 \
+    /usr/bin/entry.sh || exit 0 \
     "
 
 # Stage 6: Run
-FROM docker.io/python:3.11.3-slim-bullseye AS final-image
+FROM docker.io/python:3.11.1-slim-bullseye AS final-image
 
 LABEL org.opencontainers.image.url https://goauthentik.io
 LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
@@ -83,7 +82,7 @@ RUN apt-get update && \
     # Required for runtime
     apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
     # Required for bootstrap & healtcheck
-    apt-get install -y --no-install-recommends runit && \
+    apt-get install -y --no-install-recommends curl runit && \
    pip install --no-cache-dir -r /requirements.txt && \
    apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev && \
    apt-get autoremove --purge -y && \
@@ -96,13 +95,13 @@ RUN apt-get update && \
 
 COPY ./authentik/ /authentik
 COPY ./pyproject.toml /
-COPY ./schemas /schemas
+COPY ./xml /xml
 COPY ./locale /locale
 COPY ./tests /tests
 COPY ./manage.py /
 COPY ./blueprints /blueprints
 COPY ./lifecycle/ /lifecycle
-COPY --from=go-builder /work/authentik /bin/authentik
+COPY --from=go-builder /work/authentik /authentik-proxy
 COPY --from=web-builder /work/web/dist/ /web/dist/
 COPY --from=web-builder /work/web/authentik/ /web/authentik/
 COPY --from=website-builder /work/website/help/ /website/help/
LICENSE | 9
@@ -1,11 +1,6 @@
-Copyright (c) 2023 Jens Langhammer
+MIT License

-Portions of this software are licensed as follows:
-* All content residing under the "website/" directory of this repository is licensed under "Creative Commons: CC BY-SA 4.0 license".
-* All content that resides under the "authentik/enterprise/" directory of this repository, if that directory exists, is licensed under the license defined in "authentik/enterprise/LICENSE".
-* All client-side JavaScript (when served directly or after being compiled, arranged, augmented, or combined), is licensed under the "MIT Expat" license.
-* All third party components incorporated into the authentik are licensed under the original license provided by the owner of the applicable component.
-* Content outside of the above mentioned directories or restrictions above is available under the "MIT" license as defined below.
+Copyright (c) 2022 Jens Langhammer

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
Makefile | 80
@@ -3,24 +3,18 @@ PWD = $(shell pwd)
 UID = $(shell id -u)
 GID = $(shell id -g)
 NPM_VERSION = $(shell python -m scripts.npm_version)
-PY_SOURCES = authentik tests scripts lifecycle

-CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
-	-I .github/codespell-words.txt \
-	-S 'web/src/locales/**' \
-	authentik \
-	internal \
-	cmd \
-	web/src \
-	website/src \
-	website/blog \
-	website/developer-docs \
-	website/docs \
-	website/integrations \
-	website/src
-
 all: lint-fix lint test gen web

+test-integration:
+	coverage run manage.py test tests/integration
+
+test-e2e-provider:
+	coverage run manage.py test tests/e2e/test_provider*
+
+test-e2e-rest:
+	coverage run manage.py test tests/e2e/test_flows* tests/e2e/test_source*
+
 test-go:
 	go test -timeout 0 -v -race -cover ./...

@@ -39,19 +33,28 @@ test:
 	coverage report

 lint-fix:
-	isort authentik $(PY_SOURCES)
-	black authentik $(PY_SOURCES)
-	ruff authentik $(PY_SOURCES)
-	codespell -w $(CODESPELL_ARGS)
+	isort authentik tests scripts lifecycle
+	black authentik tests scripts lifecycle
+	codespell -I .github/codespell-words.txt -S 'web/src/locales/**' -w \
+		authentik \
+		internal \
+		cmd \
+		web/src \
+		website/src \
+		website/docs \
+		website/developer-docs

 lint:
-	pylint $(PY_SOURCES)
-	bandit -r $(PY_SOURCES) -x node_modules
+	pylint authentik tests lifecycle
+	bandit -r authentik tests lifecycle -x node_modules
 	golangci-lint run -v

 migrate:
 	python -m lifecycle.migrate

+run:
+	go run -v ./cmd/server/
+
 i18n-extract: i18n-extract-core web-extract

 i18n-extract-core:
@@ -65,20 +68,15 @@ gen-build:
 	AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
 	AUTHENTIK_DEBUG=true ak spectacular --file schema.yml

-gen-changelog:
-	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
-	npx prettier --write changelog.md
-
 gen-diff:
-	git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > old_schema.yml
+	git show $(shell git describe --abbrev=0):schema.yml > old_schema.yml
 	docker run \
 		--rm -v ${PWD}:/local \
 		--user ${UID}:${GID} \
-		docker.io/openapitools/openapi-diff:2.1.0-beta.6 \
+		docker.io/openapitools/openapi-diff:2.1.0-beta.3 \
 		--markdown /local/diff.md \
 		/local/old_schema.yml /local/schema.yml
 	rm old_schema.yml
-	npx prettier --write diff.md

 gen-clean:
 	rm -rf web/api/src/
@@ -88,7 +86,7 @@ gen-client-ts:
 	docker run \
 		--rm -v ${PWD}:/local \
 		--user ${UID}:${GID} \
-		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
+		docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
 		-i /local/schema.yml \
 		-g typescript-fetch \
 		-o /local/gen-ts-api \
@@ -101,21 +99,20 @@ gen-client-ts:
 	\cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api

 gen-client-go:
-	mkdir -p ./gen-go-api ./gen-go-api/templates
-	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./gen-go-api/config.yaml
-	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./gen-go-api/templates/README.mustache
-	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O ./gen-go-api/templates/go.mod.mustache
-	cp schema.yml ./gen-go-api/
+	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O config.yaml
+	mkdir -p templates
+	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O templates/README.mustache
+	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O templates/go.mod.mustache
 	docker run \
-		--rm -v ${PWD}/gen-go-api:/local \
+		--rm -v ${PWD}:/local \
 		--user ${UID}:${GID} \
-		docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
+		docker.io/openapitools/openapi-generator-cli:v6.0.0 generate \
 		-i /local/schema.yml \
 		-g go \
-		-o /local/ \
+		-o /local/gen-go-api \
 		-c /local/config.yaml
 	go mod edit -replace goauthentik.io/api/v3=./gen-go-api
-	rm -rf ./gen-go-api/config.yaml ./gen-go-api/templates/
+	rm -rf config.yaml ./templates/

 gen-dev-config:
 	python -m scripts.generate_config
@@ -173,6 +170,7 @@ website-watch:

 # These targets are use by GitHub actions to allow usage of matrix
 # which makes the YAML File a lot smaller
+PY_SOURCES=authentik tests lifecycle
 ci--meta-debug:
 	python -V
 	node --version
@@ -183,12 +181,6 @@ ci-pylint: ci--meta-debug
 ci-black: ci--meta-debug
 	black --check $(PY_SOURCES)

-ci-ruff: ci--meta-debug
-	ruff check $(PY_SOURCES)
-
-ci-codespell: ci--meta-debug
-	codespell $(CODESPELL_ARGS) -s
-
 ci-isort: ci--meta-debug
 	isort --check $(PY_SOURCES)

README.md | 20
@@ -15,13 +15,13 @@

 ## What is authentik?

-Authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. Authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them.
+authentik is an open-source Identity Provider focused on flexibility and versatility. You can use authentik in an existing environment to add support for new protocols. authentik is also a great solution for implementing signup/recovery/etc in your application, so you don't have to deal with it.

 ## Installation

-For small/test setups it is recommended to use Docker Compose; refer to the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github).
+For small/test setups it is recommended to use docker-compose, see the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github)

-For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github).
+For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github)

 ## Screenshots

@@ -32,16 +32,12 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h

 ## Development

-See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github)
+See [Development Documentation](https://goauthentik.io/developer-docs/?utm_source=github)

 ## Security

 See [SECURITY.md](SECURITY.md)

-## Adoption and Contributions
-
-Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).
-
 ## Sponsors

 This project is proudly sponsored by:
@@ -53,3 +49,11 @@ This project is proudly sponsored by:
 </p>

 DigitalOcean provides development and testing resources for authentik.
+
+<p>
+	<a href="https://www.netlify.com">
+		<img src="https://www.netlify.com/img/global/badges/netlify-color-accent.svg" alt="Deploys by Netlify" />
+	</a>
+</p>
+
+Netlify hosts the [goauthentik.io](https://goauthentik.io) site.
@@ -6,8 +6,8 @@ Authentik takes security very seriously. We follow the rules of [responsible dis

 | Version | Supported |
 | --------- | ------------------ |
-| 2023.2.x | :white_check_mark: |
-| 2023.3.x | :white_check_mark: |
+| 2022.11.x | :white_check_mark: |
+| 2022.12.x | :white_check_mark: |

 ## Reporting a Vulnerability

@@ -2,7 +2,7 @@
 from os import environ
 from typing import Optional

-__version__ = "2023.4.1"
+__version__ = "2022.12.3"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"


@@ -1,7 +1,4 @@
 """authentik administration metrics"""
-from datetime import timedelta
-
-from django.db.models.functions import ExtractHour
 from drf_spectacular.utils import extend_schema, extend_schema_field
 from guardian.shortcuts import get_objects_for_user
 from rest_framework.fields import IntegerField, SerializerMethodField
@@ -24,44 +21,38 @@ class CoordinateSerializer(PassiveSerializer):
 class LoginMetricsSerializer(PassiveSerializer):
     """Login Metrics per 1h"""

-    logins = SerializerMethodField()
-    logins_failed = SerializerMethodField()
-    authorizations = SerializerMethodField()
+    logins_per_1h = SerializerMethodField()
+    logins_failed_per_1h = SerializerMethodField()
+    authorizations_per_1h = SerializerMethodField()

     @extend_schema_field(CoordinateSerializer(many=True))
-    def get_logins(self, _):
-        """Get successful logins per 8 hours for the last 7 days"""
+    def get_logins_per_1h(self, _):
+        """Get successful logins per hour for the last 24 hours"""
         user = self.context["user"]
         return (
-            get_objects_for_user(user, "authentik_events.view_event").filter(
-                action=EventAction.LOGIN
-            )
-            # 3 data points per day, so 8 hour spans
-            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
+            get_objects_for_user(user, "authentik_events.view_event")
+            .filter(action=EventAction.LOGIN)
+            .get_events_per_hour()
         )

     @extend_schema_field(CoordinateSerializer(many=True))
-    def get_logins_failed(self, _):
-        """Get failed logins per 8 hours for the last 7 days"""
+    def get_logins_failed_per_1h(self, _):
+        """Get failed logins per hour for the last 24 hours"""
         user = self.context["user"]
         return (
-            get_objects_for_user(user, "authentik_events.view_event").filter(
-                action=EventAction.LOGIN_FAILED
-            )
-            # 3 data points per day, so 8 hour spans
-            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
+            get_objects_for_user(user, "authentik_events.view_event")
+            .filter(action=EventAction.LOGIN_FAILED)
+            .get_events_per_hour()
         )

     @extend_schema_field(CoordinateSerializer(many=True))
-    def get_authorizations(self, _):
-        """Get successful authorizations per 8 hours for the last 7 days"""
+    def get_authorizations_per_1h(self, _):
+        """Get successful authorizations per hour for the last 24 hours"""
         user = self.context["user"]
         return (
-            get_objects_for_user(user, "authentik_events.view_event").filter(
-                action=EventAction.AUTHORIZE_APPLICATION
-            )
-            # 3 data points per day, so 8 hour spans
-            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
+            get_objects_for_user(user, "authentik_events.view_event")
+            .filter(action=EventAction.AUTHORIZE_APPLICATION)
+            .get_events_per_hour()
         )


@@ -18,7 +18,6 @@ from authentik.core.api.utils import PassiveSerializer
 from authentik.lib.utils.reflection import get_env
 from authentik.outposts.apps import MANAGED_OUTPOST
 from authentik.outposts.models import Outpost
-from authentik.tenants.utils import get_tenant


 class RuntimeDict(TypedDict):
@@ -78,7 +77,7 @@ class SystemSerializer(PassiveSerializer):

     def get_tenant(self, request: Request) -> str:
         """Currently active tenant"""
-        return str(get_tenant(request))
+        return str(request._request.tenant)

     def get_server_time(self, request: Request) -> datetime:
         """Current server time"""
@@ -98,14 +97,8 @@ class SystemView(APIView):
     permission_classes = [IsAdminUser]
     pagination_class = None
     filter_backends = []
-    serializer_class = SystemSerializer

     @extend_schema(responses={200: SystemSerializer(many=False)})
     def get(self, request: Request) -> Response:
         """Get system information."""
         return Response(SystemSerializer(request).data)
-
-    @extend_schema(responses={200: SystemSerializer(many=False)})
-    def post(self, request: Request) -> Response:
-        """Get system information."""
-        return Response(SystemSerializer(request).data)
|
|||||||
from drf_spectacular.types import OpenApiTypes
|
from drf_spectacular.types import OpenApiTypes
|
||||||
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
|
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
|
||||||
from rest_framework.decorators import action
|
from rest_framework.decorators import action
|
||||||
from rest_framework.fields import (
|
from rest_framework.fields import CharField, ChoiceField, DateTimeField, ListField
|
||||||
CharField,
|
|
||||||
ChoiceField,
|
|
||||||
DateTimeField,
|
|
||||||
ListField,
|
|
||||||
SerializerMethodField,
|
|
||||||
)
|
|
||||||
from rest_framework.permissions import IsAdminUser
|
from rest_framework.permissions import IsAdminUser
|
||||||
from rest_framework.request import Request
|
from rest_framework.request import Request
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
@ -32,7 +26,6 @@ class TaskSerializer(PassiveSerializer):
|
|||||||
task_name = CharField()
|
task_name = CharField()
|
||||||
task_description = CharField()
|
task_description = CharField()
|
||||||
task_finish_timestamp = DateTimeField(source="finish_time")
|
task_finish_timestamp = DateTimeField(source="finish_time")
|
||||||
task_duration = SerializerMethodField()
|
|
||||||
|
|
||||||
status = ChoiceField(
|
status = ChoiceField(
|
||||||
source="result.status.name",
|
source="result.status.name",
|
||||||
@ -40,18 +33,13 @@ class TaskSerializer(PassiveSerializer):
|
|||||||
)
|
)
|
||||||
messages = ListField(source="result.messages")
|
messages = ListField(source="result.messages")
|
||||||
|
|
||||||
def get_task_duration(self, instance: TaskInfo) -> int:
|
def to_representation(self, instance):
|
||||||
"""Get the duration a task took to run"""
|
|
||||||
return max(instance.finish_timestamp - instance.start_timestamp, 0)
|
|
||||||
|
|
||||||
def to_representation(self, instance: TaskInfo):
|
|
||||||
"""When a new version of authentik adds fields to TaskInfo,
|
"""When a new version of authentik adds fields to TaskInfo,
|
||||||
the API will fail with an AttributeError, as the classes
|
the API will fail with an AttributeError, as the classes
|
||||||
are pickled in cache. In that case, just delete the info"""
|
are pickled in cache. In that case, just delete the info"""
|
||||||
try:
|
try:
|
||||||
return super().to_representation(instance)
|
return super().to_representation(instance)
|
||||||
# pylint: disable=broad-except
|
except AttributeError: # pragma: no cover
|
||||||
except Exception: # pragma: no cover
|
|
||||||
if isinstance(self.instance, list):
|
if isinstance(self.instance, list):
|
||||||
for inst in self.instance:
|
for inst in self.instance:
|
||||||
inst.delete()
|
inst.delete()
|
||||||
@ -80,6 +68,7 @@ class TaskViewSet(ViewSet):
|
|||||||
),
|
),
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
# pylint: disable=invalid-name
|
||||||
def retrieve(self, request: Request, pk=None) -> Response:
|
def retrieve(self, request: Request, pk=None) -> Response:
|
||||||
"""Get a single system task"""
|
"""Get a single system task"""
|
||||||
task = TaskInfo.by_name(pk)
|
task = TaskInfo.by_name(pk)
|
||||||
@ -110,6 +99,7 @@ class TaskViewSet(ViewSet):
|
|||||||
],
|
],
|
||||||
)
|
)
|
||||||
@action(detail=True, methods=["post"])
|
@action(detail=True, methods=["post"])
|
||||||
|
# pylint: disable=invalid-name
|
||||||
def retry(self, request: Request, pk=None) -> Response:
|
def retry(self, request: Request, pk=None) -> Response:
|
||||||
"""Retry task"""
|
"""Retry task"""
|
||||||
task = TaskInfo.by_name(pk)
|
task = TaskInfo.by_name(pk)
|
||||||
|
@@ -8,6 +8,7 @@ from authentik.root.monitoring import monitoring_set


 @receiver(monitoring_set)
+# pylint: disable=unused-argument
 def monitoring_set_workers(sender, **kwargs):
     """Set worker gauge"""
     count = len(CELERY_APP.control.ping(timeout=0.5))
@@ -15,7 +16,8 @@ def monitoring_set_workers(sender, **kwargs):


 @receiver(monitoring_set)
+# pylint: disable=unused-argument
 def monitoring_set_tasks(sender, **kwargs):
     """Set task gauges"""
     for task in TaskInfo.all().values():
-        task.update_metrics()
+        task.set_prom_metrics()
@@ -9,7 +9,6 @@ from authentik.blueprints.tests import reconcile_app
 from authentik.core.models import Group, User
 from authentik.core.tasks import clean_expired_models
 from authentik.events.monitored_tasks import TaskResultStatus
-from authentik.lib.generators import generate_id


 class TestAdminAPI(TestCase):
@@ -17,8 +16,8 @@ class TestAdminAPI(TestCase):

     def setUp(self) -> None:
         super().setUp()
-        self.user = User.objects.create(username=generate_id())
-        self.group = Group.objects.create(name=generate_id(), is_superuser=True)
+        self.user = User.objects.create(username="test-user")
+        self.group = Group.objects.create(name="superusers", is_superuser=True)
         self.group.users.add(self.user)
         self.group.save()
         self.client.force_login(self.user)
@@ -32,17 +32,7 @@ def validate_auth(header: bytes) -> Optional[str]:

 def bearer_auth(raw_header: bytes) -> Optional[User]:
     """raw_header in the Format of `Bearer ....`"""
-    user = auth_user_lookup(raw_header)
-    if not user:
-        return None
-    if not user.is_active:
-        raise AuthenticationFailed("Token invalid/expired")
-    return user
-
-
-def auth_user_lookup(raw_header: bytes) -> Optional[User]:
-    """raw_header in the Format of `Bearer ....`"""
-    from authentik.providers.oauth2.models import AccessToken
+    from authentik.providers.oauth2.models import RefreshToken

     auth_credentials = validate_auth(raw_header)
     if not auth_credentials:
@@ -55,8 +45,8 @@ def auth_user_lookup(raw_header: bytes) -> Optional[User]:
         CTX_AUTH_VIA.set("api_token")
         return key_token.user
     # then try to auth via JWT
-    jwt_token = AccessToken.filter_not_expired(
-        token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
+    jwt_token = RefreshToken.filter_not_expired(
+        refresh_token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
     ).first()
     if jwt_token:
         # Double-check scopes, since they are saved in a single string
@@ -7,13 +7,82 @@ API Browser - {{ tenant.branding_title }}
 {% endblock %}

 {% block head %}
-<script src="{% static 'dist/standalone/api-browser/index.js' %}?version={{ version }}" type="module"></script>
-<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
-<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
-<link rel="icon" href="{{ tenant.branding_favicon }}">
-<link rel="shortcut icon" href="{{ tenant.branding_favicon }}">
+<script type="module" src="{% static 'dist/rapidoc-min.js' %}"></script>
+<script>
+function getCookie(name) {
+    let cookieValue = "";
+    if (document.cookie && document.cookie !== "") {
+        const cookies = document.cookie.split(";");
+        for (let i = 0; i < cookies.length; i++) {
+            const cookie = cookies[i].trim();
+            // Does this cookie string begin with the name we want?
+            if (cookie.substring(0, name.length + 1) === name + "=") {
+                cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
+                break;
+            }
+        }
+    }
+    return cookieValue;
+}
+window.addEventListener('DOMContentLoaded', (event) => {
+    const rapidocEl = document.querySelector('rapi-doc');
+    rapidocEl.addEventListener('before-try', (e) => {
+        e.detail.request.headers.append('X-authentik-CSRF', getCookie("authentik_csrf"));
+    });
+});
+</script>
+<style>
+img.logo {
+    width: 100%;
+    padding: 1rem 0.5rem 1.5rem 0.5rem;
+    min-height: 48px;
+}
+</style>
 {% endblock %}

 {% block body %}
-<ak-api-browser schemaPath="{{ path }}"></ak-api-browser>
+<rapi-doc
+    spec-url="{{ path }}"
+    heading-text=""
+    theme="light"
+    render-style="read"
+    default-schema-tab="schema"
+    primary-color="#fd4b2d"
+    nav-bg-color="#212427"
+    bg-color="#000000"
+    text-color="#000000"
+    nav-text-color="#ffffff"
+    nav-hover-bg-color="#3c3f42"
+    nav-accent-color="#4f5255"
+    nav-hover-text-color="#ffffff"
+    use-path-in-nav-bar="true"
+    nav-item-spacing="relaxed"
+    allow-server-selection="false"
+    show-header="false"
+    allow-spec-url-load="false"
+    allow-spec-file-load="false">
+    <div slot="nav-logo">
+        <img class="logo" src="{% static 'dist/assets/icons/icon_left_brand.png' %}" />
+    </div>
+</rapi-doc>
+<script>
+const rapidoc = document.querySelector("rapi-doc");
+const matcher = window.matchMedia("(prefers-color-scheme: light)");
+const changer = (ev) => {
+    const style = getComputedStyle(document.documentElement);
+    let bg, text = "";
+    if (matcher.matches) {
+        bg = style.getPropertyValue('--pf-global--BackgroundColor--light-300');
+        text = style.getPropertyValue('--pf-global--Color--300');
+    } else {
+        bg = style.getPropertyValue('--ak-dark-background');
+        text = style.getPropertyValue('--ak-dark-foreground');
+    }
+    rapidoc.attributes.getNamedItem("bg-color").value = bg.trim();
+    rapidoc.attributes.getNamedItem("text-color").value = text.trim();
+    rapidoc.requestUpdate();
+};
+matcher.addEventListener("change", changer);
+window.addEventListener("load", changer);
+</script>
 {% endblock %}
@@ -1,19 +1,18 @@
 """Test API Authentication"""
-import json
 from base64 import b64encode

 from django.conf import settings
 from django.test import TestCase
-from django.utils import timezone
+from guardian.shortcuts import get_anonymous_user
 from rest_framework.exceptions import AuthenticationFailed

 from authentik.api.authentication import bearer_auth
 from authentik.blueprints.tests import reconcile_app
 from authentik.core.models import USER_ATTRIBUTE_SA, Token, TokenIntents
-from authentik.core.tests.utils import create_test_admin_user, create_test_flow
+from authentik.core.tests.utils import create_test_flow
 from authentik.lib.generators import generate_id
 from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
-from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
+from authentik.providers.oauth2.models import OAuth2Provider, RefreshToken


 class TestAPIAuth(TestCase):
@@ -37,18 +36,9 @@ class TestAPIAuth(TestCase):

     def test_bearer_valid(self):
         """Test valid token"""
-        token = Token.objects.create(intent=TokenIntents.INTENT_API, user=create_test_admin_user())
+        token = Token.objects.create(intent=TokenIntents.INTENT_API, user=get_anonymous_user())
         self.assertEqual(bearer_auth(f"Bearer {token.key}".encode()), token.user)

-    def test_bearer_valid_deactivated(self):
-        """Test valid token"""
-        user = create_test_admin_user()
-        user.is_active = False
-        user.save()
-        token = Token.objects.create(intent=TokenIntents.INTENT_API, user=user)
-        with self.assertRaises(AuthenticationFailed):
-            bearer_auth(f"Bearer {token.key}".encode())
-
     def test_managed_outpost(self):
         """Test managed outpost"""
         with self.assertRaises(AuthenticationFailed):
@@ -65,28 +55,24 @@ class TestAPIAuth(TestCase):
         provider = OAuth2Provider.objects.create(
             name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
         )
-        refresh = AccessToken.objects.create(
-            user=create_test_admin_user(),
+        refresh = RefreshToken.objects.create(
+            user=get_anonymous_user(),
             provider=provider,
-            token=generate_id(),
-            auth_time=timezone.now(),
+            refresh_token=generate_id(),
             _scope=SCOPE_AUTHENTIK_API,
-            _id_token=json.dumps({}),
         )
-        self.assertEqual(bearer_auth(f"Bearer {refresh.token}".encode()), refresh.user)
+        self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)

     def test_jwt_missing_scope(self):
         """Test valid JWT"""
         provider = OAuth2Provider.objects.create(
             name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
         )
-        refresh = AccessToken.objects.create(
-            user=create_test_admin_user(),
+        refresh = RefreshToken.objects.create(
+            user=get_anonymous_user(),
             provider=provider,
-            token=generate_id(),
-            auth_time=timezone.now(),
+            refresh_token=generate_id(),
             _scope="",
-            _id_token=json.dumps({}),
         )
         with self.assertRaises(AuthenticationFailed):
-            self.assertEqual(bearer_auth(f"Bearer {refresh.token}".encode()), refresh.user)
+            self.assertEqual(bearer_auth(f"Bearer {refresh.refresh_token}".encode()), refresh.user)
@@ -4,7 +4,6 @@ from guardian.shortcuts import assign_perm
 from rest_framework.test import APITestCase

 from authentik.core.models import Application, User
-from authentik.lib.generators import generate_id


 class TestAPIDecorators(APITestCase):
@@ -17,7 +16,7 @@ class TestAPIDecorators(APITestCase):
     def test_obj_perm_denied(self):
         """Test object perm denied"""
         self.client.force_login(self.user)
-        app = Application.objects.create(name=generate_id(), slug=generate_id())
+        app = Application.objects.create(name="denied", slug="denied")
         response = self.client.get(
             reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
         )
@@ -26,7 +25,7 @@ class TestAPIDecorators(APITestCase):
     def test_other_perm_denied(self):
         """Test other perm denied"""
         self.client.force_login(self.user)
-        app = Application.objects.create(name=generate_id(), slug=generate_id())
+        app = Application.objects.create(name="denied", slug="denied")
         assign_perm("authentik_core.view_application", self.user, app)
         response = self.client.get(
             reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
@@ -29,7 +29,6 @@ class Capabilities(models.TextChoices):
     CAN_GEO_IP = "can_geo_ip"
     CAN_IMPERSONATE = "can_impersonate"
     CAN_DEBUG = "can_debug"
-    IS_ENTERPRISE = "is_enterprise"


 class ErrorReportingConfigSerializer(PassiveSerializer):
@@ -71,8 +70,6 @@ class ConfigView(APIView):
             caps.append(Capabilities.CAN_IMPERSONATE)
         if settings.DEBUG: # pragma: no cover
             caps.append(Capabilities.CAN_DEBUG)
-        if "authentik.enterprise" in settings.INSTALLED_APPS:
-            caps.append(Capabilities.IS_ENTERPRISE)
         return caps

     def get_config(self) -> ConfigSerializer:
@@ -33,7 +33,6 @@ from authentik.flows.api.flows import FlowViewSet
 from authentik.flows.api.stages import StageViewSet
 from authentik.flows.views.executor import FlowExecutorView
 from authentik.flows.views.inspector import FlowInspectorView
-from authentik.interfaces.api import InterfaceViewSet
 from authentik.outposts.api.outposts import OutpostViewSet
 from authentik.outposts.api.service_connections import (
     DockerServiceConnectionViewSet,
@@ -46,22 +45,16 @@ from authentik.policies.dummy.api import DummyPolicyViewSet
 from authentik.policies.event_matcher.api import EventMatcherPolicyViewSet
 from authentik.policies.expiry.api import PasswordExpiryPolicyViewSet
 from authentik.policies.expression.api import ExpressionPolicyViewSet
+from authentik.policies.hibp.api import HaveIBeenPwendPolicyViewSet
 from authentik.policies.password.api import PasswordPolicyViewSet
 from authentik.policies.reputation.api import ReputationPolicyViewSet, ReputationViewSet
 from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet
 from authentik.providers.oauth2.api.providers import OAuth2ProviderViewSet
 from authentik.providers.oauth2.api.scopes import ScopeMappingViewSet
-from authentik.providers.oauth2.api.tokens import (
-    AccessTokenViewSet,
-    AuthorizationCodeViewSet,
-    RefreshTokenViewSet,
-)
+from authentik.providers.oauth2.api.tokens import AuthorizationCodeViewSet, RefreshTokenViewSet
 from authentik.providers.proxy.api import ProxyOutpostConfigViewSet, ProxyProviderViewSet
-from authentik.providers.radius.api import RadiusOutpostConfigViewSet, RadiusProviderViewSet
 from authentik.providers.saml.api.property_mapping import SAMLPropertyMappingViewSet
 from authentik.providers.saml.api.providers import SAMLProviderViewSet
-from authentik.providers.scim.api.property_mapping import SCIMMappingViewSet
-from authentik.providers.scim.api.providers import SCIMProviderViewSet
 from authentik.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
 from authentik.sources.oauth.api.source import OAuthSourceViewSet
 from authentik.sources.oauth.api.source_connection import UserOAuthSourceConnectionViewSet
@@ -124,15 +117,12 @@ router.register("core/user_consent", UserConsentViewSet)
 router.register("core/tokens", TokenViewSet)
 router.register("core/tenants", TenantViewSet)

-router.register("interfaces", InterfaceViewSet)
-
 router.register("outposts/instances", OutpostViewSet)
 router.register("outposts/service_connections/all", ServiceConnectionViewSet)
 router.register("outposts/service_connections/docker", DockerServiceConnectionViewSet)
 router.register("outposts/service_connections/kubernetes", KubernetesServiceConnectionViewSet)
 router.register("outposts/proxy", ProxyOutpostConfigViewSet)
 router.register("outposts/ldap", LDAPOutpostConfigViewSet)
-router.register("outposts/radius", RadiusOutpostConfigViewSet)

 router.register("flows/instances", FlowViewSet)
 router.register("flows/bindings", FlowStageBindingViewSet)
@@ -160,6 +150,7 @@ router.register("policies/all", PolicyViewSet)
 router.register("policies/bindings", PolicyBindingViewSet)
 router.register("policies/expression", ExpressionPolicyViewSet)
 router.register("policies/event_matcher", EventMatcherPolicyViewSet)
+router.register("policies/haveibeenpwned", HaveIBeenPwendPolicyViewSet)
 router.register("policies/password_expiry", PasswordExpiryPolicyViewSet)
 router.register("policies/password", PasswordPolicyViewSet)
 router.register("policies/reputation/scores", ReputationViewSet)
@@ -170,19 +161,15 @@ router.register("providers/ldap", LDAPProviderViewSet)
 router.register("providers/proxy", ProxyProviderViewSet)
 router.register("providers/oauth2", OAuth2ProviderViewSet)
 router.register("providers/saml", SAMLProviderViewSet)
-router.register("providers/scim", SCIMProviderViewSet)
-router.register("providers/radius", RadiusProviderViewSet)

 router.register("oauth2/authorization_codes", AuthorizationCodeViewSet)
 router.register("oauth2/refresh_tokens", RefreshTokenViewSet)
-router.register("oauth2/access_tokens", AccessTokenViewSet)

 router.register("propertymappings/all", PropertyMappingViewSet)
 router.register("propertymappings/ldap", LDAPPropertyMappingViewSet)
 router.register("propertymappings/saml", SAMLPropertyMappingViewSet)
 router.register("propertymappings/scope", ScopeMappingViewSet)
 router.register("propertymappings/notification", NotificationWebhookMappingViewSet)
-router.register("propertymappings/scim", SCIMMappingViewSet)

 router.register("authenticators/all", DeviceViewSet, basename="device")
 router.register("authenticators/duo", DuoDeviceViewSet)
@@ -1,5 +1,4 @@
 """Serializer mixin for managed models"""
-from django.utils.translation import gettext_lazy as _
 from drf_spectacular.utils import extend_schema, inline_serializer
 from rest_framework.decorators import action
 from rest_framework.exceptions import ValidationError
@@ -12,7 +11,6 @@ from rest_framework.viewsets import ModelViewSet

 from authentik.api.decorators import permission_required
 from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
-from authentik.blueprints.v1.importer import Importer
 from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import PassiveSerializer
@@ -42,22 +40,8 @@ class BlueprintInstanceSerializer(ModelSerializer):
             raise ValidationError(exc) from exc
         return path

-    def validate_content(self, content: str) -> str:
-        """Ensure content (if set) is a valid blueprint"""
-        if content == "":
-            return content
-        context = self.instance.context if self.instance else {}
-        valid, logs = Importer(content, context).validate()
-        if not valid:
-            raise ValidationError(_("Failed to validate blueprint"), *[x["msg"] for x in logs])
-        return content
-
-    def validate(self, attrs: dict) -> dict:
-        if attrs.get("path", "") == "" and attrs.get("content", "") == "":
-            raise ValidationError(_("Either path or content must be set."))
-        return super().validate(attrs)
-
     class Meta:
+
         model = BlueprintInstance
         fields = [
             "pk",
@@ -70,7 +54,6 @@ class BlueprintInstanceSerializer(ModelSerializer):
             "enabled",
             "managed_models",
             "metadata",
-            "content",
         ]
         extra_kwargs = {
             "status": {"read_only": True},
@@ -55,12 +55,11 @@ class AuthentikBlueprintsConfig(ManagedAppConfig):
         """Load v1 tasks"""
         self.import_module("authentik.blueprints.v1.tasks")

-    def reconcile_blueprints_discovery(self):
+    def reconcile_blueprints_discover(self):
         """Run blueprint discovery"""
-        from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints
+        from authentik.blueprints.v1.tasks import blueprints_discover

-        blueprints_discovery.delay()
-        clear_failed_blueprints.delay()
+        blueprints_discover.delay()

     def import_models(self):
         super().import_models()
@@ -19,8 +19,10 @@ class Command(BaseCommand):
         for blueprint_path in options.get("blueprints", []):
             content = BlueprintInstance(path=blueprint_path).retrieve()
             importer = Importer(content)
-            valid, _ = importer.validate()
+            valid, logs = importer.validate()
             if not valid:
+                for log in logs:
+                    getattr(LOGGER, log.pop("log_level"))(**log)
                 self.stderr.write("blueprint invalid")
                 sys_exit(1)
             importer.apply()
@@ -6,6 +6,7 @@ from pathlib import Path
 import django.contrib.postgres.fields
 from dacite.core import from_dict
 from django.apps.registry import Apps
+from django.conf import settings
 from django.db import migrations, models
 from django.db.backends.base.schema import BaseDatabaseSchemaEditor
 from yaml import load
@@ -14,7 +15,7 @@ from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM
 from authentik.lib.config import CONFIG


-def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
+def check_blueprint_v1_file(BlueprintInstance: type["BlueprintInstance"], path: Path):
     """Check if blueprint should be imported"""
     from authentik.blueprints.models import BlueprintInstanceStatus
     from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata
@@ -70,6 +71,7 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEdit


 class Migration(migrations.Migration):
+
     initial = True

     dependencies = [("authentik_flows", "0001_initial")]
@@ -84,12 +86,7 @@ class Migration(migrations.Migration):
                     "managed",
                     models.TextField(
                         default=None,
-                        help_text=(
-                            "Objects which are managed by authentik. These objects are created and"
-                            " updated automatically. This is flag only indicates that an object can"
-                            " be overwritten by migrations. You can still modify the objects via"
-                            " the API, but expect changes to be overwritten in a later update."
-                        ),
+                        help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
                         null=True,
                         unique=True,
                         verbose_name="Managed by authentik",
@@ -1,22 +0,0 @@
-# Generated by Django 4.1.5 on 2023-01-10 19:48
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("authentik_blueprints", "0001_initial"),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name="blueprintinstance",
-            name="content",
-            field=models.TextField(blank=True, default=""),
-        ),
-        migrations.AlterField(
-            model_name="blueprintinstance",
-            name="path",
-            field=models.TextField(blank=True, default=""),
-        ),
-    ]
@ -1,18 +1,30 @@
"""blueprint models"""
from pathlib import Path
+from urllib.parse import urlparse
from uuid import uuid4

from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils.translation import gettext_lazy as _
+from opencontainers.distribution.reggie import (
+    NewClient,
+    WithDebug,
+    WithDefaultName,
+    WithDigest,
+    WithReference,
+    WithUserAgent,
+    WithUsernamePassword,
+)
+from requests.exceptions import RequestException
from rest_framework.serializers import Serializer
from structlog import get_logger

-from authentik.blueprints.v1.oci import BlueprintOCIClient, OCIException
from authentik.lib.config import CONFIG
from authentik.lib.models import CreatedUpdatedModel, SerializerModel
from authentik.lib.sentry import SentryIgnoredException
+from authentik.lib.utils.http import authentik_user_agent

+OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
LOGGER = get_logger()


@ -29,15 +41,18 @@ class ManagedModel(models.Model):
        null=True,
        verbose_name=_("Managed by authentik"),
        help_text=_(
+            (
            "Objects which are managed by authentik. These objects are created and updated "
            "automatically. This is flag only indicates that an object can be overwritten by "
            "migrations. You can still modify the objects via the API, but expect changes "
            "to be overwritten in a later update."
+            )
        ),
        unique=True,
    )

    class Meta:

        abstract = True


@ -59,8 +74,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):

    name = models.TextField()
    metadata = models.JSONField(default=dict)
-    path = models.TextField(default="", blank=True)
-    content = models.TextField(default="", blank=True)
+    path = models.TextField()
    context = models.JSONField(default=dict)
    last_applied = models.DateTimeField(auto_now=True)
    last_applied_hash = models.TextField()
@ -72,29 +86,60 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):

    def retrieve_oci(self) -> str:
        """Get blueprint from an OCI registry"""
-        client = BlueprintOCIClient(self.path.replace("oci://", "https://"))
+        url = urlparse(self.path)
+        ref = "latest"
+        path = url.path[1:]
+        if ":" in url.path:
+            path, _, ref = path.partition(":")
+        client = NewClient(
+            f"https://{url.hostname}",
+            WithUserAgent(authentik_user_agent()),
+            WithUsernamePassword(url.username, url.password),
+            WithDefaultName(path),
+            WithDebug(True),
+        )
+        LOGGER.info("Fetching OCI manifests for blueprint", instance=self)
+        manifest_request = client.NewRequest(
+            "GET",
+            "/v2/<name>/manifests/<reference>",
+            WithReference(ref),
+        ).SetHeader("Accept", "application/vnd.oci.image.manifest.v1+json")
        try:
-            manifests = client.fetch_manifests()
-            return client.fetch_blobs(manifests)
-        except OCIException as exc:
+            manifest_response = client.Do(manifest_request)
+            manifest_response.raise_for_status()
+        except RequestException as exc:
            raise BlueprintRetrievalFailed(exc) from exc
+        manifest = manifest_response.json()
+        if "errors" in manifest:
+            raise BlueprintRetrievalFailed(manifest["errors"])

-    def retrieve_file(self) -> str:
-        """Get blueprint from path"""
+        blob = None
+        for layer in manifest.get("layers", []):
+            if layer.get("mediaType", "") == OCI_MEDIA_TYPE:
+                blob = layer.get("digest")
+                LOGGER.debug("Found layer with matching media type", instance=self, blob=blob)
+        if not blob:
+            raise BlueprintRetrievalFailed("Blob not found")

+        blob_request = client.NewRequest(
+            "GET",
+            "/v2/<name>/blobs/<digest>",
+            WithDigest(blob),
+        )
        try:
-            full_path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
-            with full_path.open("r", encoding="utf-8") as _file:
-                return _file.read()
-        except (IOError, OSError) as exc:
+            blob_response = client.Do(blob_request)
+            blob_response.raise_for_status()
+            return blob_response.text
+        except RequestException as exc:
            raise BlueprintRetrievalFailed(exc) from exc

    def retrieve(self) -> str:
        """Retrieve blueprint contents"""
        if self.path.startswith("oci://"):
            return self.retrieve_oci()
-        if self.path != "":
-            return self.retrieve_file()
-        return self.content
+        full_path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
+        with full_path.open("r", encoding="utf-8") as _file:
+            return _file.read()

    @property
    def serializer(self) -> Serializer:
@ -106,6 +151,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
        return f"Blueprint Instance {self.name}"

    class Meta:
+
        verbose_name = _("Blueprint Instance")
        verbose_name_plural = _("Blueprint Instances")
        unique_together = (
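Note: the retrieve_oci() variant above parses an oci:// path into registry host, repository name and tag before talking to the registry. A minimal sketch of just that parsing step, assuming a path such as oci://ghcr.io/example/blueprint:latest (the example reference is illustrative, not taken from this diff):

    from urllib.parse import urlparse

    def split_oci_path(path: str) -> tuple[str, str, str]:
        """Split an oci:// blueprint path into (registry URL, repository, tag)."""
        url = urlparse(path)  # e.g. oci://ghcr.io/example/blueprint:2022.12
        ref = "latest"        # default tag when none is given
        name = url.path[1:]   # drop the leading "/"
        if ":" in url.path:
            name, _, ref = name.partition(":")
        return f"https://{url.hostname}", name, ref

    # split_oci_path("oci://ghcr.io/example/blueprint:2022.12")
    # -> ("https://ghcr.io", "example/blueprint", "2022.12")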
@ -5,13 +5,8 @@ from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
    "blueprints_v1_discover": {
-        "task": "authentik.blueprints.v1.tasks.blueprints_discovery",
+        "task": "authentik.blueprints.v1.tasks.blueprints_discover",
        "schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"),
        "options": {"queue": "authentik_scheduled"},
    },
-    "blueprints_v1_cleanup": {
-        "task": "authentik.blueprints.v1.tasks.clear_failed_blueprints",
-        "schedule": crontab(minute=fqdn_rand("blueprints_v1_cleanup"), hour="*"),
-        "options": {"queue": "authentik_scheduled"},
-    },
}
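Note: both beat entries derive their minute from fqdn_rand(), so every installation runs these hourly tasks at a host-specific minute instead of all hosts firing at once. A rough sketch of what such a helper could look like (the hashing details are an assumption for illustration, not code from this diff):

    from hashlib import sha256
    from socket import getfqdn

    def fqdn_rand(seed: str, stop: int = 60) -> int:
        """Derive a stable pseudo-random number from this host's FQDN and a seed."""
        digest = sha256(f"{getfqdn()}-{seed}".encode()).hexdigest()
        return int(digest, 16) % stop  # e.g. used as the minute of an hourly crontab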
@ -1,5 +1,6 @@
"""Blueprint helpers"""
from functools import wraps
+from pathlib import Path
from typing import Callable

from django.apps import apps
@ -44,3 +45,13 @@ def reconcile_app(app_name: str):
        return wrapper

    return wrapper_outer
+
+
+def load_yaml_fixture(path: str, **kwargs) -> str:
+    """Load yaml fixture, optionally formatting it with kwargs"""
+    with open(Path(__file__).resolve().parent / Path(path), "r", encoding="utf-8") as _fixture:
+        fixture = _fixture.read()
+        try:
+            return fixture % kwargs
+        except TypeError:
+            return fixture
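Note: the added load_yaml_fixture() helper resolves fixtures relative to the tests package and supports optional %-style placeholders, which is how the test files further down inject generated slugs. A small usage sketch (the fixture content shown in the comment is invented for illustration):

    # fixtures/state_present.yaml might contain a "%(id)s" placeholder, e.g.:
    #   entries:
    #     - model: authentik_flows.flow
    #       identifiers:
    #         slug: %(id)s
    yaml_text = load_yaml_fixture("fixtures/state_present.yaml", id="my-flow-slug")
    # "fixture % kwargs" fills named placeholders; fixtures without any
    # placeholders fall back to the raw text via the TypeError handler.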
@ -4,7 +4,6 @@ entries:
    pk: cb954fd4-65a5-4ad9-b1ee-180ee9559cf4
    model: authentik_stages_prompt.prompt
    attrs:
-      name: qwerweqrq
      field_key: username
      label: Username
      type: username
authentik/blueprints/tests/fixtures/tags.yaml — 51 changes (vendored)
@ -3,12 +3,6 @@ context:
  foo: bar
  policy_property: name
  policy_property_value: foo-bar-baz-qux
-  sequence:
-    - foo
-    - bar
-  mapping:
-    key1: value
-    key2: 2
entries:
  - model: !Format ["%s", authentik_sources_oauth.oauthsource]
    state: !Format ["%s", present]
@ -25,7 +19,7 @@ entries:
          [slug, default-source-authentication],
        ]
      enrollment_flow:
-        !Find [!Format ["%s", authentik_flows.Flow], [slug, default-source-enrollment]]
+        !Find [authentik_flows.Flow, [slug, default-source-enrollment]]
  - attrs:
      expression: return True
    identifiers:
@ -98,49 +92,6 @@ entries:
        ]
      if_true_simple: !If [!Context foo, true, text]
      if_false_simple: !If [null, false, 2]
-      enumerate_mapping_to_mapping: !Enumerate [
-          !Context mapping,
-          MAP,
-          [!Format ["prefix-%s", !Index 0], !Format ["other-prefix-%s", !Value 0]]
-        ]
-      enumerate_mapping_to_sequence: !Enumerate [
-          !Context mapping,
-          SEQ,
-          !Format ["prefixed-pair-%s-%s", !Index 0, !Value 0]
-        ]
-      enumerate_sequence_to_sequence: !Enumerate [
-          !Context sequence,
-          SEQ,
-          !Format ["prefixed-items-%s-%s", !Index 0, !Value 0]
-        ]
-      enumerate_sequence_to_mapping: !Enumerate [
-          !Context sequence,
-          MAP,
-          [!Format ["index: %d", !Index 0], !Value 0]
-        ]
-      nested_complex_enumeration: !Enumerate [
-          !Context sequence,
-          MAP,
-          [
-            !Index 0,
-            !Enumerate [
-              !Context mapping,
-              MAP,
-              [
-                !Format ["%s", !Index 0],
-                [
-                  !Enumerate [!Value 2, SEQ, !Format ["prefixed-%s", !Value 0]],
-                  {
-                    outer_value: !Value 1,
-                    outer_index: !Index 1,
-                    middle_value: !Value 0,
-                    middle_index: !Index 0
-                  }
-                ]
-              ]
-            ]
-          ]
-        ]
      identifiers:
        name: test
      conditions:
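Note: the removed !Enumerate entries above iterate over the sequence and mapping dropped from the fixture's context block, and the test expectations removed later in this diff show the values they produce. As a plain-Python illustration of the first case, using the keys and values from this fixture:

    mapping = {"key1": "value", "key2": 2}

    # enumerate_mapping_to_mapping: build a MAP output from each key/value pair
    result = {f"prefix-{key}": f"other-prefix-{value}" for key, value in mapping.items()}
    # -> {"prefix-key1": "other-prefix-value", "prefix-key2": "other-prefix-2"}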
@ -2,8 +2,7 @@
from django.test import TransactionTestCase
from requests_mock import Mocker

-from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
-from authentik.blueprints.v1.oci import OCI_MEDIA_TYPE
+from authentik.blueprints.models import OCI_MEDIA_TYPE, BlueprintInstance, BlueprintRetrievalFailed


class TestBlueprintOCI(TransactionTestCase):
@ -13,7 +13,7 @@ from authentik.tenants.models import Tenant
class TestPackaged(TransactionTestCase):
    """Empty class, test methods are added dynamically"""

-    @apply_blueprint("default/default-tenant.yaml")
+    @apply_blueprint("default/90-default-tenant.yaml")
    def test_decorator_static(self):
        """Test @apply_blueprint decorator"""
        self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())
@ -3,12 +3,12 @@ from os import environ

from django.test import TransactionTestCase

+from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.exporter import FlowExporter
from authentik.blueprints.v1.importer import Importer, transaction_rollback
from authentik.core.models import Group
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding
from authentik.lib.generators import generate_id
-from authentik.lib.tests.utils import load_fixture
from authentik.policies.expression.models import ExpressionPolicy
from authentik.policies.models import PolicyBinding
from authentik.sources.oauth.models import OAuthSource
@ -24,15 +24,19 @@ class TestBlueprintsV1(TransactionTestCase):
        importer = Importer('{"version": 3}')
        self.assertFalse(importer.validate()[0])
        importer = Importer(
+            (
            '{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
            '"model": "authentik_core.User"}]}'
            )
+        )
        self.assertFalse(importer.validate()[0])
        importer = Importer(
+            (
            '{"version": 1, "entries": [{"attrs": {"name": "test"}, '
            '"identifiers": {}, '
            '"model": "authentik_core.Group"}]}'
            )
+        )
        self.assertFalse(importer.validate()[0])

    def test_validated_import_dict_identifiers(self):
@ -55,10 +59,12 @@ class TestBlueprintsV1(TransactionTestCase):
        )

        importer = Importer(
+            (
            '{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
            '{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
            '["other_value"]}}, "model": "authentik_core.Group"}]}'
            )
+        )
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        self.assertTrue(
@ -113,14 +119,14 @@ class TestBlueprintsV1(TransactionTestCase):
        """Test export and import it twice"""
        count_initial = Prompt.objects.filter(field_key="username").count()

-        importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
+        importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())

        count_before = Prompt.objects.filter(field_key="username").count()
        self.assertEqual(count_initial + 1, count_before)

-        importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
+        importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
        self.assertTrue(importer.apply())

        self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)
@ -130,7 +136,7 @@ class TestBlueprintsV1(TransactionTestCase):
        ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
        Group.objects.filter(name="test").delete()
        environ["foo"] = generate_id()
-        importer = Importer(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
+        importer = Importer(load_yaml_fixture("fixtures/tags.yaml"), {"bar": "baz"})
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()
@ -156,61 +162,6 @@ class TestBlueprintsV1(TransactionTestCase):
                "if_false_complex": ["list", "with", "items", "foo-bar"],
                "if_true_simple": True,
                "if_false_simple": 2,
-                "enumerate_mapping_to_mapping": {
-                    "prefix-key1": "other-prefix-value",
-                    "prefix-key2": "other-prefix-2",
-                },
-                "enumerate_mapping_to_sequence": [
-                    "prefixed-pair-key1-value",
-                    "prefixed-pair-key2-2",
-                ],
-                "enumerate_sequence_to_sequence": [
-                    "prefixed-items-0-foo",
-                    "prefixed-items-1-bar",
-                ],
-                "enumerate_sequence_to_mapping": {"index: 0": "foo", "index: 1": "bar"},
-                "nested_complex_enumeration": {
-                    "0": {
-                        "key1": [
-                            ["prefixed-f", "prefixed-o", "prefixed-o"],
-                            {
-                                "outer_value": "foo",
-                                "outer_index": 0,
-                                "middle_value": "value",
-                                "middle_index": "key1",
-                            },
-                        ],
-                        "key2": [
-                            ["prefixed-f", "prefixed-o", "prefixed-o"],
-                            {
-                                "outer_value": "foo",
-                                "outer_index": 0,
-                                "middle_value": 2,
-                                "middle_index": "key2",
-                            },
-                        ],
-                    },
-                    "1": {
-                        "key1": [
-                            ["prefixed-b", "prefixed-a", "prefixed-r"],
-                            {
-                                "outer_value": "bar",
-                                "outer_index": 1,
-                                "middle_value": "value",
-                                "middle_index": "key1",
-                            },
-                        ],
-                        "key2": [
-                            ["prefixed-b", "prefixed-a", "prefixed-r"],
-                            {
-                                "outer_value": "bar",
-                                "outer_index": 1,
-                                "middle_value": 2,
-                                "middle_index": "key2",
-                            },
-                        ],
-                    },
-                },
            }
        )
    )
@ -256,21 +207,15 @@ class TestBlueprintsV1(TransactionTestCase):
        with transaction_rollback():
            # First stage fields
            username_prompt = Prompt.objects.create(
-                name=generate_id(),
-                field_key="username",
-                label="Username",
-                order=0,
-                type=FieldTypes.TEXT,
+                field_key="username", label="Username", order=0, type=FieldTypes.TEXT
            )
            password = Prompt.objects.create(
-                name=generate_id(),
                field_key="password",
                label="Password",
                order=1,
                type=FieldTypes.PASSWORD,
            )
            password_repeat = Prompt.objects.create(
-                name=generate_id(),
                field_key="password_repeat",
                label="Password (repeat)",
                order=2,
@ -43,28 +43,3 @@ class TestBlueprintsV1API(APITestCase):
                "6871c0003f5c07be5c3316d9d4a08444bd8fed1b3f03294e51e44522"
            ),
        )
-
-    def test_api_blank(self):
-        """Test blank"""
-        res = self.client.post(
-            reverse("authentik_api:blueprintinstance-list"),
-            data={
-                "name": "foo",
-            },
-        )
-        self.assertEqual(res.status_code, 400)
-        self.assertJSONEqual(
-            res.content.decode(), {"non_field_errors": ["Either path or content must be set."]}
-        )
-
-    def test_api_content(self):
-        """Test blank"""
-        res = self.client.post(
-            reverse("authentik_api:blueprintinstance-list"),
-            data={
-                "name": "foo",
-                "content": '{"version": 3}',
-            },
-        )
-        self.assertEqual(res.status_code, 400)
-        self.assertJSONEqual(res.content.decode(), {"content": ["Failed to validate blueprint"]})
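Note: the two removed tests exercised validation on the blueprint instance API: posting neither path nor content, or posting unparseable content, is rejected with HTTP 400. The error strings suggest a serializer-side check roughly like the method below; this is an illustrative reconstruction, not code taken from this diff:

    from django.utils.translation import gettext_lazy as _
    from rest_framework.exceptions import ValidationError

    def validate(self, attrs: dict) -> dict:
        """Illustrative serializer.validate(): require either path or content."""
        if attrs.get("path", "") == "" and attrs.get("content", "") == "":
            raise ValidationError(_("Either path or content must be set."))
        return attrs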
@ -1,10 +1,10 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase

+from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.importer import Importer
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id
-from authentik.lib.tests.utils import load_fixture


class TestBlueprintsV1Conditions(TransactionTestCase):
@ -14,7 +14,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
        """Test conditions fulfilled"""
        flow_slug1 = generate_id()
        flow_slug2 = generate_id()
-        import_yaml = load_fixture(
+        import_yaml = load_yaml_fixture(
            "fixtures/conditions_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
        )

@ -31,7 +31,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
        """Test conditions not fulfilled"""
        flow_slug1 = generate_id()
        flow_slug2 = generate_id()
-        import_yaml = load_fixture(
+        import_yaml = load_yaml_fixture(
            "fixtures/conditions_not_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
        )

@ -1,10 +1,10 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase

+from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.importer import Importer
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id
-from authentik.lib.tests.utils import load_fixture


class TestBlueprintsV1State(TransactionTestCase):
@ -13,7 +13,7 @@ class TestBlueprintsV1State(TransactionTestCase):
    def test_state_present(self):
        """Test state present"""
        flow_slug = generate_id()
-        import_yaml = load_fixture("fixtures/state_present.yaml", id=flow_slug)
+        import_yaml = load_yaml_fixture("fixtures/state_present.yaml", id=flow_slug)

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
@ -39,7 +39,7 @@ class TestBlueprintsV1State(TransactionTestCase):
    def test_state_created(self):
        """Test state created"""
        flow_slug = generate_id()
-        import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)
+        import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
@ -65,7 +65,7 @@ class TestBlueprintsV1State(TransactionTestCase):
    def test_state_absent(self):
        """Test state absent"""
        flow_slug = generate_id()
-        import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)
+        import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
@ -74,7 +74,7 @@ class TestBlueprintsV1State(TransactionTestCase):
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.slug, flow_slug)

-        import_yaml = load_fixture("fixtures/state_absent.yaml", id=flow_slug)
+        import_yaml = load_yaml_fixture("fixtures/state_absent.yaml", id=flow_slug)
        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
@ -6,7 +6,7 @@ from django.test import TransactionTestCase
from yaml import dump

from authentik.blueprints.models import BlueprintInstance, BlueprintInstanceStatus
-from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discovery, blueprints_find
+from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_discover, blueprints_find
from authentik.lib.config import CONFIG
from authentik.lib.generators import generate_id

@ -53,7 +53,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
            file.seek(0)
            file_hash = sha512(file.read().encode()).hexdigest()
            file.flush()
-            blueprints_discovery()  # pylint: disable=no-value-for-parameter
+            blueprints_discover()  # pylint: disable=no-value-for-parameter
            instance = BlueprintInstance.objects.filter(name=blueprint_id).first()
            self.assertEqual(instance.last_applied_hash, file_hash)
            self.assertEqual(
@ -81,7 +81,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
                    )
                )
            file.flush()
-            blueprints_discovery()  # pylint: disable=no-value-for-parameter
+            blueprints_discover()  # pylint: disable=no-value-for-parameter
            blueprint = BlueprintInstance.objects.filter(name="foo").first()
            self.assertEqual(
                blueprint.last_applied_hash,
@ -106,7 +106,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
                    )
                )
            file.flush()
-            blueprints_discovery()  # pylint: disable=no-value-for-parameter
+            blueprints_discover()  # pylint: disable=no-value-for-parameter
            blueprint.refresh_from_db()
            self.assertEqual(
                blueprint.last_applied_hash,
@ -1,15 +1,13 @@
"""transfer common classes"""
from collections import OrderedDict
-from copy import copy
from dataclasses import asdict, dataclass, field, is_dataclass
from enum import Enum
from functools import reduce
from operator import ixor
from os import getenv
-from typing import Any, Iterable, Literal, Mapping, Optional, Union
+from typing import Any, Literal, Optional, Union
from uuid import UUID

-from deepmerge import always_merger
from django.apps import apps
from django.db.models import Model, Q
from rest_framework.fields import Field
@ -66,13 +64,11 @@ class BlueprintEntry:
    identifiers: dict[str, Any] = field(default_factory=dict)
    attrs: Optional[dict[str, Any]] = field(default_factory=dict)

+    # pylint: disable=invalid-name
    id: Optional[str] = None

    _state: BlueprintEntryState = field(default_factory=BlueprintEntryState)

-    def __post_init__(self, *args, **kwargs) -> None:
-        self.__tag_contexts: list["YAMLTagContext"] = []
-
    @staticmethod
    def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry":
        """Convert a SerializerModel instance to a blueprint Entry"""
@ -89,46 +85,17 @@ class BlueprintEntry:
            attrs=all_attrs,
        )

-    def _get_tag_context(
-        self,
-        depth: int = 0,
-        context_tag_type: Optional[type["YAMLTagContext"] | tuple["YAMLTagContext", ...]] = None,
-    ) -> "YAMLTagContext":
-        """Get a YAMLTagContext object located at a certain depth in the tag tree"""
-        if depth < 0:
-            raise ValueError("depth must be a positive number or zero")
-
-        if context_tag_type:
-            contexts = [x for x in self.__tag_contexts if isinstance(x, context_tag_type)]
-        else:
-            contexts = self.__tag_contexts
-
-        try:
-            return contexts[-(depth + 1)]
-        except IndexError:
-            raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}")
-
    def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any:
        """Check if we have any special tags that need handling"""
-        val = copy(value)
-
-        if isinstance(value, YAMLTagContext):
-            self.__tag_contexts.append(value)
-
        if isinstance(value, YAMLTag):
-            val = value.resolve(self, blueprint)
-
+            return value.resolve(self, blueprint)
        if isinstance(value, dict):
            for key, inner_value in value.items():
-                val[key] = self.tag_resolver(inner_value, blueprint)
+                value[key] = self.tag_resolver(inner_value, blueprint)
        if isinstance(value, list):
            for idx, inner_value in enumerate(value):
-                val[idx] = self.tag_resolver(inner_value, blueprint)
-
-        if isinstance(value, YAMLTagContext):
-            self.__tag_contexts.pop()
-
-        return val
+                value[idx] = self.tag_resolver(inner_value, blueprint)
+        return value

    def get_attrs(self, blueprint: "Blueprint") -> dict[str, Any]:
        """Get attributes of this entry, with all yaml tags resolved"""
@ -178,19 +145,12 @@ class YAMLTag:
        raise NotImplementedError


-class YAMLTagContext:
-    """Base class for all YAML Tag Contexts"""
-
-    def get_context(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
-        """Implement yaml tag context logic"""
-        raise NotImplementedError
-
-
class KeyOf(YAMLTag):
    """Reference another object by their ID"""

    id_from: str

+    # pylint: disable=unused-argument
    def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
        super().__init__()
        self.id_from = node.value
@ -217,6 +177,7 @@ class Env(YAMLTag):
    key: str
    default: Optional[Any]

+    # pylint: disable=unused-argument
    def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
        super().__init__()
        self.default = None
@ -236,6 +197,7 @@ class Context(YAMLTag):
    key: str
    default: Optional[Any]

+    # pylint: disable=unused-argument
    def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
        super().__init__()
        self.default = None
@ -258,6 +220,7 @@ class Format(YAMLTag):
    format_string: str
    args: list[Any]

+    # pylint: disable=unused-argument
    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
        super().__init__()
        self.format_string = node.value[0].value
@ -282,12 +245,15 @@ class Format(YAMLTag):
class Find(YAMLTag):
    """Find any object"""

-    model_name: str | YAMLTag
+    model_name: str
    conditions: list[list]

+    model_class: type[Model]
+
    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
        super().__init__()
-        self.model_name = loader.construct_object(node.value[0])
+        self.model_name = node.value[0].value
+        self.model_class = apps.get_model(*self.model_name.split("."))
        self.conditions = []
        for raw_node in node.value[1:]:
            values = []
@ -296,13 +262,6 @@ class Find(YAMLTag):
            self.conditions.append(values)

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
-        if isinstance(self.model_name, YAMLTag):
-            model_name = self.model_name.resolve(entry, blueprint)
-        else:
-            model_name = self.model_name
-
-        model_class = apps.get_model(*model_name.split("."))
-
        query = Q()
        for cond in self.conditions:
            if isinstance(cond[0], YAMLTag):
@ -314,7 +273,7 @@ class Find(YAMLTag):
            else:
                query_value = cond[1]
            query &= Q(**{query_key: query_value})
-        instance = model_class.objects.filter(query).first()
+        instance = self.model_class.objects.filter(query).first()
        if instance:
            return instance.pk
        return None
@ -337,6 +296,7 @@ class Condition(YAMLTag):
        "XNOR": lambda args: not (reduce(ixor, args) if len(args) > 1 else args[0]),
    }

+    # pylint: disable=unused-argument
    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
        super().__init__()
        self.mode = node.value[0].value
@ -369,6 +329,7 @@ class If(YAMLTag):
    when_true: Any
    when_false: Any

+    # pylint: disable=unused-argument
    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
        super().__init__()
        self.condition = loader.construct_object(node.value[0])
@ -390,133 +351,6 @@ class If(YAMLTag):
|
|||||||
raise EntryInvalidError(exc)
|
raise EntryInvalidError(exc)
|
||||||
|
|
||||||
|
|
||||||
class Enumerate(YAMLTag, YAMLTagContext):
|
|
||||||
"""Iterate over an iterable."""
|
|
||||||
|
|
||||||
iterable: YAMLTag | Iterable
|
|
||||||
item_body: Any
|
|
||||||
output_body: Literal["SEQ", "MAP"]
|
|
||||||
|
|
||||||
_OUTPUT_BODIES = {
|
|
||||||
"SEQ": (list, lambda a, b: [*a, b]),
|
|
||||||
"MAP": (
|
|
||||||
dict,
|
|
||||||
lambda a, b: always_merger.merge(
|
|
||||||
a, {b[0]: b[1]} if isinstance(b, (tuple, list)) else b
|
|
||||||
),
|
|
||||||
),
|
|
||||||
}
|
|
||||||
|
|
||||||
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
|
|
||||||
super().__init__()
|
|
||||||
self.iterable = loader.construct_object(node.value[0])
|
|
||||||
self.output_body = node.value[1].value
|
|
||||||
self.item_body = loader.construct_object(node.value[2])
|
|
||||||
self.__current_context: tuple[Any, Any] = tuple()
|
|
||||||
|
|
||||||
def get_context(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
|
||||||
return self.__current_context
|
|
||||||
|
|
||||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
|
||||||
if isinstance(self.iterable, EnumeratedItem) and self.iterable.depth == 0:
|
|
||||||
raise EntryInvalidError(
|
|
||||||
f"{self.__class__.__name__} tag's iterable references this tag's context. "
|
|
||||||
"This is a noop. Check you are setting depth bigger than 0."
|
|
||||||
)
|
|
||||||
|
|
||||||
if isinstance(self.iterable, YAMLTag):
|
|
||||||
iterable = self.iterable.resolve(entry, blueprint)
|
|
||||||
else:
|
|
||||||
iterable = self.iterable
|
|
||||||
|
|
||||||
if not isinstance(iterable, Iterable):
|
|
||||||
raise EntryInvalidError(
|
|
||||||
f"{self.__class__.__name__}'s iterable must be an iterable "
|
|
||||||
"such as a sequence or a mapping"
|
|
||||||
)
|
|
||||||
|
|
||||||
if isinstance(iterable, Mapping):
|
|
||||||
iterable = tuple(iterable.items())
|
|
||||||
else:
|
|
||||||
iterable = tuple(enumerate(iterable))
|
|
||||||
|
|
||||||
try:
|
|
||||||
output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()]
|
|
||||||
except KeyError as exc:
|
|
||||||
raise EntryInvalidError(exc)
|
|
||||||
|
|
||||||
result = output_class()
|
|
||||||
|
|
||||||
self.__current_context = tuple()
|
|
||||||
|
|
||||||
try:
|
|
||||||
for item in iterable:
|
|
||||||
self.__current_context = item
|
|
||||||
resolved_body = entry.tag_resolver(self.item_body, blueprint)
|
|
||||||
result = add_fn(result, resolved_body)
|
|
||||||
if not isinstance(result, output_class):
|
|
||||||
raise EntryInvalidError(
|
|
||||||
f"Invalid {self.__class__.__name__} item found: {resolved_body}"
|
|
||||||
)
|
|
||||||
finally:
|
|
||||||
self.__current_context = tuple()
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
class EnumeratedItem(YAMLTag):
|
|
||||||
"""Get the current item value and index provided by an Enumerate tag context"""
|
|
||||||
|
|
||||||
depth: int
|
|
||||||
|
|
||||||
_SUPPORTED_CONTEXT_TAGS = (Enumerate,)
|
|
||||||
|
|
||||||
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
|
|
||||||
super().__init__()
|
|
||||||
self.depth = int(node.value)
|
|
||||||
|
|
||||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
|
||||||
try:
|
|
||||||
context_tag: Enumerate = entry._get_tag_context(
|
|
||||||
depth=self.depth,
|
|
||||||
context_tag_type=EnumeratedItem._SUPPORTED_CONTEXT_TAGS,
|
|
||||||
)
|
|
||||||
except ValueError as exc:
|
|
||||||
if self.depth == 0:
|
|
||||||
raise EntryInvalidError(
|
|
||||||
f"{self.__class__.__name__} tags are only usable "
|
|
||||||
f"inside an {Enumerate.__name__} tag"
|
|
||||||
)
|
|
||||||
|
|
||||||
raise EntryInvalidError(f"{self.__class__.__name__} tag: {exc}")
|
|
||||||
|
|
||||||
return context_tag.get_context(entry, blueprint)
|
|
||||||
|
|
||||||
|
|
||||||
class Index(EnumeratedItem):
|
|
||||||
"""Get the current item index provided by an Enumerate tag context"""
|
|
||||||
|
|
||||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
|
||||||
context = super().resolve(entry, blueprint)
|
|
||||||
|
|
||||||
try:
|
|
||||||
return context[0]
|
|
||||||
except IndexError: # pragma: no cover
|
|
||||||
raise EntryInvalidError(f"Empty/invalid context: {context}")
|
|
||||||
|
|
||||||
|
|
||||||
class Value(EnumeratedItem):
|
|
||||||
"""Get the current item value provided by an Enumerate tag context"""
|
|
||||||
|
|
||||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
|
||||||
context = super().resolve(entry, blueprint)
|
|
||||||
|
|
||||||
try:
|
|
||||||
return context[1]
|
|
||||||
except IndexError: # pragma: no cover
|
|
||||||
raise EntryInvalidError(f"Empty/invalid context: {context}")
|
|
||||||
|
|
||||||
|
|
||||||
class BlueprintDumper(SafeDumper):
|
class BlueprintDumper(SafeDumper):
|
||||||
"""Dump dataclasses to yaml"""
|
"""Dump dataclasses to yaml"""
|
||||||
|
|
||||||
@ -560,9 +394,6 @@ class BlueprintLoader(SafeLoader):
|
|||||||
self.add_constructor("!Condition", Condition)
|
self.add_constructor("!Condition", Condition)
|
||||||
self.add_constructor("!If", If)
|
self.add_constructor("!If", If)
|
||||||
self.add_constructor("!Env", Env)
|
self.add_constructor("!Env", Env)
|
||||||
self.add_constructor("!Enumerate", Enumerate)
|
|
||||||
self.add_constructor("!Value", Value)
|
|
||||||
self.add_constructor("!Index", Index)
|
|
||||||
|
|
||||||
|
|
||||||
class EntryInvalidError(SentryIgnoredException):
|
class EntryInvalidError(SentryIgnoredException):
|
||||||
|
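Note: with the YAMLTagContext machinery removed, tag_resolver() on the older side of this diff reduces to a plain recursive walk: resolve a tag directly, otherwise descend into dicts and lists in place. A condensed standalone sketch of that behaviour (the YAMLTag class here is a minimal stand-in for the real base class, and the entry/blueprint arguments are simplified):

    from typing import Any

    class YAMLTag:
        """Minimal stand-in for the real tag base class."""
        def resolve(self, entry, blueprint) -> Any:
            raise NotImplementedError

    def tag_resolver(value: Any, entry=None, blueprint=None) -> Any:
        """Recursively resolve tags; containers are updated in place."""
        if isinstance(value, YAMLTag):
            return value.resolve(entry, blueprint)
        if isinstance(value, dict):
            for key, inner in value.items():
                value[key] = tag_resolver(inner, entry, blueprint)
        if isinstance(value, list):
            for idx, inner in enumerate(value):
                value[idx] = tag_resolver(inner, entry, blueprint)
        return value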
@ -7,7 +7,6 @@ from dacite.config import Config
from dacite.core import from_dict
from dacite.exceptions import DaciteError
from deepmerge import always_merger
-from django.core.exceptions import FieldError
from django.db import transaction
from django.db.models import Model
from django.db.models.query_utils import Q
@ -40,10 +39,6 @@ from authentik.lib.models import SerializerModel
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel

-# Context set when the serializer is created in a blueprint context
-# Update website/developer-docs/blueprints/v1/models.md when used
-SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"
-

def is_model_allowed(model: type[Model]) -> bool:
    """Check if model is allowed"""
@ -162,12 +157,7 @@ class Importer:
            raise EntryInvalidError(f"Model {model} not allowed")
        if issubclass(model, BaseMetaModel):
            serializer_class: type[Serializer] = model.serializer()
-            serializer = serializer_class(
-                data=entry.get_attrs(self.__import),
-                context={
-                    SERIALIZER_CONTEXT_BLUEPRINT: entry,
-                },
-            )
+            serializer = serializer_class(data=entry.get_attrs(self.__import))
            try:
                serializer.is_valid(raise_exception=True)
            except ValidationError as exc:
@ -191,10 +181,7 @@ class Importer:
        if not query:
            raise EntryInvalidError("No or invalid identifiers")

-        try:
-            existing_models = model.objects.filter(query)
-        except FieldError as exc:
-            raise EntryInvalidError(f"Invalid identifier field: {exc}") from exc
+        existing_models = model.objects.filter(query)

        serializer_kwargs = {}
        model_instance = existing_models.first()
@ -226,12 +213,7 @@ class Importer:
        always_merger.merge(full_data, updated_identifiers)
        serializer_kwargs["data"] = full_data

-        serializer: Serializer = model().serializer(
-            context={
-                SERIALIZER_CONTEXT_BLUEPRINT: entry,
-            },
-            **serializer_kwargs,
-        )
+        serializer: Serializer = model().serializer(**serializer_kwargs)
        try:
            serializer.is_valid(raise_exception=True)
        except ValidationError as exc:
@ -249,6 +231,7 @@ class Importer:
                raise IntegrityError
        except IntegrityError:
            return False
+        else:
            self.logger.debug("Committing changes")
            return True

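Note: the removed SERIALIZER_CONTEXT_BLUEPRINT constant passed the current BlueprintEntry into each serializer's context. Code that consumes it is not part of this diff, but a serializer would presumably read it back roughly like this (illustrative only):

    SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"

    def get_blueprint_entry(serializer):
        """Return the blueprint entry this serializer was instantiated for, if any."""
        return serializer.context.get(SERIALIZER_CONTEXT_BLUEPRINT)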
@ -3,4 +3,3 @@
LABEL_AUTHENTIK_SYSTEM = "blueprints.goauthentik.io/system"
LABEL_AUTHENTIK_INSTANTIATE = "blueprints.goauthentik.io/instantiate"
LABEL_AUTHENTIK_GENERATED = "blueprints.goauthentik.io/generated"
-LABEL_AUTHENTIK_DESCRIPTION = "blueprints.goauthentik.io/description"
@ -56,4 +56,5 @@ class MetaApplyBlueprint(BaseMetaModel):
        return ApplyBlueprintMetaSerializer

    class Meta:
+
        abstract = True
@ -14,6 +14,7 @@ class BaseMetaModel(Model):
        raise NotImplementedError

    class Meta:
+
        abstract = True


@ -1,98 +0,0 @@
-"""OCI Client"""
-from typing import Any
-from urllib.parse import ParseResult, urlparse
-
-from opencontainers.distribution.reggie import (
-    NewClient,
-    WithDebug,
-    WithDefaultName,
-    WithDigest,
-    WithReference,
-    WithUserAgent,
-    WithUsernamePassword,
-)
-from requests.exceptions import RequestException
-from structlog import get_logger
-from structlog.stdlib import BoundLogger
-
-from authentik.lib.sentry import SentryIgnoredException
-from authentik.lib.utils.http import authentik_user_agent
-
-OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
-
-
-class OCIException(SentryIgnoredException):
-    """OCI-related errors"""
-
-
-class BlueprintOCIClient:
-    """Blueprint OCI Client"""
-
-    url: ParseResult
-    sanitized_url: str
-    logger: BoundLogger
-    ref: str
-    client: NewClient
-
-    def __init__(self, url: str) -> None:
-        self._parse_url(url)
-        self.logger = get_logger().bind(url=self.sanitized_url)
-
-        self.ref = "latest"
-        path = self.url.path[1:]
-        if ":" in self.url.path:
-            path, _, self.ref = path.partition(":")
-        self.client = NewClient(
-            f"https://{self.url.hostname}",
-            WithUserAgent(authentik_user_agent()),
-            WithUsernamePassword(self.url.username, self.url.password),
-            WithDefaultName(path),
-            WithDebug(True),
-        )
-
-    def _parse_url(self, url: str):
-        self.url = urlparse(url)
-        netloc = self.url.netloc
-        if "@" in netloc:
-            netloc = netloc[netloc.index("@") + 1 :]
-        self.sanitized_url = self.url._replace(netloc=netloc).geturl()
-
-    def fetch_manifests(self) -> dict[str, Any]:
-        """Fetch manifests for ref"""
-        self.logger.info("Fetching OCI manifests for blueprint")
-        manifest_request = self.client.NewRequest(
-            "GET",
-            "/v2/<name>/manifests/<reference>",
-            WithReference(self.ref),
-        ).SetHeader("Accept", "application/vnd.oci.image.manifest.v1+json")
-        try:
-            manifest_response = self.client.Do(manifest_request)
-            manifest_response.raise_for_status()
-        except RequestException as exc:
-            raise OCIException(exc) from exc
-        manifest = manifest_response.json()
-        if "errors" in manifest:
-            raise OCIException(manifest["errors"])
-        return manifest
-
-    def fetch_blobs(self, manifest: dict[str, Any]):
-        """Fetch blob based on manifest info"""
-        blob = None
-        for layer in manifest.get("layers", []):
-            if layer.get("mediaType", "") == OCI_MEDIA_TYPE:
-                blob = layer.get("digest")
-                self.logger.debug("Found layer with matching media type", blob=blob)
-        if not blob:
-            raise OCIException("Blob not found")
-
-        blob_request = self.client.NewRequest(
-            "GET",
-            "/v2/<name>/blobs/<digest>",
-            WithDigest(blob),
-        )
-        try:
-            blob_response = self.client.Do(blob_request)
-            blob_response.raise_for_status()
-            return blob_response.text
-        except RequestException as exc:
-            raise OCIException(exc) from exc
@ -76,7 +76,7 @@ class BlueprintEventHandler(FileSystemEventHandler):
            return
        if isinstance(event, FileCreatedEvent):
            LOGGER.debug("new blueprint file created, starting discovery")
-            blueprints_discovery.delay()
+            blueprints_discover.delay()
        if isinstance(event, FileModifiedEvent):
            path = Path(event.src_path)
            root = Path(CONFIG.y("blueprints_dir")).absolute()
@ -122,7 +122,7 @@ def blueprints_find():
            )
        blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
        blueprints.append(blueprint)
-        LOGGER.debug(
+        LOGGER.info(
            "parsed & loaded blueprint",
            hash=file_hash,
            path=str(path),
@ -134,7 +134,7 @@ def blueprints_find():
    throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
)
@prefill_task
-def blueprints_discovery(self: MonitoredTask):
+def blueprints_discover(self: MonitoredTask):
    """Find blueprints and check if they need to be created in the database"""
    count = 0
    for blueprint in blueprints_find():
@ -219,14 +219,3 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
    finally:
        if instance:
            instance.save()
-
-
-@CELERY_APP.task()
-def clear_failed_blueprints():
-    """Remove blueprints which couldn't be fetched"""
-    # Exclude OCI blueprints as those might be temporarily unavailable
-    for blueprint in BlueprintInstance.objects.exclude(path__startswith="oci://"):
-        try:
-            blueprint.retrieve()
-        except BlueprintRetrievalFailed:
-            blueprint.delete()
@@ -1,10 +1,8 @@
 """Application API Views"""
-from datetime import timedelta
 from typing import Optional
 
 from django.core.cache import cache
 from django.db.models import QuerySet
-from django.db.models.functions import ExtractHour
 from django.http.response import HttpResponseBadRequest
 from django.shortcuts import get_object_or_404
 from drf_spectacular.types import OpenApiTypes
@@ -37,6 +35,7 @@ from authentik.lib.utils.file import (
 from authentik.policies.api.exec import PolicyTestResultSerializer
 from authentik.policies.engine import PolicyEngine
 from authentik.policies.types import PolicyResult
+from authentik.stages.user_login.stage import USER_LOGIN_AUTHENTICATED
 
 LOGGER = get_logger()
 
@@ -62,6 +61,7 @@ class ApplicationSerializer(ModelSerializer):
         return app.get_launch_url(user)
 
     class Meta:
+
         model = Application
         fields = [
             "pk",
@@ -185,6 +185,10 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
         if superuser_full_list and request.user.is_superuser:
             return super().list(request)
 
+        # To prevent the user from having to double login when prompt is set to login
+        # and the user has just signed it. This session variable is set in the UserLoginStage
+        # and is (quite hackily) removed from the session in applications's API's List method
+        self.request.session.pop(USER_LOGIN_AUTHENTICATED, None)
         queryset = self._filter_queryset_for_list(self.get_queryset())
         self.paginate_queryset(queryset)
 
@@ -221,6 +225,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
         methods=["POST"],
         parser_classes=(MultiPartParser,),
     )
+    # pylint: disable=unused-argument
     def set_icon(self, request: Request, slug: str):
         """Set application icon"""
         app: Application = self.get_object()
@@ -240,6 +245,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
         filter_backends=[],
         methods=["POST"],
     )
+    # pylint: disable=unused-argument
     def set_icon_url(self, request: Request, slug: str):
         """Set application icon (as URL)"""
         app: Application = self.get_object()
@@ -248,14 +254,15 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
     @permission_required("authentik_core.view_application", ["authentik_events.view_event"])
     @extend_schema(responses={200: CoordinateSerializer(many=True)})
     @action(detail=True, pagination_class=None, filter_backends=[])
+    # pylint: disable=unused-argument
     def metrics(self, request: Request, slug: str):
         """Metrics for application logins"""
         app = self.get_object()
         return Response(
-            get_objects_for_user(request.user, "authentik_events.view_event").filter(
+            get_objects_for_user(request.user, "authentik_events.view_event")
+            .filter(
                 action=EventAction.AUTHORIZE_APPLICATION,
                 context__authorized_application__pk=app.pk.hex,
             )
-            # 3 data points per day, so 8 hour spans
-            .get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
+            .get_events_per_hour()
         )
@@ -74,6 +74,7 @@ class AuthenticatedSessionSerializer(ModelSerializer):
         return GEOIP_READER.city_dict(instance.last_ip)
 
     class Meta:
+
         model = AuthenticatedSession
         fields = [
             "uuid",
@@ -24,10 +24,12 @@ from authentik.core.models import Group, User
 class GroupMemberSerializer(ModelSerializer):
     """Stripped down user serializer to show relevant users for groups"""
 
+    avatar = CharField(read_only=True)
     attributes = JSONField(validators=[is_dict], required=False)
     uid = CharField(read_only=True)
 
     class Meta:
+
         model = User
         fields = [
             "pk",
@@ -36,6 +38,7 @@ class GroupMemberSerializer(ModelSerializer):
             "is_active",
             "last_login",
             "email",
+            "avatar",
             "attributes",
             "uid",
         ]
@@ -53,6 +56,7 @@ class GroupSerializer(ModelSerializer):
     num_pk = IntegerField(read_only=True)
 
     class Meta:
+
         model = Group
         fields = [
             "pk",
@@ -92,6 +96,7 @@ class GroupFilter(FilterSet):
         queryset=User.objects.all(),
     )
 
+    # pylint: disable=unused-argument
     def filter_attributes(self, queryset, name, value):
         """Filter attributes by query args"""
         try:
@@ -110,6 +115,7 @@ class GroupFilter(FilterSet):
         return queryset
 
     class Meta:
+
         model = Group
         fields = ["name", "is_superuser", "members_by_pk", "attributes", "members_by_username"]
 
@@ -151,6 +157,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
         },
     )
     @action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
+    # pylint: disable=unused-argument, invalid-name
     def add_user(self, request: Request, pk: str) -> Response:
         """Add user to group"""
         group: Group = self.get_object()
@@ -175,6 +182,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
         },
     )
     @action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
+    # pylint: disable=unused-argument, invalid-name
     def remove_user(self, request: Request, pk: str) -> Response:
         """Add user to group"""
         group: Group = self.get_object()
@@ -49,6 +49,7 @@ class PropertyMappingSerializer(ManagedSerializer, ModelSerializer, MetaNameSeri
         return expression
 
     class Meta:
+
         model = PropertyMapping
         fields = [
             "pk",
@@ -116,6 +117,7 @@ class PropertyMappingViewSet(
         ],
     )
     @action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
+    # pylint: disable=unused-argument, invalid-name
     def test(self, request: Request, pk: str) -> Response:
         """Test Property Mapping"""
         mapping: PropertyMapping = self.get_object()
@@ -31,11 +31,11 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
         return obj.component
 
     class Meta:
+
         model = Provider
         fields = [
             "pk",
             "name",
-            "authentication_flow",
             "authorization_flow",
             "property_mappings",
             "component",
@@ -45,9 +45,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
             "verbose_name_plural",
             "meta_model_name",
         ]
-        extra_kwargs = {
-            "authorization_flow": {"required": True, "allow_null": False},
-        }
 
 
 class ProviderViewSet(
@@ -46,6 +46,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
         return obj.component
 
     class Meta:
+
         model = Source
         fields = [
             "pk",
@@ -101,6 +102,7 @@ class SourceViewSet(
         methods=["POST"],
         parser_classes=(MultiPartParser,),
     )
+    # pylint: disable=unused-argument
     def set_icon(self, request: Request, slug: str):
         """Set source icon"""
         source: Source = self.get_object()
@@ -120,6 +122,7 @@ class SourceViewSet(
         filter_backends=[],
         methods=["POST"],
     )
+    # pylint: disable=unused-argument
     def set_icon_url(self, request: Request, slug: str):
         """Set source icon (as URL)"""
         source: Source = self.get_object()
@@ -206,6 +209,5 @@ class UserSourceConnectionViewSet(
     queryset = UserSourceConnection.objects.all()
     serializer_class = UserSourceConnectionSerializer
     permission_classes = [OwnerSuperuserPermissions]
-    filterset_fields = ["user"]
     filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
     ordering = ["pk"]
@@ -16,7 +16,6 @@ from rest_framework.viewsets import ModelViewSet
 from authentik.api.authorization import OwnerSuperuserPermissions
 from authentik.api.decorators import permission_required
 from authentik.blueprints.api import ManagedSerializer
-from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.users import UserSerializer
 from authentik.core.api.utils import PassiveSerializer
@@ -30,20 +29,9 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):
 
     user_obj = UserSerializer(required=False, source="user", read_only=True)
 
-    def __init__(self, *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
-        if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
-            self.fields["key"] = CharField()
-
     def validate(self, attrs: dict[Any, str]) -> dict[Any, str]:
         """Ensure only API or App password tokens are created."""
-        request: Request = self.context.get("request")
-        if not request:
-            if "user" not in attrs:
-                raise ValidationError("Missing user")
-            if "intent" not in attrs:
-                raise ValidationError("Missing intent")
-        else:
-            attrs.setdefault("user", request.user)
+        request: Request = self.context["request"]
+        attrs.setdefault("user", request.user)
         attrs.setdefault("intent", TokenIntents.INTENT_API)
         if attrs.get("intent") not in [TokenIntents.INTENT_API, TokenIntents.INTENT_APP_PASSWORD]:
@@ -51,6 +39,7 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):
         return attrs
 
     class Meta:
+
         model = Token
         fields = [
             "pk",
@@ -123,6 +112,7 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
         }
     )
     @action(detail=True, pagination_class=None, filter_backends=[], methods=["GET"])
+    # pylint: disable=unused-argument
     def view_key(self, request: Request, identifier: str) -> Response:
         """Return token key and log access"""
         token: Token = self.get_object()
@@ -144,11 +134,11 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
         },
     )
     @action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
+    # pylint: disable=unused-argument
     def set_key(self, request: Request, identifier: str) -> Response:
-        """Set token key. Action is logged as event. `authentik_core.set_token_key` permission
-        is required."""
+        """Return token key and log access"""
         token: Token = self.get_object()
-        key = request.data.get("key")
+        key = request.POST.get("key")
         if not key:
             return Response(status=400)
         token.key = key
@@ -53,7 +53,7 @@ class UsedByMixin:
         responses={200: UsedBySerializer(many=True)},
     )
     @action(detail=True, pagination_class=None, filter_backends=[])
-    # pylint: disable=too-many-locals
+    # pylint: disable=invalid-name, unused-argument, too-many-locals
     def used_by(self, request: Request, *args, **kwargs) -> Response:
         """Get a list of all objects that use this object"""
         # pyright: reportGeneralTypeIssues=false
|
@ -4,12 +4,10 @@ from json import loads
|
|||||||
from typing import Any, Optional
|
from typing import Any, Optional
|
||||||
|
|
||||||
from django.contrib.auth import update_session_auth_hash
|
from django.contrib.auth import update_session_auth_hash
|
||||||
from django.contrib.sessions.backends.cache import KEY_PREFIX
|
|
||||||
from django.core.cache import cache
|
|
||||||
from django.db.models.functions import ExtractHour
|
|
||||||
from django.db.models.query import QuerySet
|
from django.db.models.query import QuerySet
|
||||||
from django.db.transaction import atomic
|
from django.db.transaction import atomic
|
||||||
from django.db.utils import IntegrityError
|
from django.db.utils import IntegrityError
|
||||||
|
from django.urls import reverse_lazy
|
||||||
from django.utils.http import urlencode
|
from django.utils.http import urlencode
|
||||||
from django.utils.text import slugify
|
from django.utils.text import slugify
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
@ -37,13 +35,11 @@ from rest_framework.request import Request
|
|||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from rest_framework.serializers import (
|
from rest_framework.serializers import (
|
||||||
BooleanField,
|
BooleanField,
|
||||||
DateTimeField,
|
|
||||||
ListSerializer,
|
ListSerializer,
|
||||||
ModelSerializer,
|
ModelSerializer,
|
||||||
PrimaryKeyRelatedField,
|
PrimaryKeyRelatedField,
|
||||||
ValidationError,
|
ValidationError,
|
||||||
)
|
)
|
||||||
from rest_framework.validators import UniqueValidator
|
|
||||||
from rest_framework.viewsets import ModelViewSet
|
from rest_framework.viewsets import ModelViewSet
|
||||||
from rest_framework_guardian.filters import ObjectPermissionsFilter
|
from rest_framework_guardian.filters import ObjectPermissionsFilter
|
||||||
from structlog.stdlib import get_logger
|
from structlog.stdlib import get_logger
|
||||||
@ -60,23 +56,19 @@ from authentik.core.models import (
|
|||||||
USER_ATTRIBUTE_SA,
|
USER_ATTRIBUTE_SA,
|
||||||
USER_ATTRIBUTE_TOKEN_EXPIRING,
|
USER_ATTRIBUTE_TOKEN_EXPIRING,
|
||||||
USER_PATH_SERVICE_ACCOUNT,
|
USER_PATH_SERVICE_ACCOUNT,
|
||||||
AuthenticatedSession,
|
|
||||||
Group,
|
Group,
|
||||||
Token,
|
Token,
|
||||||
TokenIntents,
|
TokenIntents,
|
||||||
User,
|
User,
|
||||||
)
|
)
|
||||||
from authentik.events.models import EventAction
|
from authentik.events.models import EventAction
|
||||||
from authentik.flows.exceptions import FlowNonApplicableException
|
|
||||||
from authentik.flows.models import FlowToken
|
from authentik.flows.models import FlowToken
|
||||||
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner
|
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner
|
||||||
from authentik.flows.views.executor import QS_KEY_TOKEN
|
from authentik.flows.views.executor import QS_KEY_TOKEN
|
||||||
from authentik.interfaces.models import InterfaceType
|
|
||||||
from authentik.interfaces.views import reverse_interface
|
|
||||||
from authentik.stages.email.models import EmailStage
|
from authentik.stages.email.models import EmailStage
|
||||||
from authentik.stages.email.tasks import send_mails
|
from authentik.stages.email.tasks import send_mails
|
||||||
from authentik.stages.email.utils import TemplateEmailMessage
|
from authentik.stages.email.utils import TemplateEmailMessage
|
||||||
from authentik.tenants.utils import get_tenant
|
from authentik.tenants.models import Tenant
|
||||||
|
|
||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
|
|
||||||
@ -88,6 +80,7 @@ class UserGroupSerializer(ModelSerializer):
|
|||||||
parent_name = CharField(source="parent.name", read_only=True)
|
parent_name = CharField(source="parent.name", read_only=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
|
|
||||||
model = Group
|
model = Group
|
||||||
fields = [
|
fields = [
|
||||||
"pk",
|
"pk",
|
||||||
@ -111,7 +104,7 @@ class UserSerializer(ModelSerializer):
|
|||||||
)
|
)
|
||||||
groups_obj = ListSerializer(child=UserGroupSerializer(), read_only=True, source="ak_groups")
|
groups_obj = ListSerializer(child=UserGroupSerializer(), read_only=True, source="ak_groups")
|
||||||
uid = CharField(read_only=True)
|
uid = CharField(read_only=True)
|
||||||
username = CharField(max_length=150, validators=[UniqueValidator(queryset=User.objects.all())])
|
username = CharField(max_length=150)
|
||||||
|
|
||||||
def validate_path(self, path: str) -> str:
|
def validate_path(self, path: str) -> str:
|
||||||
"""Validate path"""
|
"""Validate path"""
|
||||||
@ -123,6 +116,7 @@ class UserSerializer(ModelSerializer):
|
|||||||
return path
|
return path
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
|
|
||||||
model = User
|
model = User
|
||||||
fields = [
|
fields = [
|
||||||
"pk",
|
"pk",
|
||||||
@ -174,6 +168,7 @@ class UserSelfSerializer(ModelSerializer):
|
|||||||
return user.group_attributes(self._context["request"]).get("settings", {})
|
return user.group_attributes(self._context["request"]).get("settings", {})
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
|
|
||||||
model = User
|
model = User
|
||||||
fields = [
|
fields = [
|
||||||
"pk",
|
"pk",
|
||||||
@ -204,47 +199,38 @@ class SessionUserSerializer(PassiveSerializer):
|
|||||||
class UserMetricsSerializer(PassiveSerializer):
|
class UserMetricsSerializer(PassiveSerializer):
|
||||||
"""User Metrics"""
|
"""User Metrics"""
|
||||||
|
|
||||||
logins = SerializerMethodField()
|
logins_per_1h = SerializerMethodField()
|
||||||
logins_failed = SerializerMethodField()
|
logins_failed_per_1h = SerializerMethodField()
|
||||||
authorizations = SerializerMethodField()
|
authorizations_per_1h = SerializerMethodField()
|
||||||
|
|
||||||
@extend_schema_field(CoordinateSerializer(many=True))
|
@extend_schema_field(CoordinateSerializer(many=True))
|
||||||
def get_logins(self, _):
|
def get_logins_per_1h(self, _):
|
||||||
"""Get successful logins per 8 hours for the last 7 days"""
|
"""Get successful logins per hour for the last 24 hours"""
|
||||||
user = self.context["user"]
|
user = self.context["user"]
|
||||||
request = self.context["request"]
|
|
||||||
return (
|
return (
|
||||||
get_objects_for_user(request.user, "authentik_events.view_event").filter(
|
get_objects_for_user(user, "authentik_events.view_event")
|
||||||
action=EventAction.LOGIN, user__pk=user.pk
|
.filter(action=EventAction.LOGIN, user__pk=user.pk)
|
||||||
)
|
.get_events_per_hour()
|
||||||
# 3 data points per day, so 8 hour spans
|
|
||||||
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
@extend_schema_field(CoordinateSerializer(many=True))
|
@extend_schema_field(CoordinateSerializer(many=True))
|
||||||
def get_logins_failed(self, _):
|
def get_logins_failed_per_1h(self, _):
|
||||||
"""Get failed logins per 8 hours for the last 7 days"""
|
"""Get failed logins per hour for the last 24 hours"""
|
||||||
user = self.context["user"]
|
user = self.context["user"]
|
||||||
request = self.context["request"]
|
|
||||||
return (
|
return (
|
||||||
get_objects_for_user(request.user, "authentik_events.view_event").filter(
|
get_objects_for_user(user, "authentik_events.view_event")
|
||||||
action=EventAction.LOGIN_FAILED, context__username=user.username
|
.filter(action=EventAction.LOGIN_FAILED, context__username=user.username)
|
||||||
)
|
.get_events_per_hour()
|
||||||
# 3 data points per day, so 8 hour spans
|
|
||||||
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
@extend_schema_field(CoordinateSerializer(many=True))
|
@extend_schema_field(CoordinateSerializer(many=True))
|
||||||
def get_authorizations(self, _):
|
def get_authorizations_per_1h(self, _):
|
||||||
"""Get failed logins per 8 hours for the last 7 days"""
|
"""Get failed logins per hour for the last 24 hours"""
|
||||||
user = self.context["user"]
|
user = self.context["user"]
|
||||||
request = self.context["request"]
|
|
||||||
return (
|
return (
|
||||||
get_objects_for_user(request.user, "authentik_events.view_event").filter(
|
get_objects_for_user(user, "authentik_events.view_event")
|
||||||
action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk
|
.filter(action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk)
|
||||||
)
|
.get_events_per_hour()
|
||||||
# 3 data points per day, so 8 hour spans
|
|
||||||
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -276,6 +262,7 @@ class UsersFilter(FilterSet):
|
|||||||
queryset=Group.objects.all(),
|
queryset=Group.objects.all(),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# pylint: disable=unused-argument
|
||||||
def filter_attributes(self, queryset, name, value):
|
def filter_attributes(self, queryset, name, value):
|
||||||
"""Filter attributes by query args"""
|
"""Filter attributes by query args"""
|
||||||
try:
|
try:
|
||||||
@ -322,7 +309,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
|||||||
def _create_recovery_link(self) -> tuple[Optional[str], Optional[Token]]:
|
def _create_recovery_link(self) -> tuple[Optional[str], Optional[Token]]:
|
||||||
"""Create a recovery link (when the current tenant has a recovery flow set),
|
"""Create a recovery link (when the current tenant has a recovery flow set),
|
||||||
that can either be shown to an admin or sent to the user directly"""
|
that can either be shown to an admin or sent to the user directly"""
|
||||||
tenant = get_tenant(self.request)
|
tenant: Tenant = self.request._request.tenant
|
||||||
# Check that there is a recovery flow, if not return an error
|
# Check that there is a recovery flow, if not return an error
|
||||||
flow = tenant.flow_recovery
|
flow = tenant.flow_recovery
|
||||||
if not flow:
|
if not flow:
|
||||||
@ -331,16 +318,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
|||||||
user: User = self.get_object()
|
user: User = self.get_object()
|
||||||
planner = FlowPlanner(flow)
|
planner = FlowPlanner(flow)
|
||||||
planner.allow_empty_flows = True
|
planner.allow_empty_flows = True
|
||||||
try:
|
|
||||||
plan = planner.plan(
|
plan = planner.plan(
|
||||||
self.request._request,
|
self.request._request,
|
||||||
{
|
{
|
||||||
PLAN_CONTEXT_PENDING_USER: user,
|
PLAN_CONTEXT_PENDING_USER: user,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
except FlowNonApplicableException:
|
|
||||||
LOGGER.warning("Recovery flow not applicable to user")
|
|
||||||
return None, None
|
|
||||||
token, __ = FlowToken.objects.update_or_create(
|
token, __ = FlowToken.objects.update_or_create(
|
||||||
identifier=f"{user.uid}-password-reset",
|
identifier=f"{user.uid}-password-reset",
|
||||||
defaults={
|
defaults={
|
||||||
@ -351,12 +334,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
|||||||
)
|
)
|
||||||
querystring = urlencode({QS_KEY_TOKEN: token.key})
|
querystring = urlencode({QS_KEY_TOKEN: token.key})
|
||||||
link = self.request.build_absolute_uri(
|
link = self.request.build_absolute_uri(
|
||||||
reverse_interface(
|
reverse_lazy("authentik_core:if-flow", kwargs={"flow_slug": flow.slug})
|
||||||
self.request,
|
+ f"?{querystring}"
|
||||||
InterfaceType.FLOW,
|
|
||||||
flow_slug=flow.slug,
|
|
||||||
),
|
|
||||||
+f"?{querystring}",
|
|
||||||
)
|
)
|
||||||
return link, token
|
return link, token
|
||||||
|
|
||||||
@ -367,11 +346,6 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
|||||||
{
|
{
|
||||||
"name": CharField(required=True),
|
"name": CharField(required=True),
|
||||||
"create_group": BooleanField(default=False),
|
"create_group": BooleanField(default=False),
|
||||||
"expiring": BooleanField(default=True),
|
|
||||||
"expires": DateTimeField(
|
|
||||||
required=False,
|
|
||||||
help_text="If not provided, valid for 360 days",
|
|
||||||
),
|
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
responses={
|
responses={
|
||||||
@ -392,20 +366,14 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
|||||||
"""Create a new user account that is marked as a service account"""
|
"""Create a new user account that is marked as a service account"""
|
||||||
username = request.data.get("name")
|
username = request.data.get("name")
|
||||||
create_group = request.data.get("create_group", False)
|
create_group = request.data.get("create_group", False)
|
||||||
expiring = request.data.get("expiring", True)
|
|
||||||
expires = request.data.get("expires", now() + timedelta(days=360))
|
|
||||||
|
|
||||||
with atomic():
|
with atomic():
|
||||||
try:
|
try:
|
||||||
user: User = User.objects.create(
|
user = User.objects.create(
|
||||||
username=username,
|
username=username,
|
||||||
name=username,
|
name=username,
|
||||||
attributes={USER_ATTRIBUTE_SA: True, USER_ATTRIBUTE_TOKEN_EXPIRING: expiring},
|
attributes={USER_ATTRIBUTE_SA: True, USER_ATTRIBUTE_TOKEN_EXPIRING: False},
|
||||||
path=USER_PATH_SERVICE_ACCOUNT,
|
path=USER_PATH_SERVICE_ACCOUNT,
|
||||||
)
|
)
|
||||||
user.set_unusable_password()
|
|
||||||
user.save()
|
|
||||||
|
|
||||||
response = {
|
response = {
|
||||||
"username": user.username,
|
"username": user.username,
|
||||||
"user_uid": user.uid,
|
"user_uid": user.uid,
|
||||||
@ -421,17 +389,17 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
|||||||
identifier=slugify(f"service-account-{username}-password"),
|
identifier=slugify(f"service-account-{username}-password"),
|
||||||
intent=TokenIntents.INTENT_APP_PASSWORD,
|
intent=TokenIntents.INTENT_APP_PASSWORD,
|
||||||
user=user,
|
user=user,
|
||||||
expires=expires,
|
expires=now() + timedelta(days=360),
|
||||||
expiring=expiring,
|
|
||||||
)
|
)
|
||||||
response["token"] = token.key
|
response["token"] = token.key
|
||||||
return Response(response)
|
return Response(response)
|
||||||
except IntegrityError as exc:
|
except (IntegrityError) as exc:
|
||||||
return Response(data={"non_field_errors": [str(exc)]}, status=400)
|
return Response(data={"non_field_errors": [str(exc)]}, status=400)
|
||||||
|
|
||||||
@extend_schema(responses={200: SessionUserSerializer(many=False)})
|
@extend_schema(responses={200: SessionUserSerializer(many=False)})
|
||||||
@action(url_path="me", url_name="me", detail=False, pagination_class=None, filter_backends=[])
|
@action(detail=False, pagination_class=None, filter_backends=[])
|
||||||
def user_me(self, request: Request) -> Response:
|
# pylint: disable=invalid-name
|
||||||
|
def me(self, request: Request) -> Response:
|
||||||
"""Get information about current user"""
|
"""Get information about current user"""
|
||||||
context = {"request": request}
|
context = {"request": request}
|
||||||
serializer = SessionUserSerializer(
|
serializer = SessionUserSerializer(
|
||||||
@ -459,6 +427,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
@action(detail=True, methods=["POST"])
|
@action(detail=True, methods=["POST"])
|
||||||
|
# pylint: disable=invalid-name, unused-argument
|
||||||
def set_password(self, request: Request, pk: int) -> Response:
|
def set_password(self, request: Request, pk: int) -> Response:
|
||||||
"""Set password for user"""
|
"""Set password for user"""
|
||||||
user: User = self.get_object()
|
user: User = self.get_object()
|
||||||
@ -476,12 +445,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
|||||||
@permission_required("authentik_core.view_user", ["authentik_events.view_event"])
|
@permission_required("authentik_core.view_user", ["authentik_events.view_event"])
|
||||||
@extend_schema(responses={200: UserMetricsSerializer(many=False)})
|
@extend_schema(responses={200: UserMetricsSerializer(many=False)})
|
||||||
@action(detail=True, pagination_class=None, filter_backends=[])
|
@action(detail=True, pagination_class=None, filter_backends=[])
|
||||||
|
# pylint: disable=invalid-name, unused-argument
|
||||||
def metrics(self, request: Request, pk: int) -> Response:
|
def metrics(self, request: Request, pk: int) -> Response:
|
||||||
"""User metrics per 1h"""
|
"""User metrics per 1h"""
|
||||||
user: User = self.get_object()
|
user: User = self.get_object()
|
||||||
serializer = UserMetricsSerializer(instance={})
|
serializer = UserMetricsSerializer(True)
|
||||||
serializer.context["user"] = user
|
serializer.context["user"] = user
|
||||||
serializer.context["request"] = request
|
|
||||||
return Response(serializer.data)
|
return Response(serializer.data)
|
||||||
|
|
||||||
@permission_required("authentik_core.reset_user_password")
|
@permission_required("authentik_core.reset_user_password")
|
||||||
@ -492,6 +461,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
@action(detail=True, pagination_class=None, filter_backends=[])
|
@action(detail=True, pagination_class=None, filter_backends=[])
|
||||||
|
# pylint: disable=invalid-name, unused-argument
|
||||||
def recovery(self, request: Request, pk: int) -> Response:
|
def recovery(self, request: Request, pk: int) -> Response:
|
||||||
"""Create a temporary link that a user can use to recover their accounts"""
|
"""Create a temporary link that a user can use to recover their accounts"""
|
||||||
link, _ = self._create_recovery_link()
|
link, _ = self._create_recovery_link()
|
||||||
@ -516,6 +486,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
@action(detail=True, pagination_class=None, filter_backends=[])
|
@action(detail=True, pagination_class=None, filter_backends=[])
|
||||||
|
# pylint: disable=invalid-name, unused-argument
|
||||||
def recovery_email(self, request: Request, pk: int) -> Response:
|
def recovery_email(self, request: Request, pk: int) -> Response:
|
||||||
"""Create a temporary link that a user can use to recover their accounts"""
|
"""Create a temporary link that a user can use to recover their accounts"""
|
||||||
for_user: User = self.get_object()
|
for_user: User = self.get_object()
|
||||||
@ -589,14 +560,3 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
def partial_update(self, request: Request, *args, **kwargs) -> Response:
|
|
||||||
response = super().partial_update(request, *args, **kwargs)
|
|
||||||
instance: User = self.get_object()
|
|
||||||
if not instance.is_active:
|
|
||||||
sessions = AuthenticatedSession.objects.filter(user=instance)
|
|
||||||
session_ids = sessions.values_list("session_key", flat=True)
|
|
||||||
cache.delete_many(f"{KEY_PREFIX}{session}" for session in session_ids)
|
|
||||||
sessions.delete()
|
|
||||||
LOGGER.debug("Deleted user's sessions", user=instance.username)
|
|
||||||
return response
|
|
||||||
|
@@ -11,7 +11,6 @@ class AuthentikCoreConfig(ManagedAppConfig):
     label = "authentik_core"
     verbose_name = "authentik Core"
     mountpoint = ""
-    ws_mountpoint = "authentik.core.urls"
     default = True
 
     def reconcile_load_core_signals(self):
@@ -1,9 +1,8 @@
 """Property Mapping Evaluator"""
-from typing import Any, Optional
+from typing import Optional
 
 from django.db.models import Model
 from django.http import HttpRequest
-from prometheus_client import Histogram
 
 from authentik.core.models import User
 from authentik.events.models import Event, EventAction
@@ -11,24 +10,15 @@ from authentik.lib.expression.evaluator import BaseEvaluator
 from authentik.lib.utils.errors import exception_to_string
 from authentik.policies.types import PolicyRequest
 
-PROPERTY_MAPPING_TIME = Histogram(
-    "authentik_property_mapping_execution_time",
-    "Evaluation time of property mappings",
-    ["mapping_name"],
-)
-
 
 class PropertyMappingEvaluator(BaseEvaluator):
     """Custom Evaluator that adds some different context variables."""
 
-    dry_run: bool
-
     def __init__(
         self,
         model: Model,
         user: Optional[User] = None,
         request: Optional[HttpRequest] = None,
-        dry_run: Optional[bool] = False,
         **kwargs,
     ):
         if hasattr(model, "name"):
@@ -45,13 +35,9 @@ class PropertyMappingEvaluator(BaseEvaluator):
             req.http_request = request
         self._context["request"] = req
         self._context.update(**kwargs)
-        self.dry_run = dry_run
 
     def handle_error(self, exc: Exception, expression_source: str):
         """Exception Handler"""
-        # For dry-run requests we don't save exceptions
-        if self.dry_run:
-            return
         error_string = exception_to_string(exc)
         event = Event.new(
             EventAction.PROPERTY_MAPPING_EXCEPTION,
@@ -63,7 +49,3 @@ class PropertyMappingEvaluator(BaseEvaluator):
             event.from_http(req.http_request, req.user)
             return
         event.save()
-
-    def evaluate(self, *args, **kwargs) -> Any:
-        with PROPERTY_MAPPING_TIME.labels(mapping_name=self._filename).time():
-            return super().evaluate(*args, **kwargs)
@@ -49,6 +49,7 @@ class Command(BaseCommand):
         return namespace
 
     @staticmethod
+    # pylint: disable=unused-argument
     def post_save_handler(sender, instance: Model, created: bool, **_):
         """Signal handler for all object's post_save"""
         if not should_log_model(instance):
@@ -64,6 +65,7 @@ class Command(BaseCommand):
         ).save()
 
     @staticmethod
+    # pylint: disable=unused-argument
     def pre_delete_handler(sender, instance: Model, **_):
         """Signal handler for all object's pre_delete"""
         if not should_log_model(instance):  # pragma: no cover
@@ -14,6 +14,7 @@ import authentik.core.models
 
 
 class Migration(migrations.Migration):
+
     initial = True
 
     dependencies = [
@@ -43,10 +44,7 @@ class Migration(migrations.Migration):
                     "is_superuser",
                     models.BooleanField(
                         default=False,
-                        help_text=(
-                            "Designates that this user has all permissions without explicitly"
-                            " assigning them."
-                        ),
+                        help_text="Designates that this user has all permissions without explicitly assigning them.",
                         verbose_name="superuser status",
                     ),
                 ),
@@ -54,9 +52,7 @@ class Migration(migrations.Migration):
                     "username",
                     models.CharField(
                         error_messages={"unique": "A user with that username already exists."},
-                        help_text=(
-                            "Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only."
-                        ),
+                        help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
                         max_length=150,
                         unique=True,
                         validators=[django.contrib.auth.validators.UnicodeUsernameValidator()],
@@ -87,10 +83,7 @@ class Migration(migrations.Migration):
                     "is_active",
                     models.BooleanField(
                         default=True,
-                        help_text=(
-                            "Designates whether this user should be treated as active. Unselect"
-                            " this instead of deleting accounts."
-                        ),
+                        help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
                         verbose_name="active",
                     ),
                 ),
@@ -18,13 +18,13 @@ def create_default_user(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
     db_alias = schema_editor.connection.alias
 
     akadmin, _ = User.objects.using(db_alias).get_or_create(
-        username="akadmin",
-        email=environ.get("AUTHENTIK_BOOTSTRAP_EMAIL", "root@localhost"),
-        name="authentik Default Admin",
+        username="akadmin", email="root@localhost", name="authentik Default Admin"
     )
     password = None
     if "TF_BUILD" in environ or settings.TEST:
         password = "akadmin"  # noqa # nosec
+    if "AK_ADMIN_PASS" in environ:
+        password = environ["AK_ADMIN_PASS"]
     if "AUTHENTIK_BOOTSTRAP_PASSWORD" in environ:
         password = environ["AUTHENTIK_BOOTSTRAP_PASSWORD"]
     if password:
@@ -51,6 +51,7 @@ def create_default_admin_group(apps: Apps, schema_editor: BaseDatabaseSchemaEdit
 
 
 class Migration(migrations.Migration):
+
     replaces = [
         ("authentik_core", "0002_auto_20200523_1133"),
         ("authentik_core", "0003_default_user"),
@@ -171,10 +172,7 @@ class Migration(migrations.Migration):
             name="groups",
             field=models.ManyToManyField(
                 blank=True,
-                help_text=(
-                    "The groups this user belongs to. A user will get all permissions granted to"
-                    " each of their groups."
-                ),
+                help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
                 related_name="user_set",
                 related_query_name="user",
                 to="auth.Group",
@@ -17,6 +17,7 @@ def set_default_token_key(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
 
 
 class Migration(migrations.Migration):
+
     replaces = [
         ("authentik_core", "0012_auto_20201003_1737"),
         ("authentik_core", "0013_auto_20201003_2132"),
@@ -4,6 +4,7 @@ from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
+
     dependencies = [
         ("authentik_core", "0016_auto_20201202_2234"),
     ]
@@ -14,12 +15,7 @@ class Migration(migrations.Migration):
             name="managed",
             field=models.TextField(
                 default=None,
-                help_text=(
-                    "Objects which are managed by authentik. These objects are created and updated"
-                    " automatically. This is flag only indicates that an object can be overwritten"
-                    " by migrations. You can still modify the objects via the API, but expect"
-                    " changes to be overwritten in a later update."
-                ),
+                help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
                 null=True,
                 verbose_name="Managed by authentik",
                 unique=True,
@@ -30,12 +26,7 @@ class Migration(migrations.Migration):
             name="managed",
             field=models.TextField(
                 default=None,
-                help_text=(
-                    "Objects which are managed by authentik. These objects are created and updated"
-                    " automatically. This is flag only indicates that an object can be overwritten"
-                    " by migrations. You can still modify the objects via the API, but expect"
-                    " changes to be overwritten in a later update."
-                ),
+                help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
                 null=True,
                 verbose_name="Managed by authentik",
                 unique=True,
@@ -46,9 +46,13 @@ def create_default_user_token(apps: Apps, schema_editor: BaseDatabaseSchemaEdito
     akadmin = User.objects.using(db_alias).filter(username="akadmin")
     if not akadmin.exists():
         return
-    if "AUTHENTIK_BOOTSTRAP_TOKEN" not in environ:
-        return
-    key = environ["AUTHENTIK_BOOTSTRAP_TOKEN"]
+    key = None
+    if "AK_ADMIN_TOKEN" in environ:
+        key = environ["AK_ADMIN_TOKEN"]
+    if "AUTHENTIK_BOOTSTRAP_TOKEN" in environ:
+        key = environ["AUTHENTIK_BOOTSTRAP_TOKEN"]
+    if not key:
+        return
     Token.objects.using(db_alias).create(
         identifier="authentik-bootstrap-token",
         user=akadmin.first(),
@@ -59,6 +63,7 @@ def create_default_user_token(apps: Apps, schema_editor: BaseDatabaseSchemaEdito
 
 
 class Migration(migrations.Migration):
+
     replaces = [
         ("authentik_core", "0018_auto_20210330_1345"),
         ("authentik_core", "0019_source_managed"),
@@ -91,12 +96,7 @@ class Migration(migrations.Migration):
             name="managed",
             field=models.TextField(
                 default=None,
-                help_text=(
-                    "Objects which are managed by authentik. These objects are created and updated"
-                    " automatically. This is flag only indicates that an object can be overwritten"
-                    " by migrations. You can still modify the objects via the API, but expect"
-                    " changes to be overwritten in a later update."
-                ),
+                help_text="Objects which are managed by authentik. These objects are created and updated automatically. This is flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
                 null=True,
                 unique=True,
                 verbose_name="Managed by authentik",
@@ -110,38 +110,23 @@ class Migration(migrations.Migration):
                     ("identifier", "Use the source-specific identifier"),
                     (
                         "email_link",
-                        (
-                            "Link to a user with identical email address. Can have security"
-                            " implications when a source doesn't validate email addresses."
-                        ),
+                        "Link to a user with identical email address. Can have security implications when a source doesn't validate email addresses.",
                     ),
                     (
                         "email_deny",
-                        (
-                            "Use the user's email address, but deny enrollment when the email"
-                            " address already exists."
-                        ),
+                        "Use the user's email address, but deny enrollment when the email address already exists.",
                     ),
                     (
                         "username_link",
-                        (
-                            "Link to a user with identical username. Can have security implications"
-                            " when a username is used with another source."
-                        ),
+                        "Link to a user with identical username. Can have security implications when a username is used with another source.",
                     ),
                     (
                         "username_deny",
-                        (
-                            "Use the user's username, but deny enrollment when the username already"
-                            " exists."
-                        ),
+                        "Use the user's username, but deny enrollment when the username already exists.",
                     ),
                 ],
                 default="identifier",
-                help_text=(
-                    "How the source determines if an existing user should be authenticated or a new"
-                    " user enrolled."
-                ),
+                help_text="How the source determines if an existing user should be authenticated or a new user enrolled.",
             ),
         ),
         migrations.AlterField(
@@ -182,9 +167,7 @@ class Migration(migrations.Migration):
             model_name="application",
             name="meta_launch_url",
             field=models.TextField(
-                blank=True,
-                default="",
-                validators=[authentik.lib.models.DomainlessFormattedURLValidator()],
+                blank=True, default="", validators=[authentik.lib.models.DomainlessURLValidator()]
             ),
         ),
         migrations.RunPython(
@@ -4,6 +4,7 @@ from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
+
     dependencies = [
         ("authentik_core", "0018_auto_20210330_1345_squashed_0028_alter_token_intent"),
     ]
@@ -4,6 +4,7 @@ from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
+
     dependencies = [
         ("authentik_core", "0019_application_group"),
     ]
@@ -4,6 +4,7 @@ from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
+
     dependencies = [
         ("authentik_core", "0020_application_open_in_new_tab"),
     ]
@@ -5,6 +5,7 @@ from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
+
     dependencies = [
         ("authentik_core", "0021_source_user_path_user_path"),
     ]
@@ -4,6 +4,7 @@ from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
+
     dependencies = [
         ("authentik_core", "0022_alter_group_parent"),
     ]
@@ -4,6 +4,7 @@ from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
+
     dependencies = [
         ("authentik_core", "0023_source_authentik_c_slug_ccb2e5_idx_and_more"),
     ]
@@ -1,25 +0,0 @@
-# Generated by Django 4.1.7 on 2023-03-02 21:32
-
-import django.db.models.deletion
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("authentik_flows", "0025_alter_flowstagebinding_evaluate_on_plan_and_more"),
-        ("authentik_core", "0024_source_icon"),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name="provider",
-            name="authorization_flow",
-            field=models.ForeignKey(
-                help_text="Flow used when authorizing this provider.",
-                null=True,
-                on_delete=django.db.models.deletion.CASCADE,
-                related_name="provider_authorization",
-                to="authentik_flows.flow",
-            ),
-        ),
-    ]
@@ -1,26 +0,0 @@
-# Generated by Django 4.1.7 on 2023-03-07 13:41
-
-from django.db import migrations, models
-
-from authentik.lib.migrations import fallback_names
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("authentik_core", "0025_alter_provider_authorization_flow"),
-    ]
-
-    operations = [
-        migrations.RunPython(fallback_names("authentik_core", "propertymapping", "name")),
-        migrations.RunPython(fallback_names("authentik_core", "provider", "name")),
-        migrations.AlterField(
-            model_name="propertymapping",
-            name="name",
-            field=models.TextField(unique=True),
-        ),
-        migrations.AlterField(
-            model_name="provider",
-            name="name",
-            field=models.TextField(unique=True),
-        ),
-    ]
@@ -1,19 +0,0 @@
-# Generated by Django 4.1.7 on 2023-03-19 21:57
-
-import uuid
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("authentik_core", "0026_alter_propertymapping_name_alter_provider_name"),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name="user",
-            name="uuid",
-            field=models.UUIDField(default=uuid.uuid4, editable=False, unique=True),
-        ),
-    ]
@@ -1,25 +0,0 @@
-# Generated by Django 4.1.7 on 2023-03-23 21:44
-
-import django.db.models.deletion
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("authentik_flows", "0025_alter_flowstagebinding_evaluate_on_plan_and_more"),
-        ("authentik_core", "0027_alter_user_uuid"),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name="provider",
-            name="authentication_flow",
-            field=models.ForeignKey(
-                help_text="Flow used for authentication when the associated application is accessed by an un-authenticated user.",
-                null=True,
-                on_delete=django.db.models.deletion.SET_NULL,
-                related_name="provider_authentication",
-                to="authentik_flows.flow",
-            ),
-        ),
-    ]
@@ -1,7 +1,8 @@
 """authentik core models"""
 from datetime import timedelta
-from hashlib import sha256
+from hashlib import md5, sha256
 from typing import Any, Optional
+from urllib.parse import urlencode
 from uuid import uuid4

 from deepmerge import always_merger
@@ -12,7 +13,9 @@ from django.contrib.auth.models import UserManager as DjangoUserManager
 from django.db import models
 from django.db.models import Q, QuerySet, options
 from django.http import HttpRequest
+from django.templatetags.static import static
 from django.utils.functional import SimpleLazyObject, cached_property
+from django.utils.html import escape
 from django.utils.timezone import now
 from django.utils.translation import gettext_lazy as _
 from guardian.mixins import GuardianUserMixin
@@ -22,18 +25,13 @@ from structlog.stdlib import get_logger

 from authentik.blueprints.models import ManagedModel
 from authentik.core.exceptions import PropertyMappingExpressionException
+from authentik.core.signals import password_changed
 from authentik.core.types import UILoginButton, UserSettingSerializer
-from authentik.lib.avatars import get_avatar
-from authentik.lib.config import CONFIG
+from authentik.lib.config import CONFIG, get_path_from_dict
 from authentik.lib.generators import generate_id
-from authentik.lib.models import (
-    CreatedUpdatedModel,
-    DomainlessFormattedURLValidator,
-    SerializerModel,
-)
+from authentik.lib.models import CreatedUpdatedModel, DomainlessURLValidator, SerializerModel
 from authentik.lib.utils.http import get_client_ip
 from authentik.policies.models import PolicyBindingModel
-from authentik.tenants.utils import get_tenant

 LOGGER = get_logger()
 USER_ATTRIBUTE_DEBUG = "goauthentik.io/user/debug"
@@ -51,6 +49,9 @@ USER_ATTRIBUTE_CAN_OVERRIDE_IP = "goauthentik.io/user/override-ips"
 USER_PATH_SYSTEM_PREFIX = "goauthentik.io"
 USER_PATH_SERVICE_ACCOUNT = USER_PATH_SYSTEM_PREFIX + "/service-accounts"

+GRAVATAR_URL = "https://secure.gravatar.com"
+DEFAULT_AVATAR = static("dist/assets/images/user_default.png")
+

 options.DEFAULT_NAMES = options.DEFAULT_NAMES + ("authentik_used_by_shadows",)

@@ -128,6 +129,7 @@ class Group(SerializerModel):
         return f"Group {self.name}"

     class Meta:
+
         unique_together = (
             (
                 "name",
@@ -147,7 +149,7 @@ class UserManager(DjangoUserManager):
 class User(SerializerModel, GuardianUserMixin, AbstractUser):
     """Custom User model to allow easier adding of user-based settings"""

-    uuid = models.UUIDField(default=uuid4, editable=False, unique=True)
+    uuid = models.UUIDField(default=uuid4, editable=False)
     name = models.TextField(help_text=_("User's display name."))
     path = models.TextField(default="users")

@@ -169,7 +171,7 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
         including the users attributes"""
         final_attributes = {}
         if request and hasattr(request, "tenant"):
-            always_merger.merge(final_attributes, get_tenant(request).attributes)
+            always_merger.merge(final_attributes, request.tenant.attributes)
         for group in self.ak_groups.all().order_by("name"):
             always_merger.merge(final_attributes, group.attributes)
         always_merger.merge(final_attributes, self.attributes)
@@ -193,8 +195,6 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):

     def set_password(self, raw_password, signal=True):
         if self.pk and signal:
-            from authentik.core.signals import password_changed
-
             password_changed.send(sender=self, user=self, password=raw_password)
         self.password_change_date = now()
         return super().set_password(raw_password)
@@ -228,15 +228,34 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
         except Exception as exc:
             LOGGER.warning("Failed to get default locale", exc=exc)
         if request:
-            return get_tenant(request).default_locale
+            return request.tenant.locale
         return ""

     @property
     def avatar(self) -> str:
         """Get avatar, depending on authentik.avatar setting"""
-        return get_avatar(self)
+        mode: str = CONFIG.y("avatars", "none")
+        if mode == "none":
+            return DEFAULT_AVATAR
+        if mode.startswith("attributes."):
+            return get_path_from_dict(self.attributes, mode[11:], default=DEFAULT_AVATAR)
+        # gravatar uses md5 for their URLs, so md5 can't be avoided
+        mail_hash = md5(self.email.lower().encode("utf-8")).hexdigest()  # nosec
+        if mode == "gravatar":
+            parameters = [
+                ("s", "158"),
+                ("r", "g"),
+            ]
+            gravatar_url = f"{GRAVATAR_URL}/avatar/{mail_hash}?{urlencode(parameters, doseq=True)}"
+            return escape(gravatar_url)
+        return mode % {
+            "username": self.username,
+            "mail_hash": mail_hash,
+            "upn": self.attributes.get("upn", ""),
+        }

     class Meta:
+
         permissions = (
             ("reset_user_password", "Reset Password"),
             ("impersonate", "Can impersonate other users"),
@@ -248,23 +267,11 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
 class Provider(SerializerModel):
     """Application-independent Provider instance. For example SAML2 Remote, OAuth2 Application"""

-    name = models.TextField(unique=True)
+    name = models.TextField()

-    authentication_flow = models.ForeignKey(
-        "authentik_flows.Flow",
-        null=True,
-        on_delete=models.SET_NULL,
-        help_text=_(
-            "Flow used for authentication when the associated application is accessed by an "
-            "un-authenticated user."
-        ),
-        related_name="provider_authentication",
-    )
-
     authorization_flow = models.ForeignKey(
         "authentik_flows.Flow",
         on_delete=models.CASCADE,
-        null=True,
         help_text=_("Flow used when authorizing this provider."),
         related_name="provider_authorization",
     )
@@ -307,7 +314,7 @@ class Application(SerializerModel, PolicyBindingModel):
     )

     meta_launch_url = models.TextField(
-        default="", blank=True, validators=[DomainlessFormattedURLValidator()]
+        default="", blank=True, validators=[DomainlessURLValidator()]
     )

     open_in_new_tab = models.BooleanField(
@@ -375,6 +382,7 @@ class Application(SerializerModel, PolicyBindingModel):
         return str(self.name)

     class Meta:
+
         verbose_name = _("Application")
         verbose_name_plural = _("Applications")

@@ -384,16 +392,20 @@ class SourceUserMatchingModes(models.TextChoices):

     IDENTIFIER = "identifier", _("Use the source-specific identifier")
     EMAIL_LINK = "email_link", _(
+        (
             "Link to a user with identical email address. Can have security implications "
             "when a source doesn't validate email addresses."
         )
+    )
     EMAIL_DENY = "email_deny", _(
         "Use the user's email address, but deny enrollment when the email address already exists."
     )
     USERNAME_LINK = "username_link", _(
+        (
             "Link to a user with identical username. Can have security implications "
             "when a username is used with another source."
         )
+    )
     USERNAME_DENY = "username_deny", _(
         "Use the user's username, but deny enrollment when the username already exists."
     )
@@ -439,8 +451,10 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
         choices=SourceUserMatchingModes.choices,
         default=SourceUserMatchingModes.IDENTIFIER,
         help_text=_(
+            (
                 "How the source determines if an existing user should be authenticated or "
                 "a new user enrolled."
+            )
         ),
     )

@@ -486,6 +500,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
         return str(self.name)

     class Meta:
+
         indexes = [
             models.Index(
                 fields=[
@@ -514,6 +529,7 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel):
         raise NotImplementedError

     class Meta:
+
         unique_together = (("user", "source"),)


@@ -546,6 +562,7 @@ class ExpiringModel(models.Model):
         return now() > self.expires

     class Meta:
+
         abstract = True


@@ -611,6 +628,7 @@ class Token(SerializerModel, ManagedModel, ExpiringModel):
         return description

     class Meta:
+
         verbose_name = _("Token")
         verbose_name_plural = _("Tokens")
         indexes = [
@@ -624,7 +642,7 @@ class PropertyMapping(SerializerModel, ManagedModel):
     """User-defined key -> x mapping which can be used by providers to expose extra data."""

     pm_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
-    name = models.TextField(unique=True)
+    name = models.TextField()
     expression = models.TextField()

     objects = InheritanceManager()
@@ -647,12 +665,13 @@ class PropertyMapping(SerializerModel, ManagedModel):
         try:
             return evaluator.evaluate(self.expression)
         except Exception as exc:
-            raise PropertyMappingExpressionException(exc) from exc
+            raise PropertyMappingExpressionException(str(exc)) from exc

     def __str__(self):
         return f"Property Mapping {self.name}"

     class Meta:
+
         verbose_name = _("Property Mapping")
         verbose_name_plural = _("Property Mappings")

@@ -689,5 +708,6 @@ class AuthenticatedSession(ExpiringModel):
     )

     class Meta:
+
         verbose_name = _("Authenticated Session")
         verbose_name_plural = _("Authenticated Sessions")
@@ -10,33 +10,36 @@ from django.db.models.signals import post_save, pre_delete
 from django.dispatch import receiver
 from django.http.request import HttpRequest

-from authentik.core.models import Application, AuthenticatedSession
-
 # Arguments: user: User, password: str
 password_changed = Signal()
 # Arguments: credentials: dict[str, any], request: HttpRequest, stage: Stage
 login_failed = Signal()

 if TYPE_CHECKING:
-    from authentik.core.models import User
+    from authentik.core.models import AuthenticatedSession, User


-@receiver(post_save, sender=Application)
+@receiver(post_save)
+# pylint: disable=unused-argument
 def post_save_application(sender: type[Model], instance, created: bool, **_):
     """Clear user's application cache upon application creation"""
     from authentik.core.api.applications import user_app_cache_key
+    from authentik.core.models import Application

+    if sender != Application:
+        return
     if not created:  # pragma: no cover
         return

     # Also delete user application cache
     keys = cache.keys(user_app_cache_key("*"))
     cache.delete_many(keys)


 @receiver(user_logged_in)
+# pylint: disable=unused-argument
 def user_logged_in_session(sender, request: HttpRequest, user: "User", **_):
     """Create an AuthenticatedSession from request"""
+    from authentik.core.models import AuthenticatedSession
+
     session = AuthenticatedSession.from_request(request, user)
     if session:
@@ -44,13 +47,21 @@ def user_logged_in_session(sender, request: HttpRequest, user: "User", **_):


 @receiver(user_logged_out)
+# pylint: disable=unused-argument
 def user_logged_out_session(sender, request: HttpRequest, user: "User", **_):
     """Delete AuthenticatedSession if it exists"""
+    from authentik.core.models import AuthenticatedSession
+
     AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete()


-@receiver(pre_delete, sender=AuthenticatedSession)
+@receiver(pre_delete)
 def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_):
     """Delete session when authenticated session is deleted"""
+    from authentik.core.models import AuthenticatedSession
+
+    if sender != AuthenticatedSession:
+        return
+
     cache_key = f"{KEY_PREFIX}{instance.session_key}"
     cache.delete(cache_key)
@@ -25,8 +25,7 @@ from authentik.flows.planner import (
 )
 from authentik.flows.stage import StageView
 from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET, SESSION_KEY_PLAN
-from authentik.interfaces.models import InterfaceType
-from authentik.interfaces.views import redirect_to_default_interface
+from authentik.lib.utils.urls import redirect_with_qs
 from authentik.lib.views import bad_request_message
 from authentik.policies.denied import AccessDeniedResponse
 from authentik.policies.utils import delete_none_keys
@@ -49,6 +48,7 @@ class Action(Enum):
 class MessageStage(StageView):
     """Show a pre-configured message after the flow is done"""

+    # pylint: disable=unused-argument
     def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
         """Show a pre-configured message after the flow is done"""
         message = getattr(self.executor.current_stage, "message", "")
@@ -191,8 +191,11 @@ class SourceFlowManager:
         # Default case, assume deny
         error = Exception(
             _(
+                (
                 "Request to authenticate with %(source)s has been denied. Please authenticate "
-                "with the source you've previously signed up with." % {"source": self.source.name}
+                "with the source you've previously signed up with."
+                )
+                % {"source": self.source.name}
             ),
         )
         return self.error_handler(error)
@@ -206,6 +209,7 @@ class SourceFlowManager:
         response.error_message = error.messages
         return response

+    # pylint: disable=unused-argument
     def get_stages_to_append(self, flow: Flow) -> list[Stage]:
         """Hook to override stages which are appended to the flow"""
         if not self.source.enrollment_flow:
@@ -227,7 +231,7 @@ class SourceFlowManager:
         # Ensure redirect is carried through when user was trying to
         # authorize application
         final_redirect = self.request.session.get(SESSION_KEY_GET, {}).get(
-            NEXT_ARG_NAME, "authentik_core:root-redirect"
+            NEXT_ARG_NAME, "authentik_core:if-user"
         )
         kwargs.update(
             {
@@ -254,12 +258,13 @@ class SourceFlowManager:
         for stage in stages:
             plan.append_stage(stage)
         self.request.session[SESSION_KEY_PLAN] = plan
-        return redirect_to_default_interface(
-            self.request,
-            InterfaceType.FLOW,
+        return redirect_with_qs(
+            "authentik_core:if-flow",
+            self.request.GET,
             flow_slug=flow.slug,
         )

+    # pylint: disable=unused-argument
     def handle_auth(
         self,
         connection: UserSourceConnection,
@@ -300,9 +305,8 @@ class SourceFlowManager:
             _("Successfully linked %(source)s!" % {"source": self.source.name}),
         )
         return redirect(
-            # Not ideal that we don't directly redirect to the configured user interface
             reverse(
-                "authentik_core:root-redirect",
+                "authentik_core:if-user",
             )
             + f"#/settings;page-{self.source.slug}"
         )
Some files were not shown because too many files have changed in this diff.