Compare commits

280 Commits

version/20… ... version/20…

| SHA1 | Author | Date | |
|---|---|---|---|
| 24eb4ed963 | |||
| 0e6400bfea | |||
| be308b3392 | |||
| 62aa4336a8 | |||
| b16d1134ea | |||
| 78f7eb4345 | |||
| 1615723f10 | |||
| f9b46145de | |||
| 20a4dfd13d | |||
| 4a6f8d2ef2 | |||
| ffdc1aa9c2 | |||
| 138801c18b | |||
| 8f3579ba45 | |||
| 3eecc76717 | |||
| 0488d36257 | |||
| 340bf54315 | |||
| b33f3d9cc8 | |||
| dbaf03430e | |||
| f5738804ff | |||
| bfa0360764 | |||
| ae13fc3b92 | |||
| 7046944bf6 | |||
| 0423023d2e | |||
| 5132f0f876 | |||
| 7e44de2da9 | |||
| 08b0075335 | |||
| efbab9e37f | |||
| 8195e6d4ff | |||
| 700a4cb72c | |||
| 94b9ebb0bb | |||
| fe1e2aa8af | |||
| 7835f3d873 | |||
| 4a50c65cad | |||
| 283c93c57b | |||
| 1b86a3d5d6 | |||
| 8b710b57a5 | |||
| 716584bbae | |||
| 9dc0bb2a77 | |||
| debbcb125b | |||
| 2d827eaae1 | |||
| 47d79ac28c | |||
| 61f2b73255 | |||
| 9f846d94be | |||
| 84fbeb5721 | |||
| 01da8e1792 | |||
| 6a3a3e5f8d | |||
| 42c278b4f8 | |||
| e49bc83266 | |||
| 98b7ebec74 | |||
| ccb43a3dfb | |||
| c92b2620f5 | |||
| e2bfeefc8b | |||
| e52c964354 | |||
| c635487210 | |||
| ca6cd8a4d3 | |||
| fb09df26c9 | |||
| 30f4a09a88 | |||
| 7143ea08e6 | |||
| e4e7a112e3 | |||
| 4c133b957c | |||
| 28eb7c03fa | |||
| 7b01a208a2 | |||
| db0af3763b | |||
| ab9efcea77 | |||
| d280577830 | |||
| 36da29aaa2 | |||
| 9e1204b645 | |||
| ea2f69a8f8 | |||
| 55a705e777 | |||
| cb10289b68 | |||
| 423776c7a2 | |||
| e5cfddfc57 | |||
| 1564b898db | |||
| 3b61c6f9b9 | |||
| 042865c606 | |||
| 7f662ac2f3 | |||
| e9f5d7aefe | |||
| 609f95ac97 | |||
| 0181a90d98 | |||
| 243f335718 | |||
| f4990bb5da | |||
| 980d2a022c | |||
| 81fdd097c6 | |||
| 2b4c9657a6 | |||
| 45d30213b3 | |||
| 7884ff07bb | |||
| bacf2afed1 | |||
| 67b45fc4e3 | |||
| c28f3ab225 | |||
| 027ca88d83 | |||
| 9d5b9204fc | |||
| 39e0ed2962 | |||
| 3b973e12a4 | |||
| d80573bdc5 | |||
| 4182bfd8b5 | |||
| 07a5b49454 | |||
| 16be699190 | |||
| e523dd188c | |||
| 73a2682ed6 | |||
| 3d9f8c80a5 | |||
| 754061dba5 | |||
| 48c520150f | |||
| a754196a48 | |||
| 23fce4e74d | |||
| ab05abe787 | |||
| 67c8febb33 | |||
| 5d397716de | |||
| 5a7c46b3ef | |||
| ec925491b2 | |||
| 2d18c1bb6f | |||
| 2aba32de19 | |||
| a13dc847f0 | |||
| d66670f6ac | |||
| 3418943949 | |||
| f5c89f68a4 | |||
| 8fc942fbf4 | |||
| 83d2c8fc33 | |||
| 89839096ee | |||
| a08d4bc720 | |||
| 7674ef3950 | |||
| 72c474f3b1 | |||
| 1dfc0b2e93 | |||
| 291573fbc5 | |||
| 0995658ca6 | |||
| 53f3764879 | |||
| bdd8b59ab9 | |||
| c3a8e35a2f | |||
| c979be6e25 | |||
| b7092cc307 | |||
| 3aa262efbe | |||
| 3cc326bca8 | |||
| 168c34f172 | |||
| b3da1d223c | |||
| 107f2745c8 | |||
| 6f9002eb01 | |||
| 12db0637ec | |||
| 8d169a8bd9 | |||
| f47ce9a360 | |||
| 4816b90378 | |||
| 01a897dbc2 | |||
| 45eb8baee8 | |||
| 4bf6cfc4d8 | |||
| fddcb3a835 | |||
| 5d51621278 | |||
| 9ffc720f48 | |||
| b6b72e389d | |||
| 5ae593bc00 | |||
| 44fe477c3c | |||
| 43bc60610d | |||
| c21c1757de | |||
| d3197f3430 | |||
| 3d23770e9d | |||
| 0fc0a62279 | |||
| 4da370b458 | |||
| aa3e085536 | |||
| 253b676f7d | |||
| 9f4f911fd3 | |||
| 6ebfb5138c | |||
| ab8ed8599e | |||
| c76fb2eed0 | |||
| 4d8978ea90 | |||
| 64540cc870 | |||
| 5b05884a2b | |||
| eef3ef2165 | |||
| 235296c749 | |||
| 8d13235b74 | |||
| 5ef5c70490 | |||
| 3fe627528e | |||
| 674eeed763 | |||
| 4bd91180df | |||
| 0af4824fa6 | |||
| 64eb953593 | |||
| 45704cf20a | |||
| b5714afac7 | |||
| ff109206fd | |||
| 49bd028363 | |||
| 44bf9a890e | |||
| b60c6d4144 | |||
| ef239e6430 | |||
| 58cd6007b2 | |||
| 1dcf6e8962 | |||
| db95dfe38d | |||
| 860c85d012 | |||
| 6ca1654129 | |||
| a2dc594a44 | |||
| c6bc8e2ddf | |||
| 48a234e86f | |||
| cf521eba5a | |||
| 52ebc78aaa | |||
| 1f7d52c5ce | |||
| 3251bdc220 | |||
| 93fee5f0e5 | |||
| 46c8db7f4b | |||
| fc74c0209a | |||
| 07bfc3da1e | |||
| 2a4daa5360 | |||
| e1a6dede54 | |||
| cf40e5047e | |||
| 17ee076f3d | |||
| 4d12a98c5d | |||
| d5329432fe | |||
| 8a926aaa73 | |||
| 5156aeee0f | |||
| 1690812936 | |||
| c693a2c3f4 | |||
| d6cac5c765 | |||
| 2722b9b7ea | |||
| 014fc6169a | |||
| a7a722c9c0 | |||
| da581dde70 | |||
| 17fc775fd3 | |||
| eb57c787f3 | |||
| 97e789323a | |||
| 290f576641 | |||
| 9723aa11df | |||
| 4e04461820 | |||
| 147ebf1a5e | |||
| e22fce02f8 | |||
| 3b8cb9e525 | |||
| beffb72e3b | |||
| b5c53d5e40 | |||
| 477dbc6daf | |||
| 3aaabdcc9d | |||
| d045b0be1a | |||
| e2bd96c5de | |||
| be9790ef8a | |||
| f8ef2b666f | |||
| 7bc63791c9 | |||
| a9909fcf6d | |||
| 1fa9b3a996 | |||
| 5019346ab6 | |||
| f22f1ebcde | |||
| 1e328436d8 | |||
| cb9a759aa0 | |||
| b80c528531 | |||
| e03d2c06a8 | |||
| 501d63b3aa | |||
| 1c2cdfe06a | |||
| 118555c97a | |||
| 6af9fbc94e | |||
| 3020f9506e | |||
| ce9c6a9689 | |||
| 8f2d573721 | |||
| 97c31d0a21 | |||
| 46d28d8082 | |||
| d248dd5b1b | |||
| 474677017f | |||
| 0813a49ca5 | |||
| d0308a8239 | |||
| 6843c8389b | |||
| 7b0f89398d | |||
| 97b867298a | |||
| 76d5cbcea9 | |||
| 2b925536d3 | |||
| 4baa5ae7a2 | |||
| 3f9d4f7083 | |||
| 10186a2e67 | |||
| 3a13d19695 | |||
| ed7bef9dbf | |||
| 4a17795df9 | |||
| 07b1aea767 | |||
| ab0f8d027d | |||
| b9fdb63a57 | |||
| 94833dd1e7 | |||
| 5262d89505 | |||
| ab3d47c437 | |||
| 14cd52686d | |||
| 1a39754fe9 | |||
| 8599eba863 | |||
| 4c6d21820e | |||
| ddee1c9a8c | |||
| 84678c41a8 | |||
| 7e1059dd43 | |||
| bc56ea6822 | |||
| 768dc55a71 | |||
| a0719ca65e | |||
| 38c8555f36 | |||
| 5b8223808e | |||
| 14f341f504 | |||
| c30aa90888 |

@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2022.11.0
+current_version = 2022.12.0
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)

@@ -1,8 +1,8 @@
 env
-static
 htmlcov
 *.env.yml
 **/node_modules
 dist/**
 build/**
 build_docs/**
+Dockerfile

.github/workflows/ci-main.yml (5 changes, vendored)

@@ -99,7 +99,7 @@ jobs:
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: Create k8s Kind Cluster
-        uses: helm/kind-action@v1.4.0
+        uses: helm/kind-action@v1.5.0
       - name: run integration
         run: |
           poetry run make test-integration
@@ -208,6 +208,9 @@ jobs:
       - name: Building Docker Image
         uses: docker/build-push-action@v3
         with:
+          secrets: |
+            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
+            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
           push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
           tags: |
             ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}

.github/workflows/release-publish.yml (15 changes, vendored)

@@ -31,6 +31,9 @@ jobs:
         uses: docker/build-push-action@v3
         with:
           push: ${{ github.event_name == 'release' }}
+          secrets: |
+            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
+            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
           tags: |
             beryju/authentik:${{ steps.ev.outputs.version }},
             beryju/authentik:${{ steps.ev.outputs.versionFamily }},
@@ -39,7 +42,8 @@ jobs:
             ghcr.io/goauthentik/server:${{ steps.ev.outputs.versionFamily }},
             ghcr.io/goauthentik/server:latest
           platforms: linux/amd64,linux/arm64
-          context: .
+          build-args: |
+            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
   build-outpost:
     runs-on: ubuntu-latest
     strategy:
@@ -84,6 +88,11 @@ jobs:
             ghcr.io/goauthentik/${{ matrix.type }}:latest
           file: ${{ matrix.type }}.Dockerfile
           platforms: linux/amd64,linux/arm64
+          secrets: |
+            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
+            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
+          build-args: |
+            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
   build-outpost-binary:
     timeout-minutes: 120
     runs-on: ubuntu-latest
@@ -161,11 +170,9 @@ jobs:
         if: ${{ github.event_name == 'release' }}
         env:
           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
-          SENTRY_ORG: beryjuorg
+          SENTRY_ORG: authentik-security-inc
           SENTRY_PROJECT: authentik
-          SENTRY_URL: https://sentry.beryju.org
         with:
           version: authentik@${{ steps.ev.outputs.version }}
-          environment: beryjuorg-prod
           sourcemaps: './web/dist'
           url_prefix: '~/static/dist'

.gitignore (4 changes, vendored)

@@ -194,11 +194,9 @@ pip-selfcheck.json
 /static/
 local.env.yml
 
-# Selenium Screenshots
-selenium_screenshots/
-backups/
 media/
 *mmdb
 
 .idea/
 /gen-*/
+data/

.vscode/settings.json (11 changes, vendored)

@@ -20,7 +20,16 @@
     "todo-tree.tree.showCountsInTree": true,
     "todo-tree.tree.showBadges": true,
     "python.formatting.provider": "black",
-    "yaml.customTags": ["!Find sequence", "!KeyOf scalar"],
+    "yaml.customTags": [
+        "!Find sequence",
+        "!KeyOf scalar",
+        "!Context scalar",
+        "!Context sequence",
+        "!Format sequence",
+        "!Condition sequence",
+        "!Env sequence",
+        "!Env scalar"
+    ],
     "typescript.preferences.importModuleSpecifier": "non-relative",
     "typescript.preferences.importModuleSpecifierEnding": "index",
     "typescript.tsdk": "./web/node_modules/typescript/lib",

@@ -17,23 +17,23 @@ diverse, inclusive, and healthy community.
 Examples of behavior that contributes to a positive environment for our
 community include:
 
-* Demonstrating empathy and kindness toward other people
-* Being respectful of differing opinions, viewpoints, and experiences
-* Giving and gracefully accepting constructive feedback
-* Accepting responsibility and apologizing to those affected by our mistakes,
+- Demonstrating empathy and kindness toward other people
+- Being respectful of differing opinions, viewpoints, and experiences
+- Giving and gracefully accepting constructive feedback
+- Accepting responsibility and apologizing to those affected by our mistakes,
   and learning from the experience
-* Focusing on what is best not just for us as individuals, but for the
+- Focusing on what is best not just for us as individuals, but for the
   overall community
 
 Examples of unacceptable behavior include:
 
-* The use of sexualized language or imagery, and sexual attention or
+- The use of sexualized language or imagery, and sexual attention or
   advances of any kind
-* Trolling, insulting or derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or email
+- Trolling, insulting or derogatory comments, and personal or political attacks
+- Public or private harassment
+- Publishing others' private information, such as a physical or email
   address, without their explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
+- Other conduct which could reasonably be considered inappropriate in a
   professional setting
 
 ## Enforcement Responsibilities

@@ -11,19 +11,22 @@ The following is a set of guidelines for contributing to authentik and its compo
 [I don't want to read this whole thing, I just have a question!!!](#i-dont-want-to-read-this-whole-thing-i-just-have-a-question)
 
 [What should I know before I get started?](#what-should-i-know-before-i-get-started)
-* [The components](#the-components)
-* [authentik's structure](#authentiks-structure)
+
+- [The components](#the-components)
+- [authentik's structure](#authentiks-structure)
 
 [How Can I Contribute?](#how-can-i-contribute)
-* [Reporting Bugs](#reporting-bugs)
-* [Suggesting Enhancements](#suggesting-enhancements)
-* [Your First Code Contribution](#your-first-code-contribution)
-* [Pull Requests](#pull-requests)
+
+- [Reporting Bugs](#reporting-bugs)
+- [Suggesting Enhancements](#suggesting-enhancements)
+- [Your First Code Contribution](#your-first-code-contribution)
+- [Pull Requests](#pull-requests)
 
 [Styleguides](#styleguides)
-* [Git Commit Messages](#git-commit-messages)
-* [Python Styleguide](#python-styleguide)
-* [Documentation Styleguide](#documentation-styleguide)
+
+- [Git Commit Messages](#git-commit-messages)
+- [Python Styleguide](#python-styleguide)
+- [Documentation Styleguide](#documentation-styleguide)
 
 ## Code of Conduct
 
@@ -39,11 +42,11 @@ Either [create a question on GitHub](https://github.com/goauthentik/authentik/is
 
 authentik consists of a few larger components:
 
-- *authentik* the actual application server, is described below.
-- *outpost-proxy* is a Go application based on a forked version of oauth2_proxy, which does identity-aware reverse proxying.
-- *outpost-ldap* is a Go LDAP server that uses the *authentik* application server as its backend
-- *web* is the web frontend, both for administrating and using authentik. It is written in TypeScript using lit-html and the PatternFly CSS Library.
-- *website* is the Website/documentation, which uses docusaurus.
+- _authentik_ the actual application server, is described below.
+- _outpost-proxy_ is a Go application based on a forked version of oauth2_proxy, which does identity-aware reverse proxying.
+- _outpost-ldap_ is a Go LDAP server that uses the _authentik_ application server as its backend
+- _web_ is the web frontend, both for administrating and using authentik. It is written in TypeScript using lit-html and the PatternFly CSS Library.
+- _website_ is the Website/documentation, which uses docusaurus.
 
 ### authentik's structure
 
@@ -154,10 +157,10 @@ While the prerequisites above must be satisfied prior to having your pull reques
 
 ### Git Commit Messages
 
-* Use the format of `<package>: <verb> <description>`
+- Use the format of `<package>: <verb> <description>`
   - See [here](#authentik-packages) for `package`
   - Example: `providers/saml2: fix parsing of requests`
-* Reference issues and pull requests liberally after the first line
+- Reference issues and pull requests liberally after the first line
 
 ### Python Styleguide
 
@@ -165,11 +168,11 @@ All Python code is linted with [black](https://black.readthedocs.io/en/stable/),
 
 authentik runs on Python 3.9 at the time of writing this.
 
-* Use native type-annotations wherever possible.
-* Add meaningful docstrings when possible.
-* Ensure any database migrations work properly from the last stable version (this is checked via CI)
-* If your code changes central functions, make sure nothing else is broken.
+- Use native type-annotations wherever possible.
+- Add meaningful docstrings when possible.
+- Ensure any database migrations work properly from the last stable version (this is checked via CI)
+- If your code changes central functions, make sure nothing else is broken.
 
 ### Documentation Styleguide
 
-* Use [MDX](https://mdxjs.com/) whenever appropriate.
+- Use [MDX](https://mdxjs.com/) whenever appropriate.

Dockerfile (27 changes)

@@ -3,6 +3,7 @@ FROM --platform=${BUILDPLATFORM} docker.io/node:18 as website-builder
 
 COPY ./website /work/website/
 COPY ./blueprints /work/blueprints/
+COPY ./SECURITY.md /work/
 
 ENV NODE_ENV=production
 WORKDIR /work/website
@@ -19,7 +20,7 @@ WORKDIR /work/web
 RUN npm ci && npm run build
 
 # Stage 3: Poetry to requirements.txt export
-FROM docker.io/python:3.11.0-slim-bullseye AS poetry-locker
+FROM docker.io/python:3.11.1-slim-bullseye AS poetry-locker
 
 WORKDIR /work
 COPY ./pyproject.toml /work
@@ -30,7 +31,7 @@ RUN pip install --no-cache-dir poetry && \
     poetry export -f requirements.txt --dev --output requirements-dev.txt
 
 # Stage 4: Build go proxy
-FROM docker.io/golang:1.19.3-bullseye AS go-builder
+FROM docker.io/golang:1.19.4-bullseye AS go-builder
 
 WORKDIR /work
 
@@ -45,8 +46,22 @@ COPY ./go.sum /work/go.sum
 
 RUN go build -o /work/authentik ./cmd/server/
 
-# Stage 5: Run
-FROM docker.io/python:3.11.0-slim-bullseye AS final-image
+# Stage 5: MaxMind GeoIP
+FROM docker.io/maxmindinc/geoipupdate:v4.10 as geoip
+
+ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
+
+RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
+    --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
+    mkdir -p /usr/share/GeoIP && \
+    /bin/sh -c "\
+    export GEOIPUPDATE_ACCOUNT_ID=$(cat /run/secrets/GEOIPUPDATE_ACCOUNT_ID); \
+    export GEOIPUPDATE_LICENSE_KEY=$(cat /run/secrets/GEOIPUPDATE_LICENSE_KEY); \
+    /usr/bin/entry.sh || exit 0 \
+    "
+
+# Stage 6: Run
+FROM docker.io/python:3.11.1-slim-bullseye AS final-image
 
 LABEL org.opencontainers.image.url https://goauthentik.io
 LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
@@ -59,10 +74,11 @@ ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
 
 COPY --from=poetry-locker /work/requirements.txt /
 COPY --from=poetry-locker /work/requirements-dev.txt /
+COPY --from=geoip /usr/share/GeoIP /geoip
 
 RUN apt-get update && \
     # Required for installing pip packages
-    apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev && \
+    apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev && \
    # Required for runtime
    apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
    # Required for bootstrap & healtcheck
@@ -80,6 +96,7 @@ RUN apt-get update && \
 COPY ./authentik/ /authentik
 COPY ./pyproject.toml /
 COPY ./xml /xml
+COPY ./locale /locale
 COPY ./tests /tests
 COPY ./manage.py /
 COPY ./blueprints /blueprints

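The new GeoIP stage above reads the MaxMind credentials through BuildKit secret mounts (`--mount=type=secret,...`) rather than build args, so they never land in an image layer. In CI the same values are wired through the `secrets:` input of `docker/build-push-action`, as the workflow diffs earlier show. As a rough, hedged sketch only — the helper name and credential file paths below are assumptions, not part of this change — a local build could supply those secrets like this:

```python
# Hypothetical local-build helper; the credential file paths are assumptions,
# adjust them to wherever the MaxMind account ID and license key are stored.
import os
import subprocess


def build_with_geoip_secrets(account_id_file: str, license_key_file: str) -> None:
    """Run a BuildKit build that exposes the MaxMind credentials as secret mounts."""
    subprocess.run(
        [
            "docker", "build",
            "--secret", f"id=GEOIPUPDATE_ACCOUNT_ID,src={account_id_file}",
            "--secret", f"id=GEOIPUPDATE_LICENSE_KEY,src={license_key_file}",
            "-t", "authentik:local",
            ".",
        ],
        check=True,
        env={**os.environ, "DOCKER_BUILDKIT": "1"},  # make sure BuildKit handles --secret
    )
```
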
README.md (16 changes)

@@ -5,13 +5,13 @@
 ---
 
 [](https://goauthentik.io/discord)
 [](https://github.com/goauthentik/authentik/actions/workflows/ci-main.yml)
 [](https://github.com/goauthentik/authentik/actions/workflows/ci-outpost.yml)
 [](https://github.com/goauthentik/authentik/actions/workflows/ci-web.yml)
 [](https://codecov.io/gh/goauthentik/authentik)
 
 
-[](https://www.transifex.com/beryjuorg/authentik/)
+[](https://www.transifex.com/authentik/authentik/)
 
 ## What is authentik?
 
@@ -25,10 +25,10 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h
 
 ## Screenshots
 
-Light | Dark
---- | ---
- | 
- | 
+| Light | Dark |
+| ------------------------------------------------------ | ----------------------------------------------------- |
+|  |  |
+|  |  |
 
 ## Development
 
SECURITY.md (38 changes)

@@ -1,18 +1,44 @@
-# Security Policy
+Authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce it's existence and release the fixed version.
 
 ## Supported Versions
 
 (.x being the latest patch release for each version)
 
 | Version | Supported |
-| ---------- | ------------------ |
-| 2022.9.x | :white_check_mark: |
-| 2022.10.x | :white_check_mark: |
+| --------- | ------------------ |
+| 2022.11.x | :white_check_mark: |
+| 2022.12.x | :white_check_mark: |
 
 ## Reporting a Vulnerability
 
-To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io)
+To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io). Be sure to include relevant information like which version you've found the issue in, instructions on how to reproduce the issue, and anything else that might make it easier for us to find the bug.
 
+## Criticality levels
+
+### High
+
+- Authorization bypass
+- Circumvention of policies
+
+### Moderate
+
+- Denial-of-Service attacks
+
+### Low
+
+- Unvalidated redirects
+- Issues requiring uncommon setups
+
+## Disclosure process
+
+1. Issue is reported via Email as listed above.
+2. The authentik Security team will try to reproduce the issue and ask for more information if required.
+3. A criticality level is assigned.
+4. A fix is created, and if possible tested by the issue reporter.
+5. The fix is backported to other supported versions, and if possible a workaround for other versions is created.
+6. An announcement is sent out with a fixed release date and criticality level of the issue. The announcement will be sent at least 24 hours before the release of the fix
+7. The fixed version is released for the supported versions.
+
 ## Getting security notifications
 
-To get security notifications, join the [discord](https://goauthentik.io/discord) server. In the future there will be a mailing list too.
+To get security notifications, subscribe to the mailing list [here](https://groups.google.com/g/authentik-security-announcements) or join the [discord](https://goauthentik.io/discord) server.

@@ -2,7 +2,7 @@
 from os import environ
 from typing import Optional
 
-__version__ = "2022.11.0"
+__version__ = "2022.12.0"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
 
 

@@ -11,7 +11,6 @@ from authentik.core.middleware import CTX_AUTH_VIA
 from authentik.core.models import Token, TokenIntents, User
 from authentik.outposts.models import Outpost
 from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
-from authentik.providers.oauth2.models import RefreshToken
 
 LOGGER = get_logger()
 
@@ -33,6 +32,8 @@ def validate_auth(header: bytes) -> Optional[str]:
 
 def bearer_auth(raw_header: bytes) -> Optional[User]:
     """raw_header in the Format of `Bearer ....`"""
+    from authentik.providers.oauth2.models import RefreshToken
+
     auth_credentials = validate_auth(raw_header)
     if not auth_credentials:
         return None

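The only functional change here is that the `RefreshToken` import moves from module level into the body of `bearer_auth()`. Deferring an import into a function is a common way to break an import cycle or avoid importing a model before the app registry is ready; the diff itself does not state the motivation, so that reading is an assumption. A self-contained illustration of the pattern (using a stdlib module rather than authentik's real one):

```python
# Self-contained illustration of the deferred-import pattern used above.
# "json" stands in for the real dependency; it is only resolved on the first
# call instead of at module import time.
def dump_compact(data: dict) -> str:
    from json import dumps  # deferred import, mirroring the RefreshToken move

    return dumps(data, separators=(",", ":"))


print(dump_compact({"token": "example"}))
```
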
@@ -49,11 +49,12 @@ from authentik.policies.hibp.api import HaveIBeenPwendPolicyViewSet
 from authentik.policies.password.api import PasswordPolicyViewSet
 from authentik.policies.reputation.api import ReputationPolicyViewSet, ReputationViewSet
 from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet
-from authentik.providers.oauth2.api.provider import OAuth2ProviderViewSet
-from authentik.providers.oauth2.api.scope import ScopeMappingViewSet
+from authentik.providers.oauth2.api.providers import OAuth2ProviderViewSet
+from authentik.providers.oauth2.api.scopes import ScopeMappingViewSet
 from authentik.providers.oauth2.api.tokens import AuthorizationCodeViewSet, RefreshTokenViewSet
 from authentik.providers.proxy.api import ProxyOutpostConfigViewSet, ProxyProviderViewSet
-from authentik.providers.saml.api import SAMLPropertyMappingViewSet, SAMLProviderViewSet
+from authentik.providers.saml.api.property_mapping import SAMLPropertyMappingViewSet
+from authentik.providers.saml.api.providers import SAMLProviderViewSet
 from authentik.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
 from authentik.sources.oauth.api.source import OAuthSourceViewSet
 from authentik.sources.oauth.api.source_connection import UserOAuthSourceConnectionViewSet

@@ -53,6 +53,21 @@
                 "id": {
                     "type": "string"
                 },
+                "state": {
+                    "type": "string",
+                    "enum": [
+                        "absent",
+                        "present",
+                        "created"
+                    ],
+                    "default": "present"
+                },
+                "conditions": {
+                    "type": "array",
+                    "items": {
+                        "type": "boolean"
+                    }
+                },
                 "attrs": {
                     "type": "object",
                     "properties": {

@@ -92,7 +92,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
         if ":" in url.path:
             path, _, ref = path.partition(":")
         client = NewClient(
-            f"{url.scheme}://{url.hostname}",
+            f"https://{url.hostname}",
             WithUserAgent(authentik_user_agent()),
             WithUsernamePassword(url.username, url.password),
             WithDefaultName(path),
@@ -135,12 +135,11 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
 
     def retrieve(self) -> str:
         """Retrieve blueprint contents"""
+        if self.path.startswith("oci://"):
+            return self.retrieve_oci()
         full_path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
-        if full_path.exists():
-            LOGGER.debug("Blueprint path exists locally", instance=self)
-            with full_path.open("r", encoding="utf-8") as _file:
-                return _file.read()
-        return self.retrieve_oci()
+        with full_path.open("r", encoding="utf-8") as _file:
+            return _file.read()
 
     @property
     def serializer(self) -> Serializer:

@@ -7,7 +7,6 @@ from django.apps import apps
 
 from authentik.blueprints.apps import ManagedAppConfig
 from authentik.blueprints.models import BlueprintInstance
-from authentik.lib.config import CONFIG
 
 
 def apply_blueprint(*files: str):
@@ -46,3 +45,13 @@ def reconcile_app(app_name: str):
         return wrapper
 
     return wrapper_outer
+
+
+def load_yaml_fixture(path: str, **kwargs) -> str:
+    """Load yaml fixture, optionally formatting it with kwargs"""
+    with open(Path(__file__).resolve().parent / Path(path), "r", encoding="utf-8") as _fixture:
+        fixture = _fixture.read()
+    try:
+        return fixture % kwargs
+    except TypeError:
+        return fixture

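The new `load_yaml_fixture()` helper reads a file relative to the tests package and, when keyword arguments are given, fills `%(name)s` placeholders with printf-style mapping interpolation, falling back to the raw text if formatting does not apply. A standalone illustration of that substitution (the slug value is made up):

```python
# Standalone illustration of the substitution load_yaml_fixture() performs.
fixture = 'name: "%(id)s"\nslug: "%(id)s"\n'

try:
    rendered = fixture % {"id": "example-flow"}  # kwargs fill the %(...)s placeholders
except TypeError:
    rendered = fixture  # fall back to the raw text when formatting does not apply

print(rendered)
# name: "example-flow"
# slug: "example-flow"
```
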
authentik/blueprints/tests/fixtures/conditions_fulfilled.yaml (new file, 21 lines, vendored)

version: 1
entries:
  - identifiers:
      name: "%(id1)s"
      slug: "%(id1)s"
    model: authentik_flows.flow
    conditions:
      - true
    attrs:
      designation: stage_configuration
      title: foo
  - identifiers:
      name: "%(id2)s"
      slug: "%(id2)s"
    model: authentik_flows.flow
    conditions:
      - true
      - true
    attrs:
      designation: stage_configuration
      title: foo

authentik/blueprints/tests/fixtures/conditions_not_fulfilled.yaml (new file, 21 lines, vendored)

version: 1
entries:
  - identifiers:
      name: "%(id1)s"
      slug: "%(id1)s"
    model: authentik_flows.flow
    conditions:
      - false
    attrs:
      designation: stage_configuration
      title: foo
  - identifiers:
      name: "%(id2)s"
      slug: "%(id2)s"
    model: authentik_flows.flow
    conditions:
      - true
      - false
    attrs:
      designation: stage_configuration
      title: foo

authentik/blueprints/tests/fixtures/state_absent.yaml (new file, 7 lines, vendored)

version: 1
entries:
  - identifiers:
      name: "%(id)s"
      slug: "%(id)s"
    model: authentik_flows.flow
    state: absent

authentik/blueprints/tests/fixtures/state_created.yaml (new file, 10 lines, vendored)

version: 1
entries:
  - identifiers:
      name: "%(id)s"
      slug: "%(id)s"
    model: authentik_flows.flow
    state: created
    attrs:
      designation: stage_configuration
      title: foo

authentik/blueprints/tests/fixtures/state_present.yaml (new file, 10 lines, vendored)

version: 1
entries:
  - identifiers:
      name: "%(id)s"
      slug: "%(id)s"
    model: authentik_flows.flow
    state: present
    attrs:
      designation: stage_configuration
      title: foo

authentik/blueprints/tests/fixtures/static_prompt_export.yaml (new file, 12 lines, vendored)

version: 1
entries:
  - identifiers:
      pk: cb954fd4-65a5-4ad9-b1ee-180ee9559cf4
    model: authentik_stages_prompt.prompt
    attrs:
      field_key: username
      label: Username
      type: username
      required: true
      placeholder: Username
      order: 0

authentik/blueprints/tests/fixtures/tags.yaml (new file, 100 lines, vendored)

version: 1
context:
  foo: bar
  policy_property: name
  policy_property_value: foo-bar-baz-qux
entries:
  - model: authentik_sources_oauth.oauthsource
    identifiers:
      slug: test
    attrs:
      name: test
      provider_type: github
      consumer_key: !Env foo
      consumer_secret: !Env [bar, baz]
      authentication_flow:
        !Find [
          authentik_flows.Flow,
          [slug, default-source-authentication],
        ]
      enrollment_flow:
        !Find [authentik_flows.Flow, [slug, default-source-enrollment]]
  - attrs:
      expression: return True
    identifiers:
      name: !Format [foo-%s-%s-%s, !Context foo, !Context bar, qux]
    id: policy
    model: authentik_policies_expression.expressionpolicy
  - attrs:
      attributes:
        policy_pk1:
          !Format [
            "%s-%s",
            !Find [
              authentik_policies_expression.expressionpolicy,
              [
                !Context policy_property,
                !Context policy_property_value,
              ],
              [expression, return True],
            ],
            suffix,
          ]
        policy_pk2: !Format ["%s-%s", !KeyOf policy, suffix]
        boolAnd:
          !Condition [AND, !Context foo, !Format ["%s", "a_string"], 1]
        boolNand:
          !Condition [NAND, !Context foo, !Format ["%s", "a_string"], 1]
        boolOr:
          !Condition [
            OR,
            !Context foo,
            !Format ["%s", "a_string"],
            null,
          ]
        boolNor:
          !Condition [
            NOR,
            !Context foo,
            !Format ["%s", "a_string"],
            null,
          ]
        boolXor:
          !Condition [XOR, !Context foo, !Format ["%s", "a_string"], 1]
        boolXnor:
          !Condition [XNOR, !Context foo, !Format ["%s", "a_string"], 1]
        boolComplex:
          !Condition [
            XNOR,
            !Condition [AND, !Context non_existing],
            !Condition [NOR, a string],
            !Condition [XOR, null],
          ]
        if_true_complex:
          !If [
            true,
            {
              dictionary:
                {
                  with: { keys: "and_values" },
                  and_nested_custom_tags:
                    !Format ["foo-%s", !Context foo],
                },
            },
            null,
          ]
        if_false_complex:
          !If [
            !Condition [AND, false],
            null,
            [list, with, items, !Format ["foo-%s", !Context foo]],
          ]
        if_true_simple: !If [!Context foo, true, text]
        if_false_simple: !If [null, false, 2]
    identifiers:
      name: test
    conditions:
      - !Condition [AND, true, true, text]
      - true
      - text
    model: authentik_core.group

@@ -1,41 +1,20 @@
 """Test blueprints v1"""
+from os import environ
 
 from django.test import TransactionTestCase
 
+from authentik.blueprints.tests import load_yaml_fixture
 from authentik.blueprints.v1.exporter import FlowExporter
 from authentik.blueprints.v1.importer import Importer, transaction_rollback
+from authentik.core.models import Group
 from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding
 from authentik.lib.generators import generate_id
 from authentik.policies.expression.models import ExpressionPolicy
 from authentik.policies.models import PolicyBinding
+from authentik.sources.oauth.models import OAuthSource
 from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage
 from authentik.stages.user_login.models import UserLoginStage
 
-STATIC_PROMPT_EXPORT = """version: 1
-entries:
-    - identifiers:
-        pk: cb954fd4-65a5-4ad9-b1ee-180ee9559cf4
-      model: authentik_stages_prompt.prompt
-      attrs:
-        field_key: username
-        label: Username
-        type: username
-        required: true
-        placeholder: Username
-        order: 0
-"""
-
-YAML_TAG_TESTS = """version: 1
-context:
-    foo: bar
-entries:
-    - attrs:
-        expression: return True
-      identifiers:
-        name: !Format [foo-%s-%s, !Context foo, !Context bar]
-      id: default-source-enrollment-if-username
-      model: authentik_policies_expression.expressionpolicy
-"""
 
 class TestBlueprintsV1(TransactionTestCase):
     """Test Blueprints"""
@@ -51,6 +30,61 @@ class TestBlueprintsV1(TransactionTestCase):
             )
         )
         self.assertFalse(importer.validate()[0])
+        importer = Importer(
+            (
+                '{"version": 1, "entries": [{"attrs": {"name": "test"}, '
+                '"identifiers": {}, '
+                '"model": "authentik_core.Group"}]}'
+            )
+        )
+        self.assertFalse(importer.validate()[0])
+
+    def test_validated_import_dict_identifiers(self):
+        """Test importing blueprints with dict identifiers."""
+        Group.objects.filter(name__istartswith="test").delete()
+
+        Group.objects.create(
+            name="test1",
+            attributes={
+                "key": ["value"],
+                "other_key": ["a_value", "other_value"],
+            },
+        )
+        Group.objects.create(
+            name="test2",
+            attributes={
+                "key": ["value"],
+                "other_key": ["diff_value", "other_diff_value"],
+            },
+        )
+
+        importer = Importer(
+            (
+                '{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
+                '{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
+                '["other_value"]}}, "model": "authentik_core.Group"}]}'
+            )
+        )
+        self.assertTrue(importer.validate()[0])
+        self.assertTrue(importer.apply())
+        self.assertTrue(
+            Group.objects.filter(
+                name="test2",
+                attributes={
+                    "key": ["value"],
+                    "other_key": ["diff_value", "other_diff_value"],
+                },
+            )
+        )
+        self.assertTrue(
+            Group.objects.filter(
+                name="test999",
+                # All attributes used as identifiers are kept and merged with the
+                # new attributes declared in the blueprint
+                attributes={"key": ["updated_value"], "other_key": ["other_value"]},
+            )
+        )
+        self.assertFalse(Group.objects.filter(name="test1"))
 
     def test_export_validate_import(self):
         """Test export and validate it"""
@@ -85,25 +119,58 @@ class TestBlueprintsV1(TransactionTestCase):
         """Test export and import it twice"""
         count_initial = Prompt.objects.filter(field_key="username").count()
 
-        importer = Importer(STATIC_PROMPT_EXPORT)
+        importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
 
         count_before = Prompt.objects.filter(field_key="username").count()
         self.assertEqual(count_initial + 1, count_before)
 
-        importer = Importer(STATIC_PROMPT_EXPORT)
+        importer = Importer(load_yaml_fixture("fixtures/static_prompt_export.yaml"))
         self.assertTrue(importer.apply())
 
         self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)
 
     def test_import_yaml_tags(self):
         """Test some yaml tags"""
-        ExpressionPolicy.objects.filter(name="foo-foo-bar").delete()
-        importer = Importer(YAML_TAG_TESTS, {"bar": "baz"})
+        ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
+        Group.objects.filter(name="test").delete()
+        environ["foo"] = generate_id()
+        importer = Importer(load_yaml_fixture("fixtures/tags.yaml"), {"bar": "baz"})
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
-        self.assertTrue(ExpressionPolicy.objects.filter(name="foo-foo-bar"))
+        policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()
+        self.assertTrue(policy)
+        self.assertTrue(
+            Group.objects.filter(
+                attributes={
+                    "policy_pk1": str(policy.pk) + "-suffix",
+                    "policy_pk2": str(policy.pk) + "-suffix",
+                    "boolAnd": True,
+                    "boolNand": False,
+                    "boolOr": True,
+                    "boolNor": False,
+                    "boolXor": True,
+                    "boolXnor": False,
+                    "boolComplex": True,
+                    "if_true_complex": {
+                        "dictionary": {
+                            "with": {"keys": "and_values"},
+                            "and_nested_custom_tags": "foo-bar",
+                        }
+                    },
+                    "if_false_complex": ["list", "with", "items", "foo-bar"],
+                    "if_true_simple": True,
+                    "if_false_simple": 2,
+                }
+            )
+        )
+        self.assertTrue(
+            OAuthSource.objects.filter(
+                slug="test",
+                consumer_key=environ["foo"],
+            )
+        )
 
     def test_export_validate_import_policies(self):
         """Test export and validate it"""

authentik/blueprints/tests/test_v1_conditions.py (new file, 43 lines)

"""Test blueprints v1"""
from django.test import TransactionTestCase

from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.importer import Importer
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id


class TestBlueprintsV1Conditions(TransactionTestCase):
    """Test Blueprints conditions attribute"""

    def test_conditions_fulfilled(self):
        """Test conditions fulfilled"""
        flow_slug1 = generate_id()
        flow_slug2 = generate_id()
        import_yaml = load_yaml_fixture(
            "fixtures/conditions_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
        )

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure objects exist
        flow: Flow = Flow.objects.filter(slug=flow_slug1).first()
        self.assertEqual(flow.slug, flow_slug1)
        flow: Flow = Flow.objects.filter(slug=flow_slug2).first()
        self.assertEqual(flow.slug, flow_slug2)

    def test_conditions_not_fulfilled(self):
        """Test conditions not fulfilled"""
        flow_slug1 = generate_id()
        flow_slug2 = generate_id()
        import_yaml = load_yaml_fixture(
            "fixtures/conditions_not_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
        )

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure objects do not exist
        self.assertFalse(Flow.objects.filter(slug=flow_slug1))
        self.assertFalse(Flow.objects.filter(slug=flow_slug2))

authentik/blueprints/tests/test_v1_state.py (new file, 82 lines)

"""Test blueprints v1"""
from django.test import TransactionTestCase

from authentik.blueprints.tests import load_yaml_fixture
from authentik.blueprints.v1.importer import Importer
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id


class TestBlueprintsV1State(TransactionTestCase):
    """Test Blueprints state attribute"""

    def test_state_present(self):
        """Test state present"""
        flow_slug = generate_id()
        import_yaml = load_yaml_fixture("fixtures/state_present.yaml", id=flow_slug)

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure object exists
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.slug, flow_slug)

        # Update object
        flow.title = "bar"
        flow.save()

        flow.refresh_from_db()
        self.assertEqual(flow.title, "bar")

        # Ensure importer updates it
        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.title, "foo")

    def test_state_created(self):
        """Test state created"""
        flow_slug = generate_id()
        import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure object exists
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.slug, flow_slug)

        # Update object
        flow.title = "bar"
        flow.save()

        flow.refresh_from_db()
        self.assertEqual(flow.title, "bar")

        # Ensure importer doesn't update it
        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.title, "bar")

    def test_state_absent(self):
        """Test state absent"""
        flow_slug = generate_id()
        import_yaml = load_yaml_fixture("fixtures/state_created.yaml", id=flow_slug)

        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure object exists
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertEqual(flow.slug, flow_slug)

        import_yaml = load_yaml_fixture("fixtures/state_absent.yaml", id=flow_slug)
        importer = Importer(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
        self.assertIsNone(flow)

@@ -67,25 +67,8 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
     @CONFIG.patch("blueprints_dir", TMP)
     def test_valid_updated(self):
         """Test valid file"""
+        BlueprintInstance.objects.filter(name="foo").delete()
         with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
-            file.write(
-                dump(
-                    {
-                        "version": 1,
-                        "entries": [],
-                    }
-                )
-            )
-            file.flush()
-            blueprints_discover()  # pylint: disable=no-value-for-parameter
-            self.assertEqual(
-                BlueprintInstance.objects.first().last_applied_hash,
-                (
-                    "e52bb445b03cd36057258dc9f0ce0fbed8278498ee1470e45315293e5f026d1b"
-                    "d1f9b3526871c0003f5c07be5c3316d9d4a08444bd8fed1b3f03294e51e44522"
-                ),
-            )
-            self.assertEqual(BlueprintInstance.objects.first().metadata, {})
             file.write(
                 dump(
                     {
@@ -99,18 +82,44 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
             )
             file.flush()
             blueprints_discover()  # pylint: disable=no-value-for-parameter
+            blueprint = BlueprintInstance.objects.filter(name="foo").first()
             self.assertEqual(
-                BlueprintInstance.objects.first().last_applied_hash,
+                blueprint.last_applied_hash,
                 (
-                    "fc62fea96067da8592bdf90927246d0ca150b045447df93b0652a0e20a8bc327"
-                    "681510b5db37ea98759c61f9a98dd2381f46a3b5a2da69dfb45158897f14e824"
+                    "b86ec439b3857350714f070d2833490e736d9155d3d97b2cac13f3b352223e5a"
+                    "1adbf8ec56fa616d46090cc4773ff9e46c4e509fde96b97de87dd21fa329ca1a"
+                ),
+            )
+            self.assertEqual(blueprint.metadata, {"labels": {}, "name": "foo"})
+            file.write(
+                dump(
+                    {
+                        "version": 1,
+                        "entries": [],
+                        "metadata": {
+                            "name": "foo",
+                            "labels": {
+                                "foo": "bar",
+                            },
+                        },
+                    }
+                )
+            )
+            file.flush()
+            blueprints_discover()  # pylint: disable=no-value-for-parameter
+            blueprint.refresh_from_db()
+            self.assertEqual(
+                blueprint.last_applied_hash,
+                (
+                    "87b68b10131d2c9751ed308bba38f04734b9e2cdf8532ed617bc52979b063c49"
+                    "2564f33f3d20ab9d5f0fd9e6eb77a13942e060199f147789cb7afab9690e72b5"
                 ),
             )
             self.assertEqual(
-                BlueprintInstance.objects.first().metadata,
+                blueprint.metadata,
                 {
                     "name": "foo",
-                    "labels": {},
+                    "labels": {"foo": "bar"},
                 },
             )
 

@@ -2,7 +2,10 @@
 from collections import OrderedDict
 from dataclasses import asdict, dataclass, field, is_dataclass
 from enum import Enum
-from typing import Any, Optional
+from functools import reduce
+from operator import ixor
+from os import getenv
+from typing import Any, Literal, Optional
 from uuid import UUID
 
 from django.apps import apps
@@ -41,11 +44,21 @@ class BlueprintEntryState:
     instance: Optional[Model] = None
 
 
+class BlueprintEntryDesiredState(Enum):
+    """State an entry should be reconciled to"""
+
+    ABSENT = "absent"
+    PRESENT = "present"
+    CREATED = "created"
+
+
 @dataclass
 class BlueprintEntry:
     """Single entry of a blueprint"""
 
     model: str
+    state: BlueprintEntryDesiredState = field(default=BlueprintEntryDesiredState.PRESENT)
+    conditions: list[Any] = field(default_factory=list)
     identifiers: dict[str, Any] = field(default_factory=dict)
     attrs: Optional[dict[str, Any]] = field(default_factory=dict)
 
@@ -90,6 +103,10 @@ class BlueprintEntry:
         """Get attributes of this entry, with all yaml tags resolved"""
         return self.tag_resolver(self.identifiers, blueprint)
 
+    def check_all_conditions_match(self, blueprint: "Blueprint") -> bool:
+        """Check all conditions of this entry match (evaluate to True)"""
+        return all(self.tag_resolver(self.conditions, blueprint))
+
 
 @dataclass
 class BlueprintMetadata:
@@ -144,6 +161,26 @@ class KeyOf(YAMLTag):
         )
 
 
+class Env(YAMLTag):
+    """Lookup environment variable with optional default"""
+
+    key: str
+    default: Optional[Any]
+
+    # pylint: disable=unused-argument
+    def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
+        super().__init__()
+        self.default = None
+        if isinstance(node, ScalarNode):
+            self.key = node.value
+        if isinstance(node, SequenceNode):
+            self.key = node.value[0].value
+            self.default = node.value[1].value
+
+    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
+        return getenv(self.key, self.default)
+
+
 class Context(YAMLTag):
     """Lookup key from instance context"""
 
@@ -179,11 +216,18 @@ class Format(YAMLTag):
         self.format_string = node.value[0].value
         self.args = []
         for raw_node in node.value[1:]:
-            self.args.append(raw_node.value)
+            self.args.append(loader.construct_object(raw_node))
 
     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
+        args = []
+        for arg in self.args:
+            if isinstance(arg, YAMLTag):
+                args.append(arg.resolve(entry, blueprint))
+            else:
+                args.append(arg)
+
         try:
-            return self.format_string % tuple(self.args)
+            return self.format_string % tuple(args)
         except TypeError as exc:
             raise EntryInvalidError(exc)
 
@@ -210,13 +254,93 @@ class Find(YAMLTag):
     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
         query = Q()
         for cond in self.conditions:
-            query &= Q(**{cond[0]: cond[1]})
+            if isinstance(cond[0], YAMLTag):
+                query_key = cond[0].resolve(entry, blueprint)
+            else:
+                query_key = cond[0]
+            if isinstance(cond[1], YAMLTag):
+                query_value = cond[1].resolve(entry, blueprint)
+            else:
+                query_value = cond[1]
+            query &= Q(**{query_key: query_value})
         instance = self.model_class.objects.filter(query).first()
         if instance:
             return instance.pk
         return None
 
 
+class Condition(YAMLTag):
+    """Convert all values to a single boolean"""
+
+    mode: Literal["AND", "NAND", "OR", "NOR", "XOR", "XNOR"]
+    args: list[Any]
+
+    _COMPARATORS = {
+        # Using all and any here instead of from operator import iand, ior
+        # to improve performance
+        "AND": all,
+        "NAND": lambda args: not all(args),
+        "OR": any,
+        "NOR": lambda args: not any(args),
+        "XOR": lambda args: reduce(ixor, args) if len(args) > 1 else args[0],
+        "XNOR": lambda args: not (reduce(ixor, args) if len(args) > 1 else args[0]),
+    }

+    # pylint: disable=unused-argument
+    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
+        super().__init__()
+        self.mode = node.value[0].value
+        self.args = []
+        for raw_node in node.value[1:]:
+            self.args.append(loader.construct_object(raw_node))
+
+    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
+        args = []
+        for arg in self.args:
+            if isinstance(arg, YAMLTag):
+                args.append(arg.resolve(entry, blueprint))
+            else:
+                args.append(arg)
+
+        if not args:
+            raise EntryInvalidError("At least one value is required after mode selection.")
+
+        try:
+            comparator = self._COMPARATORS[self.mode.upper()]
+            return comparator(tuple(bool(x) for x in args))
+        except (TypeError, KeyError) as exc:
+            raise EntryInvalidError(exc)
+
+
+class If(YAMLTag):
+    """Select YAML to use based on condition"""
+
+    condition: Any
+    when_true: Any
+    when_false: Any
+
+    # pylint: disable=unused-argument
+    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
+        super().__init__()
+        self.condition = loader.construct_object(node.value[0])
+        self.when_true = loader.construct_object(node.value[1])
+        self.when_false = loader.construct_object(node.value[2])
+
+    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
+        if isinstance(self.condition, YAMLTag):
+            condition = self.condition.resolve(entry, blueprint)
+        else:
+            condition = self.condition
+
+        try:
+            return entry.tag_resolver(
+                self.when_true if condition else self.when_false,
+                blueprint,
+            )
+        except TypeError as exc:
+            raise EntryInvalidError(exc)
+
+
 class BlueprintDumper(SafeDumper):
     """Dump dataclasses to yaml"""
 
@@ -227,8 +351,15 @@ class BlueprintDumper(SafeDumper):
         self.add_representer(UUID, lambda self, data: self.represent_str(str(data)))
         self.add_representer(OrderedDict, lambda self, data: self.represent_dict(dict(data)))
         self.add_representer(Enum, lambda self, data: self.represent_str(data.value))
+        self.add_representer(
+            BlueprintEntryDesiredState, lambda self, data: self.represent_str(data.value)
+        )
         self.add_representer(None, lambda self, data: self.represent_str(str(data)))
 
+    def ignore_aliases(self, data):
+        """Don't use any YAML anchors"""
+        return True
+
     def represent(self, data) -> None:
         if is_dataclass(data):
 
@@ -250,6 +381,9 @@ class BlueprintLoader(SafeLoader):
         self.add_constructor("!Find", Find)
         self.add_constructor("!Context", Context)
         self.add_constructor("!Format", Format)
+        self.add_constructor("!Condition", Condition)
+        self.add_constructor("!If", If)
+        self.add_constructor("!Env", Env)
 
 
 class EntryInvalidError(SentryIgnoredException):
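The hunks above add three new YAML tags (!Env, !Condition, !If) plus a per-entry state and conditions field to the blueprint schema. As a quick illustration of how these pieces fit together, the sketch below feeds a small blueprint through the same Importer API the tests earlier in this diff use (validate() returns a (valid, logs) tuple, apply() returns a bool). The YAML content, the authentik_flows.flow model reference and the attribute values are assumptions made for the example and are not part of this changeset.

# Illustrative sketch only; the blueprint content below is hypothetical.
from authentik.blueprints.v1.importer import Importer

EXAMPLE_BLUEPRINT = """
version: 1
metadata:
  name: example
entries:
  - model: authentik_flows.flow          # assumed model reference
    state: present                       # new desired-state field
    conditions:
      # the entry is skipped unless every condition resolves to True
      - !Condition [AND, true, !Env [EXAMPLE_FLAG, "1"]]
    identifiers:
      slug: example-flow
    attrs:
      name: example-flow
      title: !If [!Env [EXAMPLE_PROD, ""], "Production flow", "Test flow"]
      designation: stage_configuration
"""

importer = Importer(EXAMPLE_BLUEPRINT)
valid, logs = importer.validate()
if valid:
    importer.apply()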
@@ -3,6 +3,7 @@ from contextlib import contextmanager
 from copy import deepcopy
 from typing import Any, Optional
 
+from dacite.config import Config
 from dacite.core import from_dict
 from dacite.exceptions import DaciteError
 from deepmerge import always_merger
@@ -20,6 +21,7 @@ from yaml import load
 from authentik.blueprints.v1.common import (
     Blueprint,
     BlueprintEntry,
+    BlueprintEntryDesiredState,
     BlueprintEntryState,
     BlueprintLoader,
     EntryInvalidError,
@@ -82,7 +84,9 @@ class Importer:
         self.logger = get_logger()
         import_dict = load(yaml_input, BlueprintLoader)
         try:
-            self.__import = from_dict(Blueprint, import_dict)
+            self.__import = from_dict(
+                Blueprint, import_dict, config=Config(cast=[BlueprintEntryDesiredState])
+            )
         except DaciteError as exc:
             raise EntryInvalidError from exc
         ctx = {}
@@ -128,15 +132,22 @@ class Importer:
             main_query = Q(pk=attrs["pk"])
         sub_query = Q()
         for identifier, value in attrs.items():
-            if isinstance(value, dict):
-                continue
             if identifier == "pk":
                 continue
+            if isinstance(value, dict):
+                sub_query &= Q(**{f"{identifier}__contains": value})
+            else:
                 sub_query &= Q(**{identifier: value})
 
         return main_query | sub_query
 
-    def _validate_single(self, entry: BlueprintEntry) -> BaseSerializer:
+    # pylint: disable-msg=too-many-locals
+    def _validate_single(self, entry: BlueprintEntry) -> Optional[BaseSerializer]:
        """Validate a single entry"""
+        if not entry.check_all_conditions_match(self.__import):
+            self.logger.debug("One or more conditions of this entry are not fulfilled, skipping")
+            return None
+
         model_app_label, model_name = entry.model.split(".")
         model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
         # Don't use isinstance since we don't want to check for inheritance
@@ -152,8 +163,6 @@ class Importer:
                 f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
             ) from exc
         return serializer
-        if entry.identifiers == {}:
-            raise EntryInvalidError("No identifiers")
 
         # If we try to validate without referencing a possible instance
         # we'll get a duplicate error, hence we load the model here and return
@@ -165,11 +174,19 @@ class Importer:
             if isinstance(value, dict) and "pk" in value:
                 del updated_identifiers[key]
                 updated_identifiers[f"{key}"] = value["pk"]
-        existing_models = model.objects.filter(self.__query_from_identifier(updated_identifiers))
+
+        query = self.__query_from_identifier(updated_identifiers)
+        if not query:
+            raise EntryInvalidError("No or invalid identifiers")
+
+        existing_models = model.objects.filter(query)
+
         serializer_kwargs = {}
-        if not isinstance(model(), BaseMetaModel) and existing_models.exists():
-            model_instance = existing_models.first()
+        model_instance = existing_models.first()
+        if not isinstance(model(), BaseMetaModel) and model_instance:
+            if entry.state == BlueprintEntryDesiredState.CREATED:
+                self.logger.debug("instance exists, skipping")
+                return None
             self.logger.debug(
                 "initialise serializer with instance",
                 model=model,
@@ -191,7 +208,7 @@ class Importer:
             full_data = self.__update_pks_for_attrs(entry.get_attrs(self.__import))
         except ValueError as exc:
             raise EntryInvalidError(exc) from exc
-        full_data.update(updated_identifiers)
+        always_merger.merge(full_data, updated_identifiers)
         serializer_kwargs["data"] = full_data
 
         serializer: Serializer = model().serializer(**serializer_kwargs)
@@ -234,12 +251,25 @@ class Importer:
             except EntryInvalidError as exc:
                 self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
                 return False
+            if not serializer:
+                continue
+
+            if entry.state in [
+                BlueprintEntryDesiredState.PRESENT,
+                BlueprintEntryDesiredState.CREATED,
+            ]:
                 model = serializer.save()
                 if "pk" in entry.identifiers:
                     self.__pk_map[entry.identifiers["pk"]] = model.pk
                 entry._state = BlueprintEntryState(model)
                 self.logger.debug("updated model", model=model)
+            elif entry.state == BlueprintEntryDesiredState.ABSENT:
+                instance: Optional[Model] = serializer.instance
+                if instance:
+                    instance.delete()
+                    self.logger.debug("deleted model", mode=instance)
+                    continue
+                self.logger.debug("entry to delete with no instance, skipping")
         return True
 
     def validate(self) -> tuple[bool, list[EventDict]]:
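For context on the state handling added above: the fixtures referenced by the earlier tests (fixtures/state_created.yaml and fixtures/state_absent.yaml) are not included in this diff, so the two sketches below are hypothetical stand-ins that only show the intent of each state. "created" creates once and never overwrites an existing match, while "absent" deletes whatever the identifiers match; model, slug and attribute values are assumptions.

# Hypothetical fixture contents; not taken from this changeset.
STATE_CREATED = """
version: 1
entries:
  - model: authentik_flows.flow
    state: created            # create once; an existing match is left untouched
    identifiers:
      slug: example-flow
    attrs:
      name: example-flow
      title: foo
      designation: stage_configuration
"""

STATE_ABSENT = """
version: 1
entries:
  - model: authentik_flows.flow
    state: absent             # delete the object matched by the identifiers, if any
    identifiers:
      slug: example-flow
"""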
@@ -196,9 +196,9 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
         if not should_cache:
             allowed_applications = self._get_allowed_applications(queryset)
         if should_cache:
-            LOGGER.debug("Caching allowed application list")
             allowed_applications = cache.get(user_app_cache_key(self.request.user.pk))
             if not allowed_applications:
+                LOGGER.debug("Caching allowed application list")
                 allowed_applications = self._get_allowed_applications(queryset)
                 cache.set(
                     user_app_cache_key(self.request.user.pk),
@@ -2,13 +2,20 @@
 from json import loads
 
 from django.db.models.query import QuerySet
+from django.http import Http404
 from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
 from django_filters.filterset import FilterSet
+from drf_spectacular.utils import OpenApiResponse, extend_schema, inline_serializer
+from guardian.shortcuts import get_objects_for_user
+from rest_framework.decorators import action
 from rest_framework.fields import CharField, IntegerField, JSONField
+from rest_framework.request import Request
+from rest_framework.response import Response
 from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
 from rest_framework.viewsets import ModelViewSet
 from rest_framework_guardian.filters import ObjectPermissionsFilter
 
+from authentik.api.decorators import permission_required
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import is_dict
 from authentik.core.models import Group, User
@@ -134,3 +141,63 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
         if self.request.user.has_perm("authentik_core.view_group"):
             return self._filter_queryset_for_list(queryset)
         return super().filter_queryset(queryset)
+
+    @permission_required(None, ["authentik_core.add_user"])
+    @extend_schema(
+        request=inline_serializer(
+            "UserAccountSerializer",
+            {
+                "pk": IntegerField(required=True),
+            },
+        ),
+        responses={
+            204: OpenApiResponse(description="User added"),
+            404: OpenApiResponse(description="User not found"),
+        },
+    )
+    @action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
+    # pylint: disable=unused-argument, invalid-name
+    def add_user(self, request: Request, pk: str) -> Response:
+        """Add user to group"""
+        group: Group = self.get_object()
+        user: User = (
+            get_objects_for_user(request.user, "authentik_core.view_user")
+            .filter(
+                pk=request.data.get("pk"),
+            )
+            .first()
+        )
+        if not user:
+            raise Http404
+        group.users.add(user)
+        return Response(status=204)
+
+    @permission_required(None, ["authentik_core.add_user"])
+    @extend_schema(
+        request=inline_serializer(
+            "UserAccountSerializer",
+            {
+                "pk": IntegerField(required=True),
+            },
+        ),
+        responses={
+            204: OpenApiResponse(description="User added"),
+            404: OpenApiResponse(description="User not found"),
+        },
+    )
+    @action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
+    # pylint: disable=unused-argument, invalid-name
+    def remove_user(self, request: Request, pk: str) -> Response:
+        """Add user to group"""
+        group: Group = self.get_object()
+        user: User = (
+            get_objects_for_user(request.user, "authentik_core.view_user")
+            .filter(
+                pk=request.data.get("pk"),
+            )
+            .first()
+        )
+        if not user:
+            raise Http404
+        group.users.remove(user)
+        return Response(status=204)
@@ -2,7 +2,7 @@
 from typing import Any
 
 from django.db.models import Model
-from rest_framework.fields import CharField, IntegerField
+from rest_framework.fields import CharField, IntegerField, JSONField
 from rest_framework.serializers import Serializer, SerializerMethodField, ValidationError
 
 
@@ -23,6 +23,12 @@ class PassiveSerializer(Serializer):
         return Model()
 
 
+class PropertyMappingPreviewSerializer(PassiveSerializer):
+    """Preview how the current user is mapped via the property mappings selected in a provider"""
+
+    preview = JSONField(read_only=True)
+
+
 class MetaNameSerializer(PassiveSerializer):
     """Add verbose names to response"""
 
@@ -1,6 +1,8 @@
 """authentik shell command"""
 import code
 import platform
+import sys
+import traceback
 
 from django.apps import apps
 from django.core.management.base import BaseCommand
@@ -89,6 +91,21 @@ class Command(BaseCommand):
             exec(options["command"], namespace)  # nosec # noqa
             return
+
+        try:
+            hook = sys.__interactivehook__
+        except AttributeError:
+            # Match the behavior of the cpython shell where a missing
+            # sys.__interactivehook__ is ignored.
+            pass
+        else:
+            try:
+                hook()
+            except Exception:  # pylint: disable=broad-except
+                # Match the behavior of the cpython shell where an error in
+                # sys.__interactivehook__ prints a warning and the exception
+                # and continues.
+                print("Failed calling sys.__interactivehook__")
+                traceback.print_exc()
         # Try to enable tab-complete
         try:
             import readline
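The shell startup change above defers to sys.__interactivehook__ when it exists. In stock CPython the site module installs that hook to restore interpreter history and tab completion, which is roughly the following. This is a simplified sketch of the default hook, not part of the diff, and the details vary between Python versions.

# Simplified sketch of what CPython's default sys.__interactivehook__ does.
import atexit
import os
import readline
import rlcompleter  # noqa: F401  # importing it registers the default completer

history = os.path.join(os.path.expanduser("~"), ".python_history")
try:
    readline.read_history_file(history)  # reload previous session history
except OSError:
    pass
atexit.register(readline.write_history_file, history)  # persist on exit
readline.parse_and_bind("tab: complete")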
@@ -2,10 +2,14 @@
 {% get_current_language as LANGUAGE_CODE %}
 
 <script>
-window.authentik = {};
-window.authentik.locale = "{{ LANGUAGE_CODE }}";
-window.authentik.config = JSON.parse('{{ config_json|escapejs }}');
-window.authentik.tenant = JSON.parse('{{ tenant_json|escapejs }}');
+window.authentik = {
+    locale: "{{ LANGUAGE_CODE }}",
+    config: JSON.parse('{{ config_json|escapejs }}'),
+    tenant: JSON.parse('{{ tenant_json|escapejs }}'),
+    versionFamily: "{{ version_family }}",
+    versionSubdomain: "{{ version_subdomain }}",
+    build: "{{ build }}",
+};
 window.addEventListener("DOMContentLoaded", () => {
     {% for message in messages %}
         window.dispatchEvent(
@@ -1,6 +1,8 @@
 """Test Applications API"""
 from json import loads
 
+from django.core.files.base import ContentFile
+from django.test.client import BOUNDARY, MULTIPART_CONTENT, encode_multipart
 from django.urls import reverse
 from rest_framework.test import APITestCase
 
@@ -21,7 +23,7 @@ class TestApplicationsAPI(APITestCase):
             redirect_uris="http://some-other-domain",
             authorization_flow=create_test_flow(),
         )
-        self.allowed = Application.objects.create(
+        self.allowed: Application = Application.objects.create(
             name="allowed",
             slug="allowed",
             meta_launch_url="https://goauthentik.io/%(username)s",
@@ -35,6 +37,31 @@ class TestApplicationsAPI(APITestCase):
             order=0,
         )
 
+    def test_set_icon(self):
+        """Test set_icon"""
+        file = ContentFile(b"text", "name")
+        self.client.force_login(self.user)
+        response = self.client.post(
+            reverse(
+                "authentik_api:application-set-icon",
+                kwargs={"slug": self.allowed.slug},
+            ),
+            data=encode_multipart(data={"file": file}, boundary=BOUNDARY),
+            content_type=MULTIPART_CONTENT,
+        )
+        self.assertEqual(response.status_code, 200)
+
+        app_raw = self.client.get(
+            reverse(
+                "authentik_api:application-detail",
+                kwargs={"slug": self.allowed.slug},
+            ),
+        )
+        app = loads(app_raw.content)
+        self.allowed.refresh_from_db()
+        self.assertEqual(self.allowed.get_meta_icon, app["meta_icon"])
+        self.assertEqual(self.allowed.meta_icon.read(), b"text")
+
     def test_check_access(self):
         """Test check_access operation"""
         self.client.force_login(self.user)
authentik/core/tests/test_groups_api.py (new file, 69 lines)
@@ -0,0 +1,69 @@
+"""Test Groups API"""
+from django.urls.base import reverse
+from rest_framework.test import APITestCase
+
+from authentik.core.models import Group, User
+from authentik.core.tests.utils import create_test_admin_user
+from authentik.lib.generators import generate_id
+
+
+class TestGroupsAPI(APITestCase):
+    """Test Groups API"""
+
+    def setUp(self) -> None:
+        self.admin = create_test_admin_user()
+        self.user = User.objects.create(username="test-user")
+
+    def test_add_user(self):
+        """Test add_user"""
+        group = Group.objects.create(name=generate_id())
+        self.client.force_login(self.admin)
+        res = self.client.post(
+            reverse("authentik_api:group-add-user", kwargs={"pk": group.pk}),
+            data={
+                "pk": self.user.pk,
+            },
+        )
+        self.assertEqual(res.status_code, 204)
+        group.refresh_from_db()
+        self.assertEqual(list(group.users.all()), [self.user])
+
+    def test_add_user_404(self):
+        """Test add_user"""
+        group = Group.objects.create(name=generate_id())
+        self.client.force_login(self.admin)
+        res = self.client.post(
+            reverse("authentik_api:group-add-user", kwargs={"pk": group.pk}),
+            data={
+                "pk": self.user.pk + 3,
+            },
+        )
+        self.assertEqual(res.status_code, 404)
+
+    def test_remove_user(self):
+        """Test remove_user"""
+        group = Group.objects.create(name=generate_id())
+        group.users.add(self.user)
+        self.client.force_login(self.admin)
+        res = self.client.post(
+            reverse("authentik_api:group-remove-user", kwargs={"pk": group.pk}),
+            data={
+                "pk": self.user.pk,
+            },
+        )
+        self.assertEqual(res.status_code, 204)
+        group.refresh_from_db()
+        self.assertEqual(list(group.users.all()), [])
+
+    def test_remove_user_404(self):
+        """Test remove_user"""
+        group = Group.objects.create(name=generate_id())
+        group.users.add(self.user)
+        self.client.force_login(self.admin)
+        res = self.client.post(
+            reverse("authentik_api:group-remove-user", kwargs={"pk": group.pk}),
+            data={
+                "pk": self.user.pk + 3,
+            },
+        )
+        self.assertEqual(res.status_code, 404)
@@ -6,6 +6,8 @@ from django.shortcuts import get_object_or_404
 from django.views.generic.base import TemplateView
 from rest_framework.request import Request
 
+from authentik import get_build_hash
+from authentik.admin.tasks import LOCAL_VERSION
 from authentik.api.v3.config import ConfigView
 from authentik.flows.models import Flow
 from authentik.tenants.api import CurrentTenantSerializer
@@ -17,6 +19,9 @@ class InterfaceView(TemplateView):
     def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
         kwargs["config_json"] = dumps(ConfigView(request=Request(self.request)).get_config().data)
         kwargs["tenant_json"] = dumps(CurrentTenantSerializer(self.request.tenant).data)
+        kwargs["version_family"] = f"{LOCAL_VERSION.major}.{LOCAL_VERSION.minor}"
+        kwargs["version_subdomain"] = f"version-{LOCAL_VERSION.major}-{LOCAL_VERSION.minor}"
+        kwargs["build"] = get_build_hash()
         return super().get_context_data(**kwargs)
 
@@ -1,6 +1,7 @@
 """Events middleware"""
 from functools import partial
-from typing import Callable
+from threading import Thread
+from typing import Any, Callable, Optional
 
 from django.conf import settings
 from django.contrib.sessions.models import Session
@@ -13,7 +14,6 @@ from guardian.models import UserObjectPermission
 
 from authentik.core.models import AuthenticatedSession, User
 from authentik.events.models import Event, EventAction, Notification
-from authentik.events.signals import EventNewThread
 from authentik.events.utils import model_to_dict
 from authentik.flows.models import FlowToken
 from authentik.lib.sentry import before_send
@@ -37,6 +37,25 @@ def should_log_model(model: Model) -> bool:
     return not isinstance(model, IGNORED_MODELS)
 
 
+class EventNewThread(Thread):
+    """Create Event in background thread"""
+
+    action: str
+    request: HttpRequest
+    kwargs: dict[str, Any]
+    user: Optional[User] = None
+
+    def __init__(self, action: str, request: HttpRequest, user: Optional[User] = None, **kwargs):
+        super().__init__()
+        self.action = action
+        self.request = request
+        self.user = user
+        self.kwargs = kwargs
+
+    def run(self):
+        Event.new(self.action, **self.kwargs).from_http(self.request, user=self.user)
+
+
 class AuditMiddleware:
     """Register handlers for duration of request-response that log creation/update/deletion
     of models"""
@@ -1,7 +1,6 @@
 """authentik events models"""
 import time
 from collections import Counter
-from copy import deepcopy
 from datetime import timedelta
 from inspect import currentframe
 from smtplib import SMTPException
@@ -46,7 +45,7 @@ from authentik.stages.email.utils import TemplateEmailMessage
 from authentik.tenants.models import Tenant
 from authentik.tenants.utils import DEFAULT_TENANT
 
-LOGGER = get_logger("authentik.events")
+LOGGER = get_logger()
 if TYPE_CHECKING:
     from rest_framework.serializers import Serializer
 
@@ -211,7 +210,7 @@ class Event(SerializerModel, ExpiringModel):
         current = currentframe()
         parent = current.f_back
         app = parent.f_globals["__name__"]
-        cleaned_kwargs = cleanse_dict(sanitize_dict(deepcopy(kwargs)))
+        cleaned_kwargs = cleanse_dict(sanitize_dict(kwargs))
         event = Event(action=action, app=app, context=cleaned_kwargs)
         return event
 
@@ -1,5 +1,4 @@
 """authentik events signal listener"""
-from threading import Thread
 from typing import Any, Optional
 
 from django.contrib.auth.signals import user_logged_in, user_logged_out
@@ -19,63 +18,40 @@ from authentik.stages.invitation.signals import invitation_used
 from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS
 from authentik.stages.user_write.signals import user_write
 
+SESSION_LOGIN_EVENT = "login_event"
-class EventNewThread(Thread):
-    """Create Event in background thread"""
-
-    action: str
-    request: HttpRequest
-    kwargs: dict[str, Any]
-    user: Optional[User] = None
-
-    def __init__(self, action: str, request: HttpRequest, user: Optional[User] = None, **kwargs):
-        super().__init__()
-        self.action = action
-        self.request = request
-        self.user = user
-        self.kwargs = kwargs
-
-    def run(self):
-        Event.new(self.action, **self.kwargs).from_http(self.request, user=self.user)
 
 
 @receiver(user_logged_in)
 # pylint: disable=unused-argument
 def on_user_logged_in(sender, request: HttpRequest, user: User, **_):
     """Log successful login"""
-    thread = EventNewThread(EventAction.LOGIN, request)
+    kwargs = {}
     if SESSION_KEY_PLAN in request.session:
         flow_plan: FlowPlan = request.session[SESSION_KEY_PLAN]
         if PLAN_CONTEXT_SOURCE in flow_plan.context:
             # Login request came from an external source, save it in the context
-            thread.kwargs[PLAN_CONTEXT_SOURCE] = flow_plan.context[PLAN_CONTEXT_SOURCE]
+            kwargs[PLAN_CONTEXT_SOURCE] = flow_plan.context[PLAN_CONTEXT_SOURCE]
         if PLAN_CONTEXT_METHOD in flow_plan.context:
-            thread.kwargs[PLAN_CONTEXT_METHOD] = flow_plan.context[PLAN_CONTEXT_METHOD]
             # Save the login method used
-            thread.kwargs[PLAN_CONTEXT_METHOD_ARGS] = flow_plan.context.get(
-                PLAN_CONTEXT_METHOD_ARGS, {}
-            )
-    thread.user = user
-    thread.run()
+            kwargs[PLAN_CONTEXT_METHOD] = flow_plan.context[PLAN_CONTEXT_METHOD]
+            kwargs[PLAN_CONTEXT_METHOD_ARGS] = flow_plan.context.get(PLAN_CONTEXT_METHOD_ARGS, {})
+    event = Event.new(EventAction.LOGIN, **kwargs).from_http(request, user=user)
+    request.session[SESSION_LOGIN_EVENT] = event
 
 
 @receiver(user_logged_out)
 # pylint: disable=unused-argument
 def on_user_logged_out(sender, request: HttpRequest, user: User, **_):
     """Log successfully logout"""
-    thread = EventNewThread(EventAction.LOGOUT, request)
-    thread.user = user
-    thread.run()
+    Event.new(EventAction.LOGOUT).from_http(request, user=user)
 
 
 @receiver(user_write)
 # pylint: disable=unused-argument
 def on_user_write(sender, request: HttpRequest, user: User, data: dict[str, Any], **kwargs):
     """Log User write"""
-    thread = EventNewThread(EventAction.USER_WRITE, request, **data)
-    thread.kwargs["created"] = kwargs.get("created", False)
-    thread.user = user
-    thread.run()
+    data["created"] = kwargs.get("created", False)
+    Event.new(EventAction.USER_WRITE, **data).from_http(request, user=user)
 
 
 @receiver(login_failed)
@@ -89,26 +65,23 @@ def on_login_failed(
     **kwargs,
 ):
     """Failed Login, authentik custom event"""
-    thread = EventNewThread(EventAction.LOGIN_FAILED, request, **credentials, stage=stage, **kwargs)
-    thread.run()
+    Event.new(EventAction.LOGIN_FAILED, **credentials, stage=stage, **kwargs).from_http(request)
 
 
 @receiver(invitation_used)
 # pylint: disable=unused-argument
 def on_invitation_used(sender, request: HttpRequest, invitation: Invitation, **_):
     """Log Invitation usage"""
-    thread = EventNewThread(
-        EventAction.INVITE_USED, request, invitation_uuid=invitation.invite_uuid.hex
-    )
-    thread.run()
+    Event.new(EventAction.INVITE_USED, invitation_uuid=invitation.invite_uuid.hex).from_http(
+        request
+    )
 
 
 @receiver(password_changed)
 # pylint: disable=unused-argument
 def on_password_changed(sender, user: User, password: str, **_):
     """Log password change"""
-    thread = EventNewThread(EventAction.PASSWORD_SET, None, user=user)
-    thread.run()
+    Event.new(EventAction.PASSWORD_SET).from_http(None, user=user)
 
 
 @receiver(post_save, sender=Event)
@@ -1,5 +1,6 @@
 """event utilities"""
 import re
+from copy import copy
 from dataclasses import asdict, is_dataclass
 from pathlib import Path
 from types import GeneratorType
@@ -87,9 +88,15 @@ def sanitize_item(value: Any) -> Any:
     """Sanitize a single item, ensure it is JSON parsable"""
     if is_dataclass(value):
         # Because asdict calls `copy.deepcopy(obj)` on everything that's not tuple/dict,
-        # and deepcopy doesn't work with HttpRequests (neither django nor rest_framework).
+        # and deepcopy doesn't work with HttpRequest (neither django nor rest_framework).
+        # (more specifically doesn't work with ResolverMatch)
+        # rest_framework's custom Request class makes this more complicated as it also holds a
+        # thread lock.
+        # Since this class is mainly used for Events which already hold the http request context
+        # we just remove the http_request from the shallow policy request
         # Currently, the only dataclass that actually holds an http request is a PolicyRequest
-        if isinstance(value, PolicyRequest):
+        if isinstance(value, PolicyRequest) and value.http_request is not None:
+            value: PolicyRequest = copy(value)
             value.http_request = None
         value = asdict(value)
     if isinstance(value, dict):
@@ -71,6 +71,7 @@ class FlowSerializer(ModelSerializer):
             "export_url",
             "layout",
             "denied_action",
+            "authentication",
         ]
         extra_kwargs = {
             "background": {"read_only": True},
@@ -1,4 +1,6 @@
 """flow exceptions"""
+from typing import Optional
+
 from django.utils.translation import gettext_lazy as _
 
 from authentik.lib.sentry import SentryIgnoredException
@@ -6,15 +8,15 @@ from authentik.policies.types import PolicyResult
 
 
 class FlowNonApplicableException(SentryIgnoredException):
-    """Flow does not apply to current user (denied by policy)."""
+    """Flow does not apply to current user (denied by policy, or otherwise)."""
 
-    policy_result: PolicyResult
+    policy_result: Optional[PolicyResult] = None
 
     @property
     def messages(self) -> str:
         """Get messages from policy result, fallback to generic reason"""
-        if len(self.policy_result.messages) < 1:
-            return _("Flow does not apply to current user (denied by policy).")
+        if not self.policy_result or len(self.policy_result.messages) < 1:
+            return _("Flow does not apply to current user.")
         return "\n".join(self.policy_result.messages)
 
authentik/flows/migrations/0024_flow_authentication.py (new file, 27 lines)
@@ -0,0 +1,27 @@
+# Generated by Django 4.1.3 on 2022-11-30 09:04
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_flows", "0023_flow_denied_action"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="flow",
+            name="authentication",
+            field=models.TextField(
+                choices=[
+                    ("none", "None"),
+                    ("require_authenticated", "Require Authenticated"),
+                    ("require_unauthenticated", "Require Unauthenticated"),
+                    ("require_superuser", "Require Superuser"),
+                ],
+                default="none",
+                help_text="Required level of authentication and authorization to access a flow.",
+            ),
+        ),
+    ]
@@ -14,6 +14,7 @@ from authentik.core.models import Token
 from authentik.core.types import UserSettingSerializer
 from authentik.flows.challenge import FlowLayout
 from authentik.lib.models import InheritanceForeignKey, SerializerModel
+from authentik.lib.utils.reflection import class_to_path
 from authentik.policies.models import PolicyBindingModel
 
 if TYPE_CHECKING:
@@ -23,6 +24,15 @@ if TYPE_CHECKING:
 LOGGER = get_logger()
 
 
+class FlowAuthenticationRequirement(models.TextChoices):
+    """Required level of authentication and authorization to access a flow"""
+
+    NONE = "none"
+    REQUIRE_AUTHENTICATED = "require_authenticated"
+    REQUIRE_UNAUTHENTICATED = "require_unauthenticated"
+    REQUIRE_SUPERUSER = "require_superuser"
+
+
 class NotConfiguredAction(models.TextChoices):
     """Decides how the FlowExecutor should proceed when a stage isn't configured"""
 
@@ -101,6 +111,8 @@ def in_memory_stage(view: type["StageView"], **kwargs) -> Stage:
     # we set the view as a separate property and reference a generic function
     # that returns that member
     setattr(stage, "__in_memory_type", view)
+    setattr(stage, "name", _("Dynamic In-memory stage: %(doc)s" % {"doc": view.__doc__}))
+    setattr(stage._meta, "verbose_name", class_to_path(view))
     for key, value in kwargs.items():
         setattr(stage, key, value)
     return stage
@@ -152,6 +164,12 @@ class Flow(SerializerModel, PolicyBindingModel):
         help_text=_("Configure what should happen when a flow denies access to a user."),
     )
 
+    authentication = models.TextField(
+        choices=FlowAuthenticationRequirement.choices,
+        default=FlowAuthenticationRequirement.NONE,
+        help_text=_("Required level of authentication and authorization to access a flow."),
+    )
+
     @property
     def background_url(self) -> str:
         """Get the URL to the background image. If the name is /static or starts with http
@@ -13,7 +13,14 @@ from authentik.events.models import cleanse_dict
 from authentik.flows.apps import HIST_FLOWS_PLAN_TIME
 from authentik.flows.exceptions import EmptyFlowException, FlowNonApplicableException
 from authentik.flows.markers import ReevaluateMarker, StageMarker
-from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, Stage, in_memory_stage
+from authentik.flows.models import (
+    Flow,
+    FlowAuthenticationRequirement,
+    FlowDesignation,
+    FlowStageBinding,
+    Stage,
+    in_memory_stage,
+)
 from authentik.lib.config import CONFIG
 from authentik.policies.engine import PolicyEngine
 
@@ -117,11 +124,30 @@ class FlowPlanner:
         self.flow = flow
         self._logger = get_logger().bind(flow_slug=flow.slug)
 
+    def _check_authentication(self, request: HttpRequest):
+        """Check the flow's authentication level is matched by `request`"""
+        if (
+            self.flow.authentication == FlowAuthenticationRequirement.REQUIRE_AUTHENTICATED
+            and not request.user.is_authenticated
+        ):
+            raise FlowNonApplicableException()
+        if (
+            self.flow.authentication == FlowAuthenticationRequirement.REQUIRE_UNAUTHENTICATED
+            and request.user.is_authenticated
+        ):
+            raise FlowNonApplicableException()
+        if (
+            self.flow.authentication == FlowAuthenticationRequirement.REQUIRE_SUPERUSER
+            and not request.user.is_superuser
+        ):
+            raise FlowNonApplicableException()
+
     def plan(
         self, request: HttpRequest, default_context: Optional[dict[str, Any]] = None
     ) -> FlowPlan:
         """Check each of the flows' policies, check policies for each stage with PolicyBinding
         and return ordered list"""
+        self._check_authentication(request)
         with Hub.current.start_span(
             op="authentik.flow.planner.plan", description=self.flow.slug
         ) as span:
@ -1,6 +1,7 @@
|
|||||||
"""flow planner tests"""
|
"""flow planner tests"""
|
||||||
from unittest.mock import MagicMock, Mock, PropertyMock, patch
|
from unittest.mock import MagicMock, Mock, PropertyMock, patch
|
||||||
|
|
||||||
|
from django.contrib.auth.models import AnonymousUser
|
||||||
from django.contrib.sessions.middleware import SessionMiddleware
|
from django.contrib.sessions.middleware import SessionMiddleware
|
||||||
from django.core.cache import cache
|
from django.core.cache import cache
|
||||||
from django.test import RequestFactory, TestCase
|
from django.test import RequestFactory, TestCase
|
||||||
@ -8,10 +9,10 @@ from django.urls import reverse
|
|||||||
from guardian.shortcuts import get_anonymous_user
|
from guardian.shortcuts import get_anonymous_user
|
||||||
|
|
||||||
from authentik.core.models import User
|
from authentik.core.models import User
|
||||||
from authentik.core.tests.utils import create_test_flow
|
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
|
||||||
from authentik.flows.exceptions import EmptyFlowException, FlowNonApplicableException
|
from authentik.flows.exceptions import EmptyFlowException, FlowNonApplicableException
|
||||||
from authentik.flows.markers import ReevaluateMarker, StageMarker
|
from authentik.flows.markers import ReevaluateMarker, StageMarker
|
||||||
from authentik.flows.models import FlowDesignation, FlowStageBinding
|
from authentik.flows.models import FlowAuthenticationRequirement, FlowDesignation, FlowStageBinding
|
||||||
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner, cache_key
|
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner, cache_key
|
||||||
from authentik.lib.tests.utils import dummy_get_response
|
from authentik.lib.tests.utils import dummy_get_response
|
||||||
from authentik.policies.dummy.models import DummyPolicy
|
from authentik.policies.dummy.models import DummyPolicy
|
||||||
@ -43,6 +44,30 @@ class TestFlowPlanner(TestCase):
|
|||||||
planner = FlowPlanner(flow)
|
planner = FlowPlanner(flow)
|
||||||
planner.plan(request)
|
planner.plan(request)
|
||||||
|
|
||||||
|
def test_authentication(self):
|
||||||
|
"""Test flow authentication"""
|
||||||
|
flow = create_test_flow()
|
||||||
|
flow.authentication = FlowAuthenticationRequirement.NONE
|
||||||
|
request = self.request_factory.get(
|
||||||
|
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
|
||||||
|
)
|
||||||
|
request.user = AnonymousUser()
|
||||||
|
planner = FlowPlanner(flow)
|
||||||
|
planner.allow_empty_flows = True
|
||||||
|
planner.plan(request)
|
||||||
|
|
||||||
|
with self.assertRaises(FlowNonApplicableException):
|
||||||
|
flow.authentication = FlowAuthenticationRequirement.REQUIRE_AUTHENTICATED
|
||||||
|
FlowPlanner(flow).plan(request)
|
||||||
|
with self.assertRaises(FlowNonApplicableException):
|
||||||
|
flow.authentication = FlowAuthenticationRequirement.REQUIRE_SUPERUSER
|
||||||
|
FlowPlanner(flow).plan(request)
|
||||||
|
|
||||||
|
request.user = create_test_admin_user()
|
||||||
|
planner = FlowPlanner(flow)
|
||||||
|
planner.allow_empty_flows = True
|
||||||
|
planner.plan(request)
|
||||||
|
|
||||||
@patch(
|
@patch(
|
||||||
"authentik.policies.engine.PolicyEngine.result",
|
"authentik.policies.engine.PolicyEngine.result",
|
||||||
POLICY_RETURN_FALSE,
|
POLICY_RETURN_FALSE,
|
||||||
|
@@ -378,7 +378,9 @@ class FlowExecutorView(APIView):
             # an expression policy or authentik itself, so we don't
             # check if its an absolute URL or a relative one
             self.cancel()
-            return redirect(self.plan.context.get(PLAN_CONTEXT_REDIRECT))
+            return to_stage_response(
+                self.request, redirect(self.plan.context.get(PLAN_CONTEXT_REDIRECT))
+            )
         next_param = self.request.session.get(SESSION_KEY_GET, {}).get(
             NEXT_ARG_NAME, "authentik_core:root-redirect"
         )
@@ -29,10 +29,9 @@ debug: false

 log_level: info

-# Error reporting, sends stacktrace to sentry.beryju.org
 error_reporting:
   enabled: false
-  sentry_dsn: https://a579bb09306d4f8b8d8847c052d3a1d3@sentry.beryju.org/8
+  sentry_dsn: https://151ba72610234c4c97c5bcff4e1cffd8@o4504163616882688.ingest.sentry.io/4504163677503489
   environment: customer
   send_pii: false
   sample_rate: 0.1
@@ -42,7 +42,7 @@ class BaseEvaluator:
             "ak_user_by": BaseEvaluator.expr_user_by,
             "ak_user_has_authenticator": BaseEvaluator.expr_func_user_has_authenticator,
             "ak_create_event": self.expr_event_create,
-            "ak_logger": get_logger(self._filename),
+            "ak_logger": get_logger(self._filename).bind(),
             "requests": get_http_session(),
             "ip_address": ip_address,
             "ip_network": ip_network,
@@ -66,6 +66,9 @@ def sentry_init(**sentry_init_kwargs):
     kwargs = {
         "environment": sentry_env,
         "send_default_pii": CONFIG.y_bool("error_reporting.send_pii", False),
+        "_experiments": {
+            "profiles_sample_rate": float(CONFIG.y("error_reporting.sample_rate", 0.1)),
+        },
     }
     kwargs.update(**sentry_init_kwargs)
     # pylint: disable=abstract-class-instantiated
@@ -24,17 +24,17 @@ class FilePathSerializer(PassiveSerializer):
     url = CharField()


-def set_file(request: Request, obj: Model, field: str):
+def set_file(request: Request, obj: Model, field_name: str):
     """Upload file"""
-    field = getattr(obj, field)
-    icon = request.FILES.get("file", None)
+    field = getattr(obj, field_name)
+    file = request.FILES.get("file", None)
     clear = request.data.get("clear", "false").lower() == "true"
     if clear:
         # .delete() saves the model by default
         field.delete()
         return Response({})
-    if icon:
-        field = icon
+    if file:
+        setattr(obj, field_name, file)
         try:
             obj.save()
         except PermissionError as exc:
@@ -87,7 +87,7 @@ class PolicyEvaluator(BaseEvaluator):
             LOGGER.warning(
                 "Expression policy returned None",
                 src=expression_source,
-                req=self._context,
+                policy=self._filename,
             )
             policy_result.passing = False
         if result:
authentik/policies/migrations/0009_alter_policy_name.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+# Generated by Django 4.1.4 on 2022-12-25 13:46
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_policies", "0008_policybinding_authentik_p_policy__534e15_idx_and_more"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="policy",
+            name="name",
+            field=models.TextField(default="unnamed-policy"),
+            preserve_default=False,
+        ),
+    ]
@@ -159,7 +159,7 @@ class Policy(SerializerModel, CreatedUpdatedModel):

     policy_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)

-    name = models.TextField(blank=True, null=True)
+    name = models.TextField()

     execution_logging = models.BooleanField(
         default=False,
@@ -150,6 +150,8 @@ class PasswordPolicy(Policy):
         results = zxcvbn(password[:100], user_inputs)
         LOGGER.debug("password failed", check="zxcvbn", score=results["score"])
         result = PolicyResult(results["score"] > self.zxcvbn_score_threshold)
+        if not result.passing:
+            result.messages += tuple((_("Password is too weak."),))
         if isinstance(results["feedback"]["warning"], list):
            result.messages += tuple(results["feedback"]["warning"])
         if isinstance(results["feedback"]["suggestions"], list):
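Illustration only (not from the changeset): the comparison above passes only when the zxcvbn score exceeds the configured threshold. Assuming the zxcvbn Python package is installed, the score being compared behaves like this:

from zxcvbn import zxcvbn

results = zxcvbn("password")      # score ranges from 0 (guessable) to 4 (very strong)
threshold = 3                     # mirrors zxcvbn_score_threshold from the hunk above
passing = results["score"] > threshold
print(results["score"], passing, results["feedback"]["warning"])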
@@ -28,13 +28,21 @@ class TestPasswordPolicyZxcvbn(TestCase):
         policy = PasswordPolicy.objects.create(
             check_zxcvbn=True,
             check_static_rules=False,
+            zxcvbn_score_threshold=3,
             name="test_false",
         )
         request = PolicyRequest(get_anonymous_user())
         request.context[PLAN_CONTEXT_PROMPT] = {"password": "password"}  # nosec
         result: PolicyResult = policy.passes(request)
         self.assertFalse(result.passing, result.messages)
-        self.assertEqual(result.messages[0], "Add another word or two. Uncommon words are better.")
+        self.assertEqual(result.messages[0], "Password is too weak.")
+        self.assertEqual(result.messages[1], "Add another word or two. Uncommon words are better.")
+
+        request.context[PLAN_CONTEXT_PROMPT] = {"password": "Awdccdw1234"}  # nosec
+        result: PolicyResult = policy.passes(request)
+        self.assertFalse(result.passing, result.messages)
+        self.assertEqual(result.messages[0], "Password is too weak.")
+        self.assertEqual(len(result.messages), 1)

     def test_true(self):
         """Positive password case"""
@@ -101,12 +101,14 @@ class PolicyProcess(PROCESS_CLASS):
             LOGGER.debug("P_ENG(proc): error", exc=src_exc)
             policy_result = PolicyResult(False, str(src_exc))
         policy_result.source_binding = self.binding
-        if self.request.should_cache:
+        should_cache = self.request.should_cache
+        if should_cache:
             key = cache_key(self.binding, self.request)
             cache.set(key, policy_result, CACHE_TIMEOUT)
         LOGGER.debug(
-            "P_ENG(proc): finished and cached ",
+            "P_ENG(proc): finished",
             policy=self.binding.policy,
+            cached=should_cache,
             result=policy_result,
             # this is used for filtering in access checking where logs are sent to the admin
             process="PolicyProcess",
@@ -2,6 +2,7 @@
 from django.contrib.auth.models import AnonymousUser
 from django.core.cache import cache
 from django.test import RequestFactory, TestCase
+from django.urls import resolve, reverse
 from guardian.shortcuts import get_anonymous_user

 from authentik.core.models import Application, Group, User
@@ -129,8 +130,9 @@ class TestPolicyProcess(TestCase):
         )
         binding = PolicyBinding(policy=policy, target=Application.objects.create(name="test"))

-        http_request = self.factory.get("/")
+        http_request = self.factory.get(reverse("authentik_core:impersonate-end"))
         http_request.user = self.user
+        http_request.resolver_match = resolve(reverse("authentik_core:impersonate-end"))

         request = PolicyRequest(self.user)
         request.set_http_request(http_request)
@@ -8,11 +8,12 @@ from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.viewsets import ModelViewSet

+from authentik.api.decorators import permission_required
 from authentik.core.api.providers import ProviderSerializer
 from authentik.core.api.used_by import UsedByMixin
-from authentik.core.api.utils import PassiveSerializer
+from authentik.core.api.utils import PassiveSerializer, PropertyMappingPreviewSerializer
 from authentik.core.models import Provider
-from authentik.providers.oauth2.models import OAuth2Provider
+from authentik.providers.oauth2.models import OAuth2Provider, RefreshToken, ScopeMapping


 class OAuth2ProviderSerializer(ProviderSerializer):
@@ -115,7 +116,7 @@ class OAuth2ProviderViewSet(UsedByMixin, ModelViewSet):
         )
         data["logout"] = request.build_absolute_uri(
             reverse(
-                "authentik_core:if-session-end",
+                "authentik_providers_oauth2:end-session",
                 kwargs={"application_slug": provider.application.slug},
             )
         )
@@ -128,3 +129,28 @@ class OAuth2ProviderViewSet(UsedByMixin, ModelViewSet):
         except Provider.application.RelatedObjectDoesNotExist:  # pylint: disable=no-member
             pass
         return Response(data)
+
+    @permission_required(
+        "authentik_providers_oauth2.view_oauth2provider",
+    )
+    @extend_schema(
+        responses={
+            200: PropertyMappingPreviewSerializer(),
+            400: OpenApiResponse(description="Bad request"),
+        },
+    )
+    @action(detail=True, methods=["GET"])
+    # pylint: disable=invalid-name, unused-argument
+    def preview_user(self, request: Request, pk: int) -> Response:
+        """Preview user data for provider"""
+        provider: OAuth2Provider = self.get_object()
+        temp_token = RefreshToken()
+        temp_token.scope = ScopeMapping.objects.filter(provider=provider).values_list(
+            "scope_name", flat=True
+        )
+        temp_token.provider = provider
+        temp_token.user = request.user
+        serializer = PropertyMappingPreviewSerializer(
+            instance={"preview": temp_token.create_id_token(request.user, request).to_dict()}
+        )
+        return Response(serializer.data)
@@ -12,7 +12,7 @@ from rest_framework.viewsets import GenericViewSet
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.users import UserSerializer
 from authentik.core.api.utils import MetaNameSerializer
-from authentik.providers.oauth2.api.provider import OAuth2ProviderSerializer
+from authentik.providers.oauth2.api.providers import OAuth2ProviderSerializer
 from authentik.providers.oauth2.models import AuthorizationCode, RefreshToken
@@ -31,3 +31,9 @@ SCOPE_GITHUB_USER_EMAIL = "user:email"
 SCOPE_GITHUB_ORG_READ = "read:org"

 ACR_AUTHENTIK_DEFAULT = "goauthentik.io/providers/oauth2/default"
+
+# https://datatracker.ietf.org/doc/html/draft-ietf-oauth-amr-values-06#section-2
+AMR_PASSWORD = "pwd"  # nosec
+AMR_MFA = "mfa"
+AMR_OTP = "otp"
+AMR_WEBAUTHN = "user"
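For orientation only (not part of the changeset): these constants feed the amr claim of the ID token, per the draft-ietf-oauth-amr-values document linked above. A decoded token for a password-plus-MFA login would carry roughly this shape; the payload below is invented for illustration.

id_token_claims = {
    "iss": "https://authentik.example.com/application/o/app/",  # hypothetical issuer
    "acr": "goauthentik.io/providers/oauth2/default",            # ACR_AUTHENTIK_DEFAULT
    "amr": ["pwd", "mfa"],                                       # AMR_PASSWORD, AMR_MFA
}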
@@ -4,12 +4,14 @@ import binascii
 import json
 from dataclasses import asdict, dataclass, field
 from datetime import datetime, timedelta
+from functools import cached_property
 from hashlib import sha256
 from typing import Any, Optional
 from urllib.parse import urlparse, urlunparse

 from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
 from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
+from cryptography.hazmat.primitives.asymmetric.types import PRIVATE_KEY_TYPES
 from dacite.core import from_dict
 from django.db import models
 from django.http import HttpRequest
@@ -20,14 +22,20 @@ from rest_framework.serializers import Serializer

 from authentik.core.models import ExpiringModel, PropertyMapping, Provider, User
 from authentik.crypto.models import CertificateKeyPair
-from authentik.events.models import Event, EventAction
-from authentik.events.utils import get_user
+from authentik.events.models import Event
+from authentik.events.signals import SESSION_LOGIN_EVENT
 from authentik.lib.generators import generate_code_fixed_length, generate_id, generate_key
 from authentik.lib.models import SerializerModel
 from authentik.lib.utils.time import timedelta_string_validator
 from authentik.providers.oauth2.apps import AuthentikProviderOAuth2Config
-from authentik.providers.oauth2.constants import ACR_AUTHENTIK_DEFAULT
+from authentik.providers.oauth2.constants import (
+    ACR_AUTHENTIK_DEFAULT,
+    AMR_MFA,
+    AMR_PASSWORD,
+    AMR_WEBAUTHN,
+)
 from authentik.sources.oauth.models import OAuthSource
+from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS


 class ClientTypes(models.TextChoices):
@@ -122,7 +130,7 @@ class ScopeMapping(PropertyMapping):

     @property
     def serializer(self) -> type[Serializer]:
-        from authentik.providers.oauth2.api.scope import ScopeMappingSerializer
+        from authentik.providers.oauth2.api.scopes import ScopeMappingSerializer

         return ScopeMappingSerializer

@@ -253,7 +261,8 @@ class OAuth2Provider(Provider):
         token.access_token = token.create_access_token(user, request)
         return token

-    def get_jwt_key(self) -> tuple[str, str]:
+    @cached_property
+    def jwt_key(self) -> tuple[str | PRIVATE_KEY_TYPES, str]:
         """Get either the configured certificate or the client secret"""
         if not self.signing_key:
             # No Certificate at all, assume HS256
@@ -261,9 +270,9 @@ class OAuth2Provider(Provider):
         key: CertificateKeyPair = self.signing_key
         private_key = key.private_key
         if isinstance(private_key, RSAPrivateKey):
-            return key.key_data, JWTAlgorithms.RS256
+            return private_key, JWTAlgorithms.RS256
         if isinstance(private_key, EllipticCurvePrivateKey):
-            return key.key_data, JWTAlgorithms.ES256
+            return private_key, JWTAlgorithms.ES256
         raise Exception(f"Invalid private key type: {type(private_key)}")

     def get_issuer(self, request: HttpRequest) -> Optional[str]:
@@ -294,7 +303,7 @@ class OAuth2Provider(Provider):

     @property
     def serializer(self) -> type[Serializer]:
-        from authentik.providers.oauth2.api.provider import OAuth2ProviderSerializer
+        from authentik.providers.oauth2.api.providers import OAuth2ProviderSerializer

         return OAuth2ProviderSerializer

@@ -306,10 +315,9 @@ class OAuth2Provider(Provider):
         headers = {}
         if self.signing_key:
             headers["kid"] = self.signing_key.kid
-        key, alg = self.get_jwt_key()
+        key, alg = self.jwt_key
         # If the provider does not have an RSA Key assigned, it was switched to Symmetric
         self.refresh_from_db()
-        # pyright: reportGeneralTypeIssues=false
         return encode(payload, key, algorithm=alg, headers=headers)

     class Meta:
@@ -392,6 +400,7 @@ class IDToken:
     iat: Optional[int] = None
     auth_time: Optional[int] = None
     acr: Optional[str] = ACR_AUTHENTIK_DEFAULT
+    amr: Optional[list[str]] = None

     c_hash: Optional[str] = None
     nonce: Optional[str] = None
@@ -466,6 +475,7 @@ class RefreshToken(SerializerModel, ExpiringModel, BaseGrantModel):
         token["uid"] = generate_key()
         return self.provider.encode(token)

+    # pylint: disable=too-many-locals
     def create_id_token(self, user: User, request: HttpRequest) -> IDToken:
         """Creates the id_token.
         See: http://openid.net/specs/openid-connect-core-1_0.html#IDToken"""
@@ -485,21 +495,27 @@ class RefreshToken(SerializerModel, ExpiringModel, BaseGrantModel):
                     f"selected: {self.provider.sub_mode}"
                 )
             )
+        amr = []
         # Convert datetimes into timestamps.
         now = datetime.now()
         iat_time = int(now.timestamp())
         exp_time = int(self.expires.timestamp())
         # We use the timestamp of the user's last successful login (EventAction.LOGIN) for auth_time
-        auth_event = (
-            Event.objects.filter(action=EventAction.LOGIN, user=get_user(user))
-            .order_by("-created")
-            .first()
-        )
         # Fallback in case we can't find any login events
         auth_time = now
-        if auth_event:
+        if SESSION_LOGIN_EVENT in request.session:
+            auth_event: Event = request.session[SESSION_LOGIN_EVENT]
             auth_time = auth_event.created
+            # Also check which method was used for authentication
+            method = auth_event.context.get(PLAN_CONTEXT_METHOD, "")
+            method_args = auth_event.context.get(PLAN_CONTEXT_METHOD_ARGS, {})
+            if method == "password":
+                amr.append(AMR_PASSWORD)
+            if method == "auth_webauthn_pwl":
+                amr.append(AMR_WEBAUTHN)
+            if "mfa_devices" in method_args:
+                if len(amr) > 0:
+                    amr.append(AMR_MFA)

         auth_timestamp = int(auth_time.timestamp())
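A brief, hedged sketch of what the get_jwt_key() to jwt_key change above means for callers: @cached_property exposes the key lookup as an attribute that is computed once per instance and then memoised, so call sites drop the parentheses. The class below is a minimal stand-in, not the real OAuth2Provider.

from functools import cached_property


class ProviderSketch:
    """Stand-in showing only the call-site change."""

    @cached_property
    def jwt_key(self) -> tuple[str, str]:
        # The (possibly expensive) key resolution runs once; later accesses reuse the result.
        return ("client-secret", "HS256")


provider = ProviderSketch()
key, alg = provider.jwt_key        # new style: attribute access
# old style was: key, alg = provider.get_jwt_key()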
authentik/providers/oauth2/tests/test_api.py (new file, 47 lines)
@@ -0,0 +1,47 @@
+"""Test OAuth2 API"""
+from json import loads
+
+from django.urls import reverse
+from rest_framework.test import APITestCase
+
+from authentik.blueprints.tests import apply_blueprint
+from authentik.core.models import Application
+from authentik.core.tests.utils import create_test_admin_user, create_test_flow
+from authentik.lib.generators import generate_id, generate_key
+from authentik.providers.oauth2.models import OAuth2Provider, ScopeMapping
+
+
+class TestAPI(APITestCase):
+    """Test api view"""
+
+    @apply_blueprint("system/providers-oauth2.yaml")
+    def setUp(self) -> None:
+        self.provider: OAuth2Provider = OAuth2Provider.objects.create(
+            name="test",
+            client_id=generate_id(),
+            client_secret=generate_key(),
+            authorization_flow=create_test_flow(),
+            redirect_uris="http://testserver",
+        )
+        self.provider.property_mappings.set(ScopeMapping.objects.all())
+        self.app = Application.objects.create(name="test", slug="test", provider=self.provider)
+        self.user = create_test_admin_user()
+        self.client.force_login(self.user)
+
+    def test_preview(self):
+        """Test Preview API Endpoint"""
+        response = self.client.get(
+            reverse("authentik_api:oauth2provider-preview-user", kwargs={"pk": self.provider.pk})
+        )
+        self.assertEqual(response.status_code, 200)
+        body = loads(response.content.decode())["preview"]
+        self.assertEqual(body["iss"], "http://testserver/application/o/test/")
+
+    def test_setup_urls(self):
+        """Test Setup URLs API Endpoint"""
+        response = self.client.get(
+            reverse("authentik_api:oauth2provider-setup-urls", kwargs={"pk": self.provider.pk})
+        )
+        self.assertEqual(response.status_code, 200)
+        body = loads(response.content.decode())
+        self.assertEqual(body["issuer"], "http://testserver/application/o/test/")
@@ -143,7 +143,7 @@ class TestTokenClientCredentials(OAuthTestCase):
         self.assertEqual(response.status_code, 200)
         body = loads(response.content.decode())
         self.assertEqual(body["token_type"], "bearer")
-        _, alg = self.provider.get_jwt_key()
+        _, alg = self.provider.jwt_key
         jwt = decode(
             body["access_token"],
             key=self.provider.signing_key.public_key,
@@ -210,7 +210,7 @@ class TestTokenClientCredentialsJWTSource(OAuthTestCase):
         self.assertEqual(response.status_code, 200)
         body = loads(response.content.decode())
         self.assertEqual(body["token_type"], "bearer")
-        _, alg = self.provider.get_jwt_key()
+        _, alg = self.provider.jwt_key
         jwt = decode(
             body["access_token"],
             key=self.provider.signing_key.public_key,
@@ -29,7 +29,7 @@ class OAuthTestCase(TestCase):

     def validate_jwt(self, token: RefreshToken, provider: OAuth2Provider) -> dict[str, Any]:
         """Validate that all required fields are set"""
-        key, alg = provider.get_jwt_key()
+        key, alg = provider.jwt_key
         if alg != JWTAlgorithms.HS256:
             key = provider.signing_key.public_key
         jwt = decode(
@@ -38,7 +38,7 @@ class ProviderInfoView(View):
         )
         if SCOPE_OPENID not in scopes:
             scopes.append(SCOPE_OPENID)
-        _, supported_alg = provider.get_jwt_key()
+        _, supported_alg = provider.jwt_key
         return {
             "issuer": provider.get_issuer(self.request),
             "authorization_endpoint": self.request.build_absolute_uri(
@@ -52,7 +52,7 @@ class ProviderInfoView(View):
             ),
             "end_session_endpoint": self.request.build_absolute_uri(
                 reverse(
-                    "authentik_core:if-session-end",
+                    "authentik_providers_oauth2:end-session",
                     kwargs={"application_slug": provider.application.slug},
                 )
             ),
authentik/providers/saml/api/__init__.py (new file, 0 lines)

authentik/providers/saml/api/property_mapping.py (new file, 42 lines)
@@ -0,0 +1,42 @@
+"""SAML Property mappings API Views"""
+from django_filters.filters import AllValuesMultipleFilter
+from django_filters.filterset import FilterSet
+from drf_spectacular.types import OpenApiTypes
+from drf_spectacular.utils import extend_schema_field
+from rest_framework.viewsets import ModelViewSet
+
+from authentik.core.api.propertymappings import PropertyMappingSerializer
+from authentik.core.api.used_by import UsedByMixin
+from authentik.providers.saml.models import SAMLPropertyMapping
+
+
+class SAMLPropertyMappingSerializer(PropertyMappingSerializer):
+    """SAMLPropertyMapping Serializer"""
+
+    class Meta:
+
+        model = SAMLPropertyMapping
+        fields = PropertyMappingSerializer.Meta.fields + [
+            "saml_name",
+            "friendly_name",
+        ]
+
+
+class SAMLPropertyMappingFilter(FilterSet):
+    """Filter for SAMLPropertyMapping"""
+
+    managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))
+
+    class Meta:
+        model = SAMLPropertyMapping
+        fields = "__all__"
+
+
+class SAMLPropertyMappingViewSet(UsedByMixin, ModelViewSet):
+    """SAMLPropertyMapping Viewset"""
+
+    queryset = SAMLPropertyMapping.objects.all()
+    serializer_class = SAMLPropertyMappingSerializer
+    filterset_class = SAMLPropertyMappingFilter
+    search_fields = ["name"]
+    ordering = ["name"]
@@ -7,15 +7,8 @@ from django.http.response import Http404, HttpResponse
 from django.shortcuts import get_object_or_404
 from django.urls import reverse
 from django.utils.translation import gettext_lazy as _
-from django_filters.filters import AllValuesMultipleFilter
-from django_filters.filterset import FilterSet
 from drf_spectacular.types import OpenApiTypes
-from drf_spectacular.utils import (
-    OpenApiParameter,
-    OpenApiResponse,
-    extend_schema,
-    extend_schema_field,
-)
+from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
 from rest_framework.decorators import action
 from rest_framework.fields import CharField, FileField, SerializerMethodField
 from rest_framework.parsers import MultiPartParser
@@ -28,15 +21,16 @@ from rest_framework.viewsets import ModelViewSet
 from structlog.stdlib import get_logger

 from authentik.api.decorators import permission_required
-from authentik.core.api.propertymappings import PropertyMappingSerializer
 from authentik.core.api.providers import ProviderSerializer
 from authentik.core.api.used_by import UsedByMixin
-from authentik.core.api.utils import PassiveSerializer
+from authentik.core.api.utils import PassiveSerializer, PropertyMappingPreviewSerializer
 from authentik.core.models import Provider
 from authentik.flows.models import Flow, FlowDesignation
-from authentik.providers.saml.models import SAMLPropertyMapping, SAMLProvider
+from authentik.providers.saml.models import SAMLProvider
+from authentik.providers.saml.processors.assertion import AssertionProcessor
 from authentik.providers.saml.processors.metadata import MetadataProcessor
 from authentik.providers.saml.processors.metadata_parser import ServiceProviderMetadataParser
+from authentik.providers.saml.processors.request_parser import AuthNRequest
 from authentik.sources.saml.processors.constants import SAML_BINDING_POST, SAML_BINDING_REDIRECT

 LOGGER = get_logger()
@@ -236,34 +230,31 @@ class SAMLProviderViewSet(UsedByMixin, ModelViewSet):
         )
         return Response(status=204)

-
-class SAMLPropertyMappingSerializer(PropertyMappingSerializer):
-    """SAMLPropertyMapping Serializer"""
-
-    class Meta:
-
-        model = SAMLPropertyMapping
-        fields = PropertyMappingSerializer.Meta.fields + [
-            "saml_name",
-            "friendly_name",
-        ]
-
-
-class SAMLPropertyMappingFilter(FilterSet):
-    """Filter for SAMLPropertyMapping"""
-
-    managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))
-
-    class Meta:
-        model = SAMLPropertyMapping
-        fields = "__all__"
-
-
-class SAMLPropertyMappingViewSet(UsedByMixin, ModelViewSet):
-    """SAMLPropertyMapping Viewset"""
-
-    queryset = SAMLPropertyMapping.objects.all()
-    serializer_class = SAMLPropertyMappingSerializer
-    filterset_class = SAMLPropertyMappingFilter
-    search_fields = ["name"]
-    ordering = ["name"]
+    @permission_required(
+        "authentik_providers_saml.view_samlprovider",
+    )
+    @extend_schema(
+        responses={
+            200: PropertyMappingPreviewSerializer(),
+            400: OpenApiResponse(description="Bad request"),
+        },
+    )
+    @action(detail=True, methods=["GET"])
+    # pylint: disable=invalid-name, unused-argument
+    def preview_user(self, request: Request, pk: int) -> Response:
+        """Preview user data for provider"""
+        provider: SAMLProvider = self.get_object()
+        processor = AssertionProcessor(provider, request._request, AuthNRequest())
+        attributes = processor.get_attributes()
+        name_id = processor.get_name_id()
+        data = []
+        for attribute in attributes:
+            item = {"Value": []}
+            item.update(attribute.attrib)
+            for value in attribute:
+                item["Value"].append(value.text)
+            data.append(item)
+        serializer = PropertyMappingPreviewSerializer(
+            instance={"preview": {"attributes": data, "nameID": name_id.text}}
+        )
+        return Response(serializer.data)
@@ -164,7 +164,7 @@ class SAMLProvider(Provider):

     @property
     def serializer(self) -> type[Serializer]:
-        from authentik.providers.saml.api import SAMLProviderSerializer
+        from authentik.providers.saml.api.providers import SAMLProviderSerializer

         return SAMLProviderSerializer

@@ -193,7 +193,7 @@ class SAMLPropertyMapping(PropertyMapping):

     @property
     def serializer(self) -> type[Serializer]:
-        from authentik.providers.saml.api import SAMLPropertyMappingSerializer
+        from authentik.providers.saml.api.property_mapping import SAMLPropertyMappingSerializer

         return SAMLPropertyMappingSerializer
@@ -10,6 +10,7 @@ from structlog.stdlib import get_logger

 from authentik.core.exceptions import PropertyMappingExpressionException
 from authentik.events.models import Event, EventAction
+from authentik.events.signals import SESSION_LOGIN_EVENT
 from authentik.lib.utils.time import timedelta_from_string
 from authentik.providers.saml.models import SAMLPropertyMapping, SAMLProvider
 from authentik.providers.saml.processors.request_parser import AuthNRequest
@@ -30,6 +31,7 @@ from authentik.sources.saml.processors.constants import (
     SAML_NAME_ID_FORMAT_X509,
     SIGN_ALGORITHM_TRANSFORM_MAP,
 )
+from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS

 LOGGER = get_logger()

@@ -129,9 +131,23 @@ class AssertionProcessor:
         auth_n_context_class_ref = SubElement(
             auth_n_context, f"{{{NS_SAML_ASSERTION}}}AuthnContextClassRef"
         )
+        auth_n_context_class_ref.text = "urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified"
+        if SESSION_LOGIN_EVENT in self.http_request.session:
+            event: Event = self.http_request.session[SESSION_LOGIN_EVENT]
+            method = event.context.get(PLAN_CONTEXT_METHOD, "")
+            method_args = event.context.get(PLAN_CONTEXT_METHOD_ARGS, {})
+            if method == "password":
                 auth_n_context_class_ref.text = (
                     "urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport"
                 )
+            if "mfa_devices" in method_args:
+                auth_n_context_class_ref.text = (
+                    "urn:oasis:names:tc:SAML:2.0:ac:classes:MobileTwoFactorContract"
+                )
+            if method in ["auth_mfa", "auth_webauthn_pwl"]:
+                auth_n_context_class_ref.text = (
+                    "urn:oasis:names:tc:SAML:2.0:ac:classes:MobileOneFactorContract"
+                )
         return auth_n_statement

     def get_assertion_conditions(self) -> Element:
@@ -1,13 +1,15 @@
 """SAML Provider API Tests"""
+from json import loads
 from tempfile import TemporaryFile

 from django.urls import reverse
 from rest_framework.test import APITestCase

+from authentik.blueprints.tests import apply_blueprint
 from authentik.core.models import Application
 from authentik.core.tests.utils import create_test_admin_user, create_test_flow
 from authentik.flows.models import FlowDesignation
-from authentik.providers.saml.models import SAMLProvider
+from authentik.providers.saml.models import SAMLPropertyMapping, SAMLProvider
 from authentik.providers.saml.tests.test_metadata import METADATA_SIMPLE


@@ -107,3 +109,24 @@ class TestSAMLProviderAPI(APITestCase):
             format="multipart",
         )
         self.assertEqual(400, response.status_code)
+
+    @apply_blueprint("system/providers-saml.yaml")
+    def test_preview(self):
+        """Test Preview API Endpoint"""
+        provider: SAMLProvider = SAMLProvider.objects.create(
+            name="test",
+            authorization_flow=create_test_flow(),
+        )
+        provider.property_mappings.set(SAMLPropertyMapping.objects.all())
+        Application.objects.create(name="test", provider=provider, slug="test")
+        response = self.client.get(
+            reverse("authentik_api:samlprovider-preview-user", kwargs={"pk": provider.pk})
+        )
+        self.assertEqual(response.status_code, 200)
+        body = loads(response.content.decode())["preview"]["attributes"]
+        self.assertEqual(
+            [x for x in body if x["Name"] == "http://schemas.goauthentik.io/2021/02/saml/username"][
+                0
+            ]["Value"],
+            [self.user.username],
+        )
@@ -452,23 +452,30 @@ _DISALLOWED_ITEMS = [
     "AUTHENTICATION_BACKENDS",
     "CELERY_BEAT_SCHEDULE",
 ]
-# Load subapps's INSTALLED_APPS
-for _app in INSTALLED_APPS:
-    if _app.startswith("authentik"):
-        if "apps" in _app:
-            _app = ".".join(_app.split(".")[:-2])
-        try:
-            app_settings = importlib.import_module(f"{_app}.settings")
-            INSTALLED_APPS.extend(getattr(app_settings, "INSTALLED_APPS", []))
-            MIDDLEWARE.extend(getattr(app_settings, "MIDDLEWARE", []))
-            AUTHENTICATION_BACKENDS.extend(getattr(app_settings, "AUTHENTICATION_BACKENDS", []))
-            CELERY_BEAT_SCHEDULE.update(getattr(app_settings, "CELERY_BEAT_SCHEDULE", {}))
-            for _attr in dir(app_settings):
-                if not _attr.startswith("__") and _attr not in _DISALLOWED_ITEMS:
-                    globals()[_attr] = getattr(app_settings, _attr)
-        except ImportError:
-            pass
+
+
+def _update_settings(app_path: str):
+    try:
+        settings_module = importlib.import_module(app_path)
+        CONFIG.log("debug", "Loaded app settings", path=app_path)
+        INSTALLED_APPS.extend(getattr(settings_module, "INSTALLED_APPS", []))
+        MIDDLEWARE.extend(getattr(settings_module, "MIDDLEWARE", []))
+        AUTHENTICATION_BACKENDS.extend(getattr(settings_module, "AUTHENTICATION_BACKENDS", []))
+        CELERY_BEAT_SCHEDULE.update(getattr(settings_module, "CELERY_BEAT_SCHEDULE", {}))
+        for _attr in dir(settings_module):
+            if not _attr.startswith("__") and _attr not in _DISALLOWED_ITEMS:
+                globals()[_attr] = getattr(settings_module, _attr)
+    except ImportError:
+        pass
+
+
+# Load subapps's settings
+for _app in INSTALLED_APPS:
+    if not _app.startswith("authentik"):
+        continue
+    _update_settings(f"{_app}.settings")
+_update_settings("data.user_settings")
+
 if DEBUG:
     CELERY_TASK_ALWAYS_EAGER = True
     os.environ[ENV_GIT_HASH_KEY] = "dev"
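The refactored loader above also imports a data.user_settings module, so deployment-specific overrides can live outside the authentik package. As a hedged example (the file name comes from the diff, the contents are invented), such a module could look like:

# data/user_settings.py - hypothetical local override module.
# Attributes not listed in _DISALLOWED_ITEMS are copied into the settings globals;
# the special-cased lists/dicts below are extended or merged rather than replaced.
SESSION_COOKIE_AGE = 60 * 60 * 8      # copied via globals()

MIDDLEWARE = []                        # extended via MIDDLEWARE.extend(...)
CELERY_BEAT_SCHEDULE = {}              # merged via CELERY_BEAT_SCHEDULE.update(...)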
@@ -34,7 +34,7 @@ class PytestTestRunner:  # pragma: no cover
             "outposts.container_image_base",
             f"ghcr.io/goauthentik/dev-%(type)s:{get_docker_tag()}",
         )
-        CONFIG.y_set("error_reporting.sample_rate", 1.0)
+        CONFIG.y_set("error_reporting.sample_rate", 0)
         sentry_init(
             environment="testing",
             send_default_pii=True,
@@ -8,7 +8,7 @@ from rest_framework.viewsets import ModelViewSet

 from authentik.core.api.sources import SourceSerializer
 from authentik.core.api.used_by import UsedByMixin
-from authentik.providers.saml.api import SAMLMetadataSerializer
+from authentik.providers.saml.api.providers import SAMLMetadataSerializer
 from authentik.sources.saml.models import SAMLSource
 from authentik.sources.saml.processors.metadata import MetadataProcessor
@@ -202,10 +202,10 @@ class ResponseProcessor:
         """Get all attributes sent"""
         attributes = {}
         assertion = self._root.find(f"{{{NS_SAML_ASSERTION}}}Assertion")
-        if not assertion:
+        if assertion is None:
             raise ValueError("Assertion element not found")
         attribute_statement = assertion.find(f"{{{NS_SAML_ASSERTION}}}AttributeStatement")
-        if not attribute_statement:
+        if attribute_statement is None:
             raise ValueError("Attribute statement element not found")
         # Get all attributes and their values into a dict
         for attribute in attribute_statement.iterchildren():
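Context for the `is None` change above (illustration only, not from the changeset): lxml/ElementTree elements are falsy when they have no children, so `if not element:` can treat a found-but-empty element as missing. Minimal sketch, assuming lxml is installed:

from lxml import etree

root = etree.fromstring("<Response><Assertion/></Response>")
assertion = root.find("Assertion")

print(assertion is None)    # False - the element exists
print(len(assertion) == 0)  # True - no children, which is why truthiness misleads
# Hence the explicit `assertion is None` checks in the hunk above.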
@@ -118,16 +118,17 @@ class AuthenticatorDuoStageViewSet(UsedByMixin, ModelViewSet):
             .first()
         )
         if not user:
-            return Response(data={"non_field_errors": ["user does not exist"]}, status=400)
+            return Response(data={"non_field_errors": ["User does not exist."]}, status=400)
         device = DuoDevice.objects.filter(
             duo_user_id=request.data.get("duo_user_id"), user=user, stage=stage
         ).first()
         if device:
-            return Response(data={"non_field_errors": ["device exists already"]}, status=400)
+            return Response(data={"non_field_errors": ["Device exists already."]}, status=400)
         DuoDevice.objects.create(
             duo_user_id=request.data.get("duo_user_id"),
             user=user,
             stage=stage,
+            confirmed=True,
             name="Imported Duo Authenticator",
         )
         return Response(status=204)
@@ -13,8 +13,9 @@ class AuthenticatorValidateStageSerializer(StageSerializer):

     def validate_not_configured_action(self, value):
         """Ensure that a configuration stage is set when not_configured_action is configure"""
-        configuration_stages = self.initial_data.get("configuration_stages")
-        if value == NotConfiguredAction.CONFIGURE and configuration_stages is None:
+        configuration_stages = self.initial_data.get("configuration_stages", None)
+        if value == NotConfiguredAction.CONFIGURE:
+            if not configuration_stages or len(configuration_stages) < 1:
                 raise ValidationError(
                     (
                         'When "Not configured action" is set to "Configure", '
@@ -200,15 +200,16 @@ def validate_challenge_duo(device_pk: int, stage_view: StageView, user: User) ->
         )
         # {'result': 'allow', 'status': 'allow', 'status_msg': 'Success. Logging you in...'}
         if response["result"] == "deny":
+            LOGGER.debug("duo push response", result=response["result"], msg=response["status_msg"])
             login_failed.send(
                 sender=__name__,
                 credentials={"username": user.username},
                 request=stage_view.request,
                 stage=stage_view.executor.current_stage,
                 device_class=DeviceClasses.DUO.value,
+                duo_response=response,
             )
-            raise ValidationError("Duo denied access")
-        device.save()
+            raise ValidationError("Duo denied access", code="denied")
         return device
     except RuntimeError as exc:
         Event.new(
@@ -216,4 +217,4 @@ def validate_challenge_duo(device_pk: int, stage_view: StageView, user: User) ->
             message=f"Failed to DUO authenticate user: {str(exc)}",
             user=user,
         ).from_http(stage_view.request, user)
-        raise ValidationError("Duo denied access")
+        raise ValidationError("Duo denied access", code="denied")
@@ -134,6 +134,12 @@ class AuthenticatorValidationChallengeResponse(ChallengeResponse):
         # Here we only check if the any data was sent at all
         if "code" not in attrs and "webauthn" not in attrs and "duo" not in attrs:
             raise ValidationError("Empty response")
+        self.stage.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD, "auth_mfa")
+        self.stage.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD_ARGS, {})
+        self.stage.executor.plan.context[PLAN_CONTEXT_METHOD_ARGS].setdefault("mfa_devices", [])
+        self.stage.executor.plan.context[PLAN_CONTEXT_METHOD_ARGS]["mfa_devices"].append(
+            self.device
+        )
         return attrs

@@ -3,17 +3,22 @@ from unittest.mock import MagicMock, patch

 from django.contrib.sessions.middleware import SessionMiddleware
 from django.test.client import RequestFactory
+from django.urls import reverse
 from rest_framework.exceptions import ValidationError

-from authentik.core.tests.utils import create_test_admin_user
-from authentik.flows.planner import FlowPlan
+from authentik.core.tests.utils import create_test_admin_user, create_test_flow
+from authentik.events.models import Event, EventAction
+from authentik.flows.models import FlowDesignation, FlowStageBinding
+from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan
 from authentik.flows.stage import StageView
 from authentik.flows.tests import FlowTestCase
-from authentik.flows.views.executor import FlowExecutorView
+from authentik.flows.views.executor import SESSION_KEY_PLAN, FlowExecutorView
 from authentik.lib.generators import generate_id, generate_key
 from authentik.lib.tests.utils import dummy_get_response
 from authentik.stages.authenticator_duo.models import AuthenticatorDuoStage, DuoDevice
 from authentik.stages.authenticator_validate.challenge import validate_challenge_duo
+from authentik.stages.authenticator_validate.models import AuthenticatorValidateStage, DeviceClasses
+from authentik.stages.user_login.models import UserLoginStage
 from authentik.tenants.utils import get_tenant_for_request

@@ -73,7 +78,17 @@ class AuthenticatorValidateStageDuoTests(FlowTestCase):
         )
         with patch(
             "authentik.stages.authenticator_duo.models.AuthenticatorDuoStage.auth_client",
-            MagicMock(return_value=MagicMock(auth=MagicMock(return_value={"result": "deny"}))),
+            MagicMock(
+                return_value=MagicMock(
+                    auth=MagicMock(
+                        return_value={
+                            "result": "deny",
+                            "status": "deny",
+                            "status_msg": "foo",
+                        }
+                    )
+                )
+            ),
         ):
             with self.assertRaises(ValidationError):
                 validate_challenge_duo(
@@ -87,3 +102,88 @@ class AuthenticatorValidateStageDuoTests(FlowTestCase):
                     ),
                     self.user,
                 )
+
+    @patch(
+        "authentik.stages.authenticator_duo.models.AuthenticatorDuoStage.auth_client",
+        MagicMock(
+            return_value=MagicMock(
+                auth=MagicMock(
+                    return_value={
+                        "result": "allow",
+                        "status": "allow",
+                        "status_msg": "Success. Logging you in...",
+                    }
+                )
+            )
+        ),
+    )
+    def test_full(self):
+        """Test full within a flow executor"""
+        duo_stage = AuthenticatorDuoStage.objects.create(
+            name=generate_id(),
+            client_id=generate_id(),
+            client_secret=generate_key(),
+            api_hostname="",
+        )
+        duo_device = DuoDevice.objects.create(
+            user=self.user,
+            stage=duo_stage,
+        )
+
+        flow = create_test_flow(FlowDesignation.AUTHENTICATION)
+        stage = AuthenticatorValidateStage.objects.create(
+            name=generate_id(),
+            device_classes=[DeviceClasses.DUO],
+        )
+
+        plan = FlowPlan(flow_pk=flow.pk.hex)
+        plan.append(FlowStageBinding.objects.create(target=flow, stage=stage, order=2))
+        plan.append(
+            FlowStageBinding.objects.create(
+                target=flow, stage=UserLoginStage.objects.create(name=generate_id()), order=3
+            )
+        )
+        plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
+        session = self.client.session
+        session[SESSION_KEY_PLAN] = plan
+        session.save()
+
+        response = self.client.get(
+            reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
+        )
+        self.assertEqual(response.status_code, 200)
+
+        response = self.client.post(
+            reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
+            {"duo": duo_device.pk},
+            follow=True,
+        )
+
+        self.assertEqual(response.status_code, 200)
+        self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))
+        event = Event.objects.filter(
+            action=EventAction.LOGIN,
+            user__pk=self.user.pk,
+        ).first()
+        self.assertIsNotNone(event)
+        self.assertEqual(
+            event.context,
+            {
+                "auth_method": "auth_mfa",
+                "auth_method_args": {
+                    "mfa_devices": [
+                        {
+                            "app": "authentik_stages_authenticator_duo",
+                            "model_name": "duodevice",
+                            "name": "",
+                            "pk": duo_device.pk,
+                        }
+                    ]
+                },
+                "http_request": {
+                    "args": {},
+                    "method": "GET",
+                    "path": f"/api/v3/flows/executor/{flow.slug}/",
+                },
+            },
+        )
@@ -68,7 +68,10 @@ class AuthenticatorValidateStageTests(FlowTestCase):
         """Test serializer validation"""
         self.client.force_login(self.user)
         serializer = AuthenticatorValidateStageSerializer(
-            data={"name": generate_id(), "not_configured_action": NotConfiguredAction.CONFIGURE}
+            data={
+                "name": generate_id(),
+                "not_configured_action": NotConfiguredAction.CONFIGURE,
+            }
         )
         self.assertFalse(serializer.is_valid())
         self.assertIn("not_configured_action", serializer.errors)
@@ -1,7 +1,6 @@
 """Test validator stage"""
 from datetime import datetime, timedelta
 from hashlib import sha256
-from http.cookies import SimpleCookie
 from time import sleep

 from django.conf import settings
@@ -76,7 +75,7 @@ class AuthenticatorValidateStageTOTPTests(FlowTestCase):
             component="ak-stage-authenticator-validate",
         )

-    def test_last_auth_threshold_valid(self) -> SimpleCookie:
+    def test_last_auth_threshold_valid(self):
         """Test last_auth_threshold"""
         ident_stage = IdentificationStage.objects.create(
             name=generate_id(),
@@ -115,12 +114,47 @@ class AuthenticatorValidateStageTOTPTests(FlowTestCase):
         )
         self.assertIn(COOKIE_NAME_MFA, response.cookies)
         self.assertStageResponse(response, component="xak-flow-redirect", to="/")
-        return response.cookies

     def test_last_auth_skip(self):
         """Test valid cookie"""
-        cookies = self.test_last_auth_threshold_valid()
-        mfa_cookie = cookies[COOKIE_NAME_MFA]
+        ident_stage = IdentificationStage.objects.create(
+            name=generate_id(),
+            user_fields=[
+                UserFields.USERNAME,
+            ],
+        )
+        device: TOTPDevice = TOTPDevice.objects.create(
+            user=self.user,
+            confirmed=True,
+        )
+        stage = AuthenticatorValidateStage.objects.create(
+            name=generate_id(),
+            last_auth_threshold="hours=1",
+            not_configured_action=NotConfiguredAction.CONFIGURE,
+            device_classes=[DeviceClasses.TOTP],
+        )
+        stage.configuration_stages.set([ident_stage])
+        FlowStageBinding.objects.create(target=self.flow, stage=ident_stage, order=0)
+        FlowStageBinding.objects.create(target=self.flow, stage=stage, order=1)
+
+        response = self.client.post(
+            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
+            {"uid_field": self.user.username},
+        )
+        self.assertEqual(response.status_code, 302)
+        response = self.client.get(
+            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
+        )
+        # Verify token once here to set last_t etc
+        totp = TOTP(device.bin_key)
+        sleep(1)
+        response = self.client.post(
+            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
+            {"code": str(totp.token())},
+        )
+        self.assertIn(COOKIE_NAME_MFA, response.cookies)
+        self.assertStageResponse(response, component="xak-flow-redirect", to="/")
+        mfa_cookie = response.cookies[COOKIE_NAME_MFA]
         self.client.logout()
         self.client.cookies[COOKIE_NAME_MFA] = mfa_cookie
         response = self.client.post(
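For readers unfamiliar with the `TOTP(device.bin_key)` call in the test above: the test drives the flow executor with a real one-time code computed from the device's stored secret, using django-otp's OATH helper. Below is a minimal sketch of that pattern, assuming the stock django-otp `TOTPDevice` model is in use; the helper function and its name are illustrative and not part of this diff.

```python
from django_otp.oath import TOTP
from django_otp.plugins.otp_totp.models import TOTPDevice


def current_totp_code(user) -> str:
    """Return the code a validator stage would accept for the user's confirmed TOTP device."""
    device = TOTPDevice.objects.get(user=user, confirmed=True)
    # bin_key is the raw shared secret; step/digits mirror the device configuration.
    totp = TOTP(device.bin_key, step=device.step, digits=device.digits)
    return str(totp.token())
```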
@@ -12,7 +12,7 @@ class CaptchaStageSerializer(StageSerializer):
     class Meta:

         model = CaptchaStage
-        fields = StageSerializer.Meta.fields + ["public_key", "private_key"]
+        fields = StageSerializer.Meta.fields + ["public_key", "private_key", "js_url", "api_url"]
         extra_kwargs = {"private_key": {"write_only": True}}

@@ -0,0 +1,33 @@
+# Generated by Django 4.1.2 on 2022-10-20 19:30
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_stages_captcha", "0001_initial"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="captchastage",
+            name="api_url",
+            field=models.TextField(default="https://www.recaptcha.net/recaptcha/api/siteverify"),
+        ),
+        migrations.AddField(
+            model_name="captchastage",
+            name="js_url",
+            field=models.TextField(default="https://www.recaptcha.net/recaptcha/api.js"),
+        ),
+        migrations.AlterField(
+            model_name="captchastage",
+            name="private_key",
+            field=models.TextField(help_text="Private key, acquired your captcha Provider."),
+        ),
+        migrations.AlterField(
+            model_name="captchastage",
+            name="public_key",
+            field=models.TextField(help_text="Public key, acquired your captcha Provider."),
+        ),
+    ]
@@ -11,12 +11,11 @@ from authentik.flows.models import Stage
 class CaptchaStage(Stage):
     """Verify the user is human using Google's reCaptcha."""

-    public_key = models.TextField(
-        help_text=_("Public key, acquired from https://www.google.com/recaptcha/intro/v3.html")
-    )
-    private_key = models.TextField(
-        help_text=_("Private key, acquired from https://www.google.com/recaptcha/intro/v3.html")
-    )
+    public_key = models.TextField(help_text=_("Public key, acquired your captcha Provider."))
+    private_key = models.TextField(help_text=_("Private key, acquired your captcha Provider."))
+
+    js_url = models.TextField(default="https://www.recaptcha.net/recaptcha/api.js")
+    api_url = models.TextField(default="https://www.recaptcha.net/recaptcha/api/siteverify")

     @property
     def serializer(self) -> type[BaseSerializer]:
@@ -20,6 +20,7 @@ class CaptchaChallenge(WithUserInfoChallenge):
     """Site public key"""

     site_key = CharField()
+    js_url = CharField(read_only=True)
     component = CharField(default="ak-stage-captcha")

@@ -34,7 +35,7 @@ class CaptchaChallengeResponse(ChallengeResponse):
         stage: CaptchaStage = self.stage.executor.current_stage
         try:
             response = get_http_session().post(
-                "https://www.google.com/recaptcha/api/siteverify",
+                stage.api_url,
                 headers={
                     "Content-type": "application/x-www-form-urlencoded",
                 },
@@ -61,6 +62,7 @@ class CaptchaStageView(ChallengeStageView):
     def get_challenge(self, *args, **kwargs) -> Challenge:
         return CaptchaChallenge(
             data={
+                "js_url": self.executor.current_stage.js_url,
                 "type": ChallengeTypes.NATIVE.value,
                 "site_key": self.executor.current_stage.public_key,
             }
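The `js_url` and `api_url` fields added above make the captcha stage provider-agnostic: the widget script and the verification endpoint are no longer hard-coded to Google's reCAPTCHA, and `get_challenge()` now hands the script URL to the frontend. Below is a rough sketch of configuring a stage for a reCAPTCHA-compatible provider; the import path, stage name, keys and provider URLs are illustrative assumptions, not values taken from this diff.

```python
from authentik.stages.captcha.models import CaptchaStage  # assumed module path

# Any provider implementing the reCAPTCHA-style "siteverify" contract should work;
# the hCaptcha URLs below are examples only.
stage = CaptchaStage.objects.create(
    name="captcha-hcaptcha",
    public_key="<site key from the provider>",
    private_key="<secret key from the provider>",
    js_url="https://js.hcaptcha.com/1/api.js",
    api_url="https://api.hcaptcha.com/siteverify",
)
```

Leaving `js_url` and `api_url` at their defaults keeps the previous recaptcha.net behaviour, as the migration defaults above show.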
@@ -44,6 +44,28 @@ class TestEmailStage(FlowTestCase):
         response = self.client.get(url)
         self.assertEqual(response.status_code, 200)

+    @patch(
+        "authentik.stages.email.models.EmailStage.backend_class",
+        PropertyMock(return_value=EmailBackend),
+    )
+    def test_rendering_locale(self):
+        """Test with pending user"""
+        self.user.attributes = {"settings": {"locale": "de"}}
+        plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
+        plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
+        session = self.client.session
+        session[SESSION_KEY_PLAN] = plan
+        session.save()
+
+        url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
+        response = self.client.get(url)
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(mail.outbox), 1)
+        self.assertEqual(mail.outbox[0].subject, "authentik")
+        self.assertNotIn(
+            "You recently requested to change your password", mail.outbox[0].alternatives[0][0]
+        )
+
     def test_without_user(self):
         """Test without pending user"""
         plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
@@ -55,6 +77,10 @@ class TestEmailStage(FlowTestCase):
         response = self.client.get(url)
         self.assertEqual(response.status_code, 200)

+    @patch(
+        "authentik.stages.email.models.EmailStage.backend_class",
+        PropertyMock(return_value=EmailBackend),
+    )
     def test_pending_user(self):
         """Test with pending user"""
         plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
@@ -64,16 +90,16 @@ class TestEmailStage(FlowTestCase):
         session.save()

         url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
-        with patch(
-            "authentik.stages.email.models.EmailStage.backend_class",
-            PropertyMock(return_value=EmailBackend),
-        ):
         response = self.client.post(url)
         self.assertEqual(response.status_code, 200)
         self.assertEqual(len(mail.outbox), 1)
         self.assertEqual(mail.outbox[0].subject, "authentik")
         self.assertEqual(mail.outbox[0].to, [self.user.email])

+    @patch(
+        "authentik.stages.email.models.EmailStage.backend_class",
+        PropertyMock(return_value=EmailBackend),
+    )
     def test_pending_user_override(self):
         """Test with pending user (override to)"""
         plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
@@ -84,23 +110,19 @@ class TestEmailStage(FlowTestCase):
         session.save()

         url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
-        with patch(
-            "authentik.stages.email.models.EmailStage.backend_class",
-            PropertyMock(return_value=EmailBackend),
-        ):
         response = self.client.post(url)
         self.assertEqual(response.status_code, 200)
         self.assertEqual(len(mail.outbox), 1)
         self.assertEqual(mail.outbox[0].subject, "authentik")
         self.assertEqual(mail.outbox[0].to, ["foo@bar.baz"])

+    @patch(
+        "authentik.stages.email.models.EmailStage.backend_class",
+        PropertyMock(return_value=SMTPEmailBackend),
+    )
     def test_use_global_settings(self):
         """Test use_global_settings"""
         host = "some-unique-string"
-        with patch(
-            "authentik.stages.email.models.EmailStage.backend_class",
-            PropertyMock(return_value=SMTPEmailBackend),
-        ):
         with self.settings(EMAIL_HOST=host):
             self.assertEqual(EmailStage(use_global_settings=True).backend.host, host)
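The email-stage tests above replace inline `with patch(...)` blocks with `@patch(...)` decorators; both forms come from the standard library's `unittest.mock` and patch the same target for exactly the duration of the test. Here is a self-contained sketch of the two equivalent forms; the `Stage` class is a stand-in, not the authentik model.

```python
from unittest import TestCase, mock


class Stage:
    backend = "real"


class StageTests(TestCase):
    # Decorator form: the attribute is swapped before the test body runs
    # and restored afterwards, keeping the body one level flatter.
    @mock.patch.object(Stage, "backend", "fake")
    def test_decorator_form(self):
        self.assertEqual(Stage.backend, "fake")

    def test_context_manager_form(self):
        # Context-manager form: identical effect, scoped to the with-block.
        with mock.patch.object(Stage, "backend", "fake"):
            self.assertEqual(Stage.backend, "fake")
        self.assertEqual(Stage.backend, "real")
```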
@@ -8,6 +8,7 @@ from rest_framework.viewsets import ModelViewSet
 from authentik.core.api.groups import GroupMemberSerializer
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import is_dict
+from authentik.flows.api.flows import FlowSerializer
 from authentik.flows.api.stages import StageSerializer
 from authentik.stages.invitation.models import Invitation, InvitationStage

@@ -49,6 +50,7 @@ class InvitationSerializer(ModelSerializer):

     created_by = GroupMemberSerializer(read_only=True)
     fixed_data = JSONField(validators=[is_dict], required=False)
+    flow_obj = FlowSerializer(read_only=True, required=False, source="flow")

     class Meta:

@@ -60,6 +62,8 @@ class InvitationSerializer(ModelSerializer):
             "fixed_data",
             "created_by",
             "single_use",
+            "flow",
+            "flow_obj",
         ]

@@ -69,8 +73,8 @@ class InvitationViewSet(UsedByMixin, ModelViewSet):
     queryset = Invitation.objects.all()
     serializer_class = InvitationSerializer
     ordering = ["-expires"]
-    search_fields = ["name", "created_by__username", "expires"]
-    filterset_fields = ["name", "created_by__username", "expires"]
+    search_fields = ["name", "created_by__username", "expires", "flow__slug"]
+    filterset_fields = ["name", "created_by__username", "expires", "flow__slug"]

     def perform_create(self, serializer: InvitationSerializer):
         serializer.save(created_by=self.request.user)
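Adding `flow__slug` to `search_fields` and `filterset_fields` means invitations can now be looked up by the flow they are bound to. A hedged example of such a query follows; the base URL, endpoint path, and token handling are assumptions for illustration and are not part of this diff.

```python
import requests

AUTHENTIK_URL = "https://authentik.example.com"  # placeholder
TOKEN = "<api token>"  # placeholder

# "flow__slug" is the new filter field from the viewset above.
resp = requests.get(
    f"{AUTHENTIK_URL}/api/v3/stages/invitation/invitations/",
    headers={"Authorization": f"Bearer {TOKEN}"},
    params={"flow__slug": "my-enrollment-flow"},
)
resp.raise_for_status()
print(resp.json())
```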
@@ -0,0 +1,26 @@
+# Generated by Django 4.1.4 on 2022-12-20 13:43
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_flows", "0024_flow_authentication"),
+        ("authentik_stages_invitation", "0001_squashed_0006_invitation_name"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="invitation",
+            name="flow",
+            field=models.ForeignKey(
+                default=None,
+                help_text="When set, only the configured flow can use this invitation.",
+                null=True,
+                on_delete=django.db.models.deletion.SET_DEFAULT,
+                to="authentik_flows.flow",
+            ),
+        ),
+    ]
@@ -55,6 +55,13 @@ class Invitation(SerializerModel, ExpiringModel):

     name = models.SlugField()

+    flow = models.ForeignKey(
+        "authentik_flows.Flow",
+        default=None,
+        null=True,
+        on_delete=models.SET_DEFAULT,
+        help_text=_("When set, only the configured flow can use this invitation."),
+    )
     single_use = models.BooleanField(
         default=False,
         help_text=_("When enabled, the invitation will be deleted after usage."),
@@ -3,6 +3,7 @@ from typing import Optional

 from deepmerge import always_merger
 from django.http import HttpRequest, HttpResponse
+from django.utils.translation import gettext_lazy as _

 from authentik.flows.stage import StageView
 from authentik.flows.views.executor import SESSION_KEY_GET
@@ -35,22 +36,30 @@ class InvitationStageView(StageView):
             return self.executor.plan.context[PLAN_CONTEXT_PROMPT][INVITATION_TOKEN_KEY_CONTEXT]
         return None

-    def get(self, request: HttpRequest) -> HttpResponse:
-        """Apply data to the current flow based on a URL"""
-        stage: InvitationStage = self.executor.current_stage
+    def get_invite(self) -> Optional[Invitation]:
+        """Check the token, find the invite and check it's flow"""
         token = self.get_token()
         if not token:
-            # No Invitation was given, raise error or continue
-            if stage.continue_flow_without_invitation:
-                return self.executor.stage_ok()
-            return self.executor.stage_invalid()
+            return None

         invite: Invitation = Invitation.objects.filter(pk=token).first()
         if not invite:
             self.logger.debug("invalid invitation", token=token)
+            return None
+        if invite.flow and invite.flow.pk.hex != self.executor.plan.flow_pk:
+            self.logger.debug("invite for incorrect flow", expected=invite.flow.slug)
+            return None
+        return invite
+
+    def get(self, request: HttpRequest) -> HttpResponse:
+        """Apply data to the current flow based on a URL"""
+        stage: InvitationStage = self.executor.current_stage
+
+        invite = self.get_invite()
+        if not invite:
             if stage.continue_flow_without_invitation:
                 return self.executor.stage_ok()
-            return self.executor.stage_invalid()
+            return self.executor.stage_invalid(_("Invalid invite/invite not found"))

         self.executor.plan.context[INVITATION_IN_EFFECT] = True
         self.executor.plan.context[INVITATION] = invite
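Taken together, the new `flow` foreign key and `get_invite()` let an invitation be locked to a single flow: a token presented in any other flow resolves to `None`, and the stage denies unless `continue_flow_without_invitation` is enabled. Below is a minimal ORM sketch of creating such a flow-bound invitation; the slugs, user lookup, expiry, and fixed data are illustrative, and only fields visible in this diff are used.

```python
from datetime import timedelta

from django.utils.timezone import now

from authentik.core.models import User
from authentik.flows.models import Flow
from authentik.stages.invitation.models import Invitation

enrollment_flow = Flow.objects.get(slug="my-enrollment-flow")  # illustrative slug
creator = User.objects.get(username="akadmin")  # illustrative; any existing user

invitation = Invitation.objects.create(
    name="partner-onboarding",
    created_by=creator,
    flow=enrollment_flow,  # only this flow may consume the invitation
    single_use=True,  # delete the invitation after its first successful use
    fixed_data={"foo": "bar"},
    expires=now() + timedelta(days=7),
)
print(invitation.pk)  # the token value handed to the invitee
```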
@@ -23,7 +23,7 @@ from authentik.stages.password import BACKEND_INBUILT
 from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND


-class TestUserLoginStage(FlowTestCase):
+class TestInvitationStage(FlowTestCase):
     """Login tests"""

     def setUp(self):
@@ -98,6 +98,33 @@ class TestUserLoginStage(FlowTestCase):
         self.assertEqual(response.status_code, 200)
         self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))

+    def test_invalid_flow(self):
+        """Test with invitation, invalid flow limit"""
+        invalid_flow = create_test_flow(FlowDesignation.ENROLLMENT)
+        plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
+        session = self.client.session
+        session[SESSION_KEY_PLAN] = plan
+        session.save()
+
+        data = {"foo": "bar"}
+        invite = Invitation.objects.create(
+            created_by=get_anonymous_user(), fixed_data=data, flow=invalid_flow
+        )
+
+        with patch("authentik.flows.views.executor.FlowExecutorView.cancel", MagicMock()):
+            base_url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
+            args = urlencode({INVITATION_TOKEN_KEY: invite.pk.hex})
+            response = self.client.get(base_url + f"?query={args}")
+
+            session = self.client.session
+            plan: FlowPlan = session[SESSION_KEY_PLAN]
+
+            self.assertStageResponse(
+                response,
+                flow=self.flow,
+                component="ak-stage-access-denied",
+            )
+
     def test_with_invitation_prompt_data(self):
         """Test with invitation, check data in session"""
         data = {"foo": "bar"}
@@ -11,6 +11,7 @@ from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan
 from authentik.flows.tests import FlowTestCase
 from authentik.flows.tests.test_executor import TO_STAGE_RESPONSE_MOCK
 from authentik.flows.views.executor import SESSION_KEY_PLAN
+from authentik.lib.generators import generate_id
 from authentik.stages.password import BACKEND_INBUILT
 from authentik.stages.password.models import PasswordStage

@@ -25,7 +26,7 @@ class TestPasswordStage(FlowTestCase):
         self.user = create_test_admin_user()

         self.flow = create_test_flow(FlowDesignation.AUTHENTICATION)
-        self.stage = PasswordStage.objects.create(name="password", backends=[BACKEND_INBUILT])
+        self.stage = PasswordStage.objects.create(name=generate_id(), backends=[BACKEND_INBUILT])
         self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)

     @patch(
@@ -137,7 +137,7 @@ class TestPromptStage(FlowTestCase):
         self.assertIn(prompt.label, response.content.decode())
         self.assertIn(prompt.placeholder, response.content.decode())

-    def test_valid_challenge_with_policy(self) -> PromptChallengeResponse:
+    def test_valid_challenge_with_policy(self):
         """Test challenge_response validation"""
         plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
         expr = (
@@ -151,9 +151,8 @@ class TestPromptStage(FlowTestCase):
             None, stage=self.stage, plan=plan, data=self.prompt_data
         )
         self.assertEqual(challenge_response.is_valid(), True)
-        return challenge_response

-    def test_invalid_challenge(self) -> PromptChallengeResponse:
+    def test_invalid_challenge(self):
         """Test challenge_response validation"""
         plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
         expr = "False"
@@ -164,7 +163,6 @@ class TestPromptStage(FlowTestCase):
             None, stage=self.stage, plan=plan, data=self.prompt_data
         )
         self.assertEqual(challenge_response.is_valid(), False)
-        return challenge_response

     def test_valid_challenge_request(self):
         """Test a request with valid challenge_response data"""
@@ -173,7 +171,18 @@ class TestPromptStage(FlowTestCase):
         session[SESSION_KEY_PLAN] = plan
         session.save()

-        challenge_response = self.test_valid_challenge_with_policy()
+        plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
+        expr = (
+            "return request.context['prompt_data']['password_prompt'] "
+            "== request.context['prompt_data']['password2_prompt']"
+        )
+        expr_policy = ExpressionPolicy.objects.create(name="validate-form", expression=expr)
+        self.stage.validation_policies.set([expr_policy])
+        self.stage.save()
+        challenge_response = PromptChallengeResponse(
+            None, stage=self.stage, plan=plan, data=self.prompt_data
+        )
+        self.assertEqual(challenge_response.is_valid(), True)

         with patch("authentik.flows.views.executor.FlowExecutorView.cancel", MagicMock()):
             response = self.client.post(
@@ -5,7 +5,7 @@ from django.http import HttpRequest, HttpResponse
 from django.utils.translation import gettext as _

 from authentik.core.models import User
-from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
+from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, PLAN_CONTEXT_SOURCE
 from authentik.flows.stage import StageView
 from authentik.lib.utils.time import timedelta_from_string
 from authentik.stages.password import BACKEND_INBUILT
@@ -52,5 +52,8 @@ class UserLoginStageView(StageView):
             session_duration=self.executor.current_stage.session_duration,
         )
         self.request.session[USER_LOGIN_AUTHENTICATED] = True
-        messages.success(self.request, _("Successfully logged in!"))
+        # Only show success message if we don't have a source in the flow
+        # as sources show their own success messages
+        if not self.executor.plan.context.get(PLAN_CONTEXT_SOURCE, None):
+            messages.success(self.request, _("Successfully logged in!"))
         return self.executor.stage_ok()
Some files were not shown because too many files have changed in this diff.