Compare commits: version/20 ... version/20
274 commits (eddca478dc through 4fb4e72624; the commit table lists SHA1 hashes only, with no author or date data)
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 2021.8.4
current_version = 2021.9.2
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)
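As a side note, the bumpversion parse pattern above can be exercised on its own. A minimal sketch; the pattern is copied verbatim from the config, the surrounding script is illustrative only:

```python
import re

# Pattern copied from the [bumpversion] parse option above.
VERSION_PATTERN = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)")

match = VERSION_PATTERN.match("2021.9.2")
if match:
    # Prints {'major': '2021', 'minor': '9', 'patch': '2', 'release': ''}
    print(match.groupdict())
```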
.github/ISSUE_TEMPLATE/bug_report.md (2 changes, vendored)

@@ -27,7 +27,7 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively

**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 0.10.0-stable]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]

**Additional context**
.github/ISSUE_TEMPLATE/question.md (2 changes, vendored)

@@ -20,7 +20,7 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively

**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 0.10.0-stable]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]

**Additional context**
.github/workflows/ci-main.yml (119 changes, vendored)

@@ -2,8 +2,15 @@ name: authentik-ci-main

on:
  push:
    branches:
      - master
      - next
      - version-*
    paths-ignore:
      - website
  pull_request:
    branches:
      - master

env:
  POSTGRES_DB: authentik
@@ -18,7 +25,14 @@ jobs:
      - uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      # - id: cache-pipenv
      #   uses: actions/cache@v2.1.6
      #   with:
      #     path: ~/.local/share/virtualenvs
      #     key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
      - name: prepare
        # env:
        #   INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
        run: scripts/ci_prepare.sh
      - name: run pylint
        run: pipenv run pylint authentik tests lifecycle
@@ -29,7 +43,14 @@ jobs:
      - uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      # - id: cache-pipenv
      #   uses: actions/cache@v2.1.6
      #   with:
      #     path: ~/.local/share/virtualenvs
      #     key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
      - name: prepare
        # env:
        #   INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
        run: scripts/ci_prepare.sh
      - name: run black
        run: pipenv run black --check authentik tests lifecycle
@@ -40,7 +61,14 @@ jobs:
      - uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      # - id: cache-pipenv
      #   uses: actions/cache@v2.1.6
      #   with:
      #     path: ~/.local/share/virtualenvs
      #     key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
      - name: prepare
        # env:
        #   INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
        run: scripts/ci_prepare.sh
      - name: run isort
        run: pipenv run isort --check authentik tests lifecycle
@@ -51,7 +79,14 @@ jobs:
      - uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      # - id: cache-pipenv
      #   uses: actions/cache@v2.1.6
      #   with:
      #     path: ~/.local/share/virtualenvs
      #     key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
      - name: prepare
        # env:
        #   INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
        run: scripts/ci_prepare.sh
      - name: run bandit
        run: pipenv run bandit -r authentik tests lifecycle
@@ -78,7 +113,14 @@ jobs:
      - uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      # - id: cache-pipenv
      #   uses: actions/cache@v2.1.6
      #   with:
      #     path: ~/.local/share/virtualenvs
      #     key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
      - name: prepare
        # env:
        #   INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
        run: scripts/ci_prepare.sh
      - name: run migrations
        run: pipenv run python -m lifecycle.migrate
@@ -86,6 +128,8 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v2
        with:
          python-version: '3.9'
@@ -94,14 +138,26 @@ jobs:
          # Copy current, latest config to local
          cp authentik/lib/default.yml local.env.yml
          git checkout $(git describe --abbrev=0 --match 'version/*')
      # - id: cache-pipenv
      #   uses: actions/cache@v2.1.6
      #   with:
      #     path: ~/.local/share/virtualenvs
      #     key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
      - name: prepare
        # env:
        #   INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
        run: scripts/ci_prepare.sh
      - name: run migrations to stable
        run: pipenv run python -m lifecycle.migrate
      - name: prepare variables
        id: ev
        run: |
          python ./scripts/gh_do_set_branch.py
      - name: checkout current code
        run: |
          set -x
          git checkout $GITHUB_REF
          git fetch
          git checkout ${{ steps.ev.outputs.branchName }}
          pipenv sync --dev
      - name: migrate to latest
        run: pipenv run python -m lifecycle.migrate
@@ -112,7 +168,14 @@ jobs:
      - uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      # - id: cache-pipenv
      #   uses: actions/cache@v2.1.6
      #   with:
      #     path: ~/.local/share/virtualenvs
      #     key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
      - name: prepare
        # env:
        #   INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
        run: scripts/ci_prepare.sh
      - uses: testspace-com/setup-testspace@v1
        with:
@@ -124,7 +187,7 @@ jobs:
      - name: run testspace
        if: ${{ always() }}
        run: |
          testspace unittest.xml ?add
          testspace [unittest]unittest.xml --link=codecov
      - if: ${{ always() }}
        uses: codecov/codecov-action@v2
  test-integration:
@@ -134,14 +197,20 @@ jobs:
      - uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      # - id: cache-pipenv
      #   uses: actions/cache@v2.1.6
      #   with:
      #     path: ~/.local/share/virtualenvs
      #     key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
      - name: prepare
        # env:
        #   INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
        run: scripts/ci_prepare.sh
      - uses: testspace-com/setup-testspace@v1
        with:
          domain: ${{github.repository_owner}}
      - name: prepare k3d
        run: |
          wget -q -O - https://raw.githubusercontent.com/rancher/k3d/main/install.sh | bash
      - name: Create k8s Kind Cluster
        uses: helm/kind-action@v1.2.0
      - name: run integration
        run: |
          pipenv run make test-integration
@@ -149,7 +218,7 @@ jobs:
      - name: run testspace
        if: ${{ always() }}
        run: |
          testspace unittest.xml ?add
          testspace [integration]unittest.xml --link=codecov
      - if: ${{ always() }}
        uses: codecov/codecov-action@v2
  test-e2e:
@@ -167,11 +236,24 @@ jobs:
      - uses: testspace-com/setup-testspace@v1
        with:
          domain: ${{github.repository_owner}}
      # - id: cache-pipenv
      #   uses: actions/cache@v2.1.6
      #   with:
      #     path: ~/.local/share/virtualenvs
      #     key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
      - name: prepare
        # env:
        #   INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
        run: |
          scripts/ci_prepare.sh
          docker-compose -f tests/e2e/ci.docker-compose.yml up -d
      - id: cache-web
        uses: actions/cache@v2.1.6
        with:
          path: web/dist
          key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/**') }}
      - name: prepare web ui
        if: steps.cache-web.outputs.cache-hit != 'true'
        run: |
          cd web
          npm i
@@ -183,24 +265,9 @@ jobs:
      - name: run testspace
        if: ${{ always() }}
        run: |
          testspace unittest.xml ?add
          testspace [e2e]unittest.xml --link=codecov
      - if: ${{ always() }}
        uses: codecov/codecov-action@v2
  report:
    if: ${{ always() }}
    needs:
      - test-unittest
      - test-integration
      - test-e2e
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: testspace-com/setup-testspace@v1
        with:
          domain: ${{github.repository_owner}}
      - name: finish testspace
        run: |
          testspace ?finish
  build:
    needs:
      - lint-pylint
@@ -220,11 +287,13 @@ jobs:
        uses: docker/setup-buildx-action@v1
      - name: prepare variables
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.HARBOR_USERNAME }}
        run: |
          python ./scripts/gh_do_set_branch.py
      - name: Login to Container Registry
        uses: docker/login-action@v1
        if: ${{ github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository }}
        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
        with:
          registry: beryju.org
          username: ${{ secrets.HARBOR_USERNAME }}
@@ -232,9 +301,9 @@ jobs:
      - name: Building Docker Image
        uses: docker/build-push-action@v2
        with:
          push: ${{ github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository }}
          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
          tags: |
            beryju.org/authentik/server:gh-${{ steps.ev.outputs.branchName }}
            beryju.org/authentik/server:gh-${{ steps.ev.outputs.branchName }}-${{ steps.ev.outputs.timestamp }}
            beryju.org/authentik/server:gh-${{ steps.ev.outputs.branchName }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}
          build-args: |
            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
.github/workflows/ci-outpost.yml (19 changes, vendored)

@@ -2,6 +2,13 @@ name: authentik-ci-outpost

on:
  push:
    branches:
      - master
      - next
      - version-*
  pull_request:
    branches:
      - master

jobs:
  lint-golint:
@@ -11,9 +18,6 @@ jobs:
      - uses: actions/setup-go@v2
        with:
          go-version: '^1.16.3'
      - name: Generate API
        run: |
          make gen-outpost
      - name: Run linter
        run: |
          # Create folder structure for go embeds
@@ -37,17 +41,17 @@
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1.2.0
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: prepare variables
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.HARBOR_USERNAME }}
        run: |
          python ./scripts/gh_do_set_branch.py
      - name: Login to Container Registry
        uses: docker/login-action@v1
        if: ${{ github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository }}
        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
        with:
          registry: beryju.org
          username: ${{ secrets.HARBOR_USERNAME }}
@@ -55,12 +59,11 @@
      - name: Building Docker Image
        uses: docker/build-push-action@v2
        with:
          push: ${{ github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository }}
          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
          tags: |
            beryju.org/authentik/outpost-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchName }}
            beryju.org/authentik/outpost-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchName }}-${{ steps.ev.outputs.timestamp }}
            beryju.org/authentik/outpost-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
          file: ${{ matrix.type }}.Dockerfile
          platforms: linux/amd64,linux/arm64
          build-args: |
            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
.github/workflows/ci-web.yml (7 changes, vendored)

@@ -2,6 +2,13 @@ name: authentik-ci-web

on:
  push:
    branches:
      - master
      - next
      - version-*
  pull_request:
    branches:
      - master

jobs:
  lint-eslint:
.github/workflows/release-publish.yml (24 changes, vendored)

@@ -33,14 +33,14 @@
        with:
          push: ${{ github.event_name == 'release' }}
          tags: |
            beryju/authentik:2021.8.4,
            beryju/authentik:2021.9.2,
            beryju/authentik:latest,
            ghcr.io/goauthentik/server:2021.8.4,
            ghcr.io/goauthentik/server:2021.9.2,
            ghcr.io/goauthentik/server:latest
          platforms: linux/amd64,linux/arm64
          context: .
      - name: Building Docker Image (stable)
        if: ${{ github.event_name == 'release' && !contains('2021.8.4', 'rc') }}
        if: ${{ github.event_name == 'release' && !contains('2021.9.2', 'rc') }}
        run: |
          docker pull beryju/authentik:latest
          docker tag beryju/authentik:latest beryju/authentik:stable
@@ -75,14 +75,14 @@
        with:
          push: ${{ github.event_name == 'release' }}
          tags: |
            beryju/authentik-proxy:2021.8.4,
            beryju/authentik-proxy:2021.9.2,
            beryju/authentik-proxy:latest,
            ghcr.io/goauthentik/proxy:2021.8.4,
            ghcr.io/goauthentik/proxy:2021.9.2,
            ghcr.io/goauthentik/proxy:latest
          file: proxy.Dockerfile
          platforms: linux/amd64,linux/arm64
      - name: Building Docker Image (stable)
        if: ${{ github.event_name == 'release' && !contains('2021.8.4', 'rc') }}
        if: ${{ github.event_name == 'release' && !contains('2021.9.2', 'rc') }}
        run: |
          docker pull beryju/authentik-proxy:latest
          docker tag beryju/authentik-proxy:latest beryju/authentik-proxy:stable
@@ -117,14 +117,14 @@
        with:
          push: ${{ github.event_name == 'release' }}
          tags: |
            beryju/authentik-ldap:2021.8.4,
            beryju/authentik-ldap:2021.9.2,
            beryju/authentik-ldap:latest,
            ghcr.io/goauthentik/ldap:2021.8.4,
            ghcr.io/goauthentik/ldap:2021.9.2,
            ghcr.io/goauthentik/ldap:latest
          file: ldap.Dockerfile
          platforms: linux/amd64,linux/arm64
      - name: Building Docker Image (stable)
        if: ${{ github.event_name == 'release' && !contains('2021.8.4', 'rc') }}
        if: ${{ github.event_name == 'release' && !contains('2021.9.2', 'rc') }}
        run: |
          docker pull beryju/authentik-ldap:latest
          docker tag beryju/authentik-ldap:latest beryju/authentik-ldap:stable
@@ -157,9 +157,9 @@
    steps:
      - uses: actions/checkout@v2
      - name: Setup Node.js environment
        uses: actions/setup-node@v2.4.0
        uses: actions/setup-node@v2
        with:
          node-version: 12.x
          node-version: '16'
      - name: Build web api client and web ui
        run: |
          export NODE_ENV=production
@@ -175,7 +175,7 @@
          SENTRY_PROJECT: authentik
          SENTRY_URL: https://sentry.beryju.org
        with:
          version: authentik@2021.8.4
          version: authentik@2021.9.2
          environment: beryjuorg-prod
          sourcemaps: './web/dist'
          url_prefix: '~/static/dist'
.github/workflows/web-api-publish.yml (2 changes, vendored)

@@ -12,7 +12,7 @@ jobs:
      # Setup .npmrc file to publish to npm
      - uses: actions/setup-node@v2
        with:
          node-version: '16.x'
          node-version: '16'
          registry-url: 'https://registry.npmjs.org'
      - name: Generate API Client
        run: make gen-web
Dockerfile (27 changes)

@@ -18,22 +18,7 @@ COPY ./website /static/
ENV NODE_ENV=production
RUN cd /static && npm i && npm run build-docs-only

# Stage 3: Generate API Client
FROM openapitools/openapi-generator-cli as go-api-builder

COPY ./schema.yml /local/schema.yml

RUN docker-entrypoint.sh generate \
    --git-host goauthentik.io \
    --git-repo-id outpost \
    --git-user-id api \
    -i /local/schema.yml \
    -g go \
    -o /local/api \
    --additional-properties=packageName=api,enumClassPrefix=true,useOneOfDiscriminatorLookup=true && \
    rm -f /local/api/go.mod /local/api/go.sum

# Stage 4: Build webui
# Stage 3: Build webui
FROM node as web-builder

COPY ./web /static/
@@ -41,8 +26,8 @@ COPY ./web /static/
ENV NODE_ENV=production
RUN cd /static && npm i && npm run build

# Stage 5: Build go proxy
FROM golang:1.17.0 AS builder
# Stage 4: Build go proxy
FROM golang:1.17.1 AS builder

WORKDIR /work

@@ -52,7 +37,6 @@ COPY --from=web-builder /static/dist/ /work/web/dist/
COPY --from=web-builder /static/authentik/ /work/web/authentik/
COPY --from=website-builder /static/help/ /work/website/help/

COPY --from=go-api-builder /local/api api
COPY ./cmd /work/cmd
COPY ./web/static.go /work/web/static.go
COPY ./website/static.go /work/website/static.go
@@ -62,7 +46,7 @@ COPY ./go.sum /work/go.sum

RUN go build -o /work/authentik ./cmd/server/main.go

# Stage 6: Run
# Stage 5: Run
FROM python:3.9-slim-buster

WORKDIR /
@@ -97,6 +81,7 @@ COPY --from=builder /work/authentik /authentik-proxy

USER authentik
ENV TMPDIR /dev/shm/
ENV PYTHONUBUFFERED 1
ENV PYTHONUNBUFFERED 1
ENV prometheus_multiproc_dir /dev/shm/
ENV PATH "/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/lifecycle"
ENTRYPOINT [ "/lifecycle/ak" ]
Makefile (8 changes)

@@ -7,8 +7,6 @@ NPM_VERSION = $(shell python -m scripts.npm_version)
all: lint-fix lint test gen

test-integration:
	k3d cluster create || exit 0
	k3d kubeconfig write -o ~/.kube/config --overwrite
	coverage run manage.py test -v 3 tests/integration

test-e2e:
@@ -61,13 +59,13 @@ gen-outpost:
	-i /local/schema.yml \
	-g go \
	-o /local/api \
	--additional-properties=packageName=api,enumClassPrefix=true,useOneOfDiscriminatorLookup=true
	--additional-properties=packageName=api,enumClassPrefix=true,useOneOfDiscriminatorLookup=true,disallowAdditionalPropertiesIfNotPresent=false
	rm -f api/go.mod api/go.sum

gen: gen-build gen-clean gen-web gen-outpost
gen: gen-build gen-clean gen-web

migrate:
	python -m lifecycle.migrate

run:
	go run -v cmd/server/main.go
	WORKERS=1 go run -v cmd/server/main.go
Pipfile (3 changes)

@@ -49,9 +49,6 @@ ua-parser = "*"
deepmerge = "*"
colorama = "*"

[requires]
python_version = "3.9"

[dev-packages]
bandit = "*"
black = "==21.5b1"
Pipfile.lock (204 changes, generated)

Lockfile regenerated; the sha256 hash sets are updated alongside every bumped pin:
_meta hash: f0befa9b3dacc1c3363b9442fa7a43f6be2c46a8fcb80a994230d288a384e54d -> 19d5324fd1a4af125ed57a683030ca14ee2d3648117748e4b32656875484728e
"requires": {"python_version": "3.9"} -> {}
boto3 ==1.18.34 -> ==1.18.46
botocore ==1.21.34 -> ==1.21.46
charset-normalizer ==2.0.4 -> ==2.0.6 (default and develop)
coverage: additional wheel hashes added
django-otp ==1.0.6 -> ==1.1.1
docker ==5.0.1 -> ==5.0.2
drf-spectacular ==0.18.1 -> ==0.19.0
geoip2 ==4.2.0 -> ==4.3.0
google-auth ==2.0.2 -> ==2.1.0
maxminddb ==2.0.3 -> ==2.1.0
sentry-sdk ==1.3.1 -> ==1.4.1
sqlparse ==0.4.1 -> ==0.4.2
urllib3 ==1.26.6 -> ==1.26.7 (default and develop)
websockets ==9.1 -> ==10.0
xmlsec ==1.3.11 -> ==1.3.12
astroid ==2.7.3 -> ==2.8.0 (develop)
gitpython ==3.1.18 -> ==3.1.24 (develop; marker python_version >= '3.6' -> >= '3.7')
package pinned at ==5.9.3 (develop): marker python_version < '4' -> python_version < '4.0'
pylint ==2.10.2 -> ==2.11.1 (develop)
typing-extensions ==3.10.0.2 added (develop)
README.md (10 changes)

@@ -4,14 +4,14 @@

---

[](https://discord.gg/jg33eMhnj6)



[](https://discord.gg/jg33eMhnj6)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-main.yml)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-outpost.yml)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-web.yml)
[](https://codecov.io/gh/goauthentik/authentik)
[](https://goauthentik.testspace.com/)



[](https://www.transifex.com/beryjuorg/authentik/)

## What is authentik?
@@ -6,9 +6,8 @@

| Version | Supported |
| ---------- | ------------------ |
| 2021.5.x | :white_check_mark: |
| 2021.6.x | :white_check_mark: |
| 2021.7.x | :white_check_mark: |
| 2021.8.x | :white_check_mark: |

## Reporting a Vulnerability
@@ -1,3 +1,3 @@
"""authentik"""
__version__ = "2021.8.4"
__version__ = "2021.9.2"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@@ -6,12 +6,14 @@ from django.core.cache import cache
from django.core.validators import URLValidator
from packaging.version import parse
from prometheus_client import Info
from requests import RequestException, get
from requests import RequestException
from structlog.stdlib import get_logger

from authentik import ENV_GIT_HASH_KEY, __version__
from authentik.events.models import Event, EventAction
from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
from authentik.lib.config import CONFIG
from authentik.lib.utils.http import get_http_session
from authentik.root.celery import CELERY_APP

LOGGER = get_logger()
@@ -36,12 +38,17 @@ def _set_prom_info():
@CELERY_APP.task(bind=True, base=MonitoredTask)
def update_latest_version(self: MonitoredTask):
    """Update latest version info"""
    if CONFIG.y_bool("disable_update_check"):
        cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
        self.set_status(TaskResult(TaskResultStatus.WARNING, messages=["Version check disabled."]))
        return
    try:
        response = get("https://api.github.com/repos/goauthentik/authentik/releases/latest")
        response = get_http_session().get(
            "https://version.goauthentik.io/version.json",
        )
        response.raise_for_status()
        data = response.json()
        tag_name = data.get("tag_name")
        upstream_version = tag_name.split("/")[1]
        upstream_version = data.get("stable", {}).get("version")
        cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT)
        self.set_status(
            TaskResult(TaskResultStatus.SUCCESSFUL, ["Successfully updated latest Version"])
@@ -58,7 +65,7 @@ def update_latest_version(self: MonitoredTask):
        ).exists():
            return
        event_dict = {"new_version": upstream_version}
        if match := re.search(URL_FINDER, data.get("body", "")):
        if match := re.search(URL_FINDER, data.get("stable", {}).get("changelog", "")):
            event_dict["message"] = f"Changelog: {match.group()}"
        Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
    except (RequestException, IndexError) as exc:
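The rewritten update check above switches from the GitHub releases API to https://version.goauthentik.io/version.json and reads the stable.version and stable.changelog fields. A minimal, standalone sketch of that request flow, using plain requests instead of authentik's get_http_session; the URL_FINDER pattern here is only a stand-in for the one defined elsewhere in this module:

```python
import re
from typing import Optional, Tuple

import requests

URL_FINDER = r"https?://[^\s]+"  # stand-in pattern, not authentik's URL_FINDER


def fetch_latest_version() -> Tuple[str, Optional[str]]:
    """Return (stable version, changelog URL) from the version.json payload shape used above."""
    response = requests.get("https://version.goauthentik.io/version.json", timeout=10)
    response.raise_for_status()
    data = response.json()
    version = data.get("stable", {}).get("version", "0.0.0")
    match = re.search(URL_FINDER, data.get("stable", {}).get("changelog", ""))
    return version, match.group() if match else None
```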
@@ -1,52 +1,28 @@
"""test admin tasks"""
import json
from dataclasses import dataclass
from unittest.mock import Mock, patch

from django.core.cache import cache
from django.test import TestCase
from requests.exceptions import RequestException
from requests_mock import Mocker

from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version
from authentik.events.models import Event, EventAction


@dataclass
class MockResponse:
    """Mock class to emulate the methods of requests's Response we need"""

    status_code: int
    response: str

    def json(self) -> dict:
        """Get json parsed response"""
        return json.loads(self.response)

    def raise_for_status(self):
        """raise RequestException if status code is 400 or more"""
        if self.status_code >= 400:
            raise RequestException


REQUEST_MOCK_VALID = Mock(
    return_value=MockResponse(
        200,
        """{
            "tag_name": "version/99999999.9999999",
            "body": "https://goauthentik.io/test"
        }""",
    )
)

REQUEST_MOCK_INVALID = Mock(return_value=MockResponse(400, "{}"))
RESPONSE_VALID = {
    "$schema": "https://version.goauthentik.io/schema.json",
    "stable": {
        "version": "99999999.9999999",
        "changelog": "See https://goauthentik.io/test",
        "reason": "bugfix",
    },
}


class TestAdminTasks(TestCase):
    """test admin tasks"""

    @patch("authentik.admin.tasks.get", REQUEST_MOCK_VALID)
    def test_version_valid_response(self):
        """Test Update checker with valid response"""
        with Mocker() as mocker:
            mocker.get("https://version.goauthentik.io/version.json", json=RESPONSE_VALID)
            update_latest_version.delay().get()
            self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999")
            self.assertTrue(
@@ -69,9 +45,10 @@ class TestAdminTasks(TestCase):
                1,
            )

    @patch("authentik.admin.tasks.get", REQUEST_MOCK_INVALID)
    def test_version_error(self):
        """Test Update checker with invalid response"""
        with Mocker() as mocker:
            mocker.get("https://version.goauthentik.io/version.json", status_code=400)
            update_latest_version.delay().get()
            self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
            self.assertFalse(
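The tests above move from hand-rolled Mock responses to requests_mock's Mocker. A self-contained illustration of that pattern outside Django, assuming the requests and requests-mock packages are installed:

```python
import requests
from requests_mock import Mocker

# Any requests call made inside the context manager is intercepted; no network is needed.
with Mocker() as mocker:
    mocker.get(
        "https://version.goauthentik.io/version.json",
        json={"stable": {"version": "99999999.9999999"}},
    )
    data = requests.get("https://version.goauthentik.io/version.json").json()
    assert data["stable"]["version"] == "99999999.9999999"
```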
@@ -40,7 +40,6 @@ def bearer_auth(raw_header: bytes) -> Optional[User]:
        raise AuthenticationFailed("Malformed header")
    tokens = Token.filter_not_expired(key=password, intent=TokenIntents.INTENT_API)
    if not tokens.exists():
        LOGGER.info("Authenticating via secret_key")
        user = token_secret_key(password)
        if not user:
            raise AuthenticationFailed("Token invalid/expired")
@@ -58,6 +57,7 @@ def token_secret_key(value: str) -> Optional[User]:
    outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
    if not outposts:
        return None
    LOGGER.info("Authenticating via secret_key")
    outpost = outposts.first()
    return outpost.user
@@ -33,3 +33,12 @@ class OwnerPermissions(BasePermission):
        if owner != request.user:
            return False
        return True


class OwnerSuperuserPermissions(OwnerPermissions):
    """Similar to OwnerPermissions, except always allow access for superusers"""

    def has_object_permission(self, request: Request, view, obj: Model) -> bool:
        if request.user.is_superuser:
            return True
        return super().has_object_permission(request, view, obj)
@@ -5,6 +5,9 @@ from typing import Callable, Optional
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger

LOGGER = get_logger()


def permission_required(perm: Optional[str] = None, other_perms: Optional[list[str]] = None):
@@ -18,10 +21,12 @@ def permission_required(perm: Optional[str] = None, other_perms: Optional[list[str]] = None):
        if perm:
            obj = self.get_object()
            if not request.user.has_perm(perm, obj):
                LOGGER.debug("denying access for object", user=request.user, perm=perm, obj=obj)
                return self.permission_denied(request)
        if other_perms:
            for other_perm in other_perms:
                if not request.user.has_perm(other_perm):
                    LOGGER.debug("denying access for other", user=request.user, perm=perm)
                    return self.permission_denied(request)
        return func(self, request, *args, **kwargs)
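For context, a decorator with the signature above is normally stacked on a DRF viewset action. A hedged sketch with placeholder names; WidgetViewSet and the permission strings are not authentik code, permission_required is assumed to be importable from the module shown above, and the snippet presumes a Django project in which such a viewset is registered:

```python
from rest_framework.decorators import action
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet


class WidgetViewSet(ModelViewSet):  # placeholder viewset, not authentik code
    @permission_required("app_label.change_widget", ["app_label.view_widget"])
    @action(detail=True, methods=["post"])
    def publish(self, request: Request, pk=None) -> Response:
        # The first permission is checked against the object, the list is checked globally.
        return Response({"published": True})
```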
@@ -31,7 +31,7 @@ VALIDATION_ERROR = build_object_type(
        "non_field_errors": build_array_type(build_standard_type(OpenApiTypes.STR)),
        "code": build_standard_type(OpenApiTypes.STR),
    },
    required=["detail"],
    required=[],
    additionalProperties={},
)
authentik/api/tasks.py (new file, 19 lines)

@@ -0,0 +1,19 @@
"""API tasks"""

from authentik.lib.utils.http import get_http_session
from authentik.root.celery import CELERY_APP

SENTRY_SESSION = get_http_session()


@CELERY_APP.task()
def sentry_proxy(payload: str):
    """Relay data to sentry"""
    SENTRY_SESSION.post(
        "https://sentry.beryju.org/api/8/envelope/",
        data=payload,
        headers={
            "Content-Type": "application/octet-stream",
        },
        timeout=10,
    )
@@ -4,17 +4,19 @@ from json import loads
from django.conf import settings
from django.http.request import HttpRequest
from django.http.response import HttpResponse
from requests import post
from requests.exceptions import RequestException
from rest_framework.authentication import SessionAuthentication
from rest_framework.parsers import BaseParser
from rest_framework.permissions import AllowAny
from rest_framework.request import Request
from rest_framework.throttling import AnonRateThrottle
from rest_framework.views import APIView
from structlog.stdlib import get_logger

from authentik.api.tasks import sentry_proxy
from authentik.lib.config import CONFIG

LOGGER = get_logger()


class PlainTextParser(BaseParser):
    """Plain text parser."""
@@ -46,21 +48,18 @@ class SentryTunnelView(APIView):
        """Sentry tunnel, to prevent ad blockers from blocking sentry"""
        # Only allow usage of this endpoint when error reporting is enabled
        if not CONFIG.y_bool("error_reporting.enabled", False):
            LOGGER.debug("error reporting disabled")
            return HttpResponse(status=400)
        # Body is 2 json objects separated by \n
        full_body = request.body
        header = loads(full_body.splitlines()[0])
        lines = full_body.splitlines()
        if len(lines) < 1:
            return HttpResponse(status=400)
        header = loads(lines[0])
        # Check that the DSN is what we expect
        dsn = header.get("dsn", "")
        if dsn != settings.SENTRY_DSN:
            LOGGER.debug("Invalid dsn", have=dsn, expected=settings.SENTRY_DSN)
            return HttpResponse(status=400)
        response = post(
            "https://sentry.beryju.org/api/8/envelope/",
            data=full_body,
            headers={"Content-Type": "application/octet-stream"},
        )
        try:
            response.raise_for_status()
        except RequestException:
            return HttpResponse(status=500)
        return HttpResponse(status=response.status_code)
        sentry_proxy.delay(full_body.decode())
        return HttpResponse(status=204)
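Seen from the client side, the tunnel receives a raw Sentry envelope (newline-separated JSON objects) and now returns 204 immediately while the relay to Sentry happens in the Celery task above. A rough, illustrative client call; the host is a placeholder, the /api/v3/ prefix and text/plain content type are assumptions based on the v3 URL registration and the plain-text parser shown in this diff, and the DSN value is made up:

```python
import json

import requests

envelope = "\n".join([
    json.dumps({"dsn": "https://publickey@sentry.example.com/8"}),  # must match settings.SENTRY_DSN
    json.dumps({"type": "event"}),
    json.dumps({"message": "hello from the tunnel"}),
])

resp = requests.post(
    "https://authentik.example.com/api/v3/sentry/",  # placeholder host and assumed mount prefix
    data=envelope.encode(),
    headers={"Content-Type": "text/plain"},
    timeout=10,
)
print(resp.status_code)  # expected 204 once the payload has been queued for async relay
```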
@@ -1,6 +1,6 @@
"""api v3 urls"""
from django.urls import path
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.cache import cache_page
from drf_spectacular.views import SpectacularAPIView
from rest_framework import routers

@@ -24,6 +24,7 @@ from authentik.core.api.users import UserViewSet
from authentik.crypto.api import CertificateKeyPairViewSet
from authentik.events.api.event import EventViewSet
from authentik.events.api.notification import NotificationViewSet
from authentik.events.api.notification_mapping import NotificationWebhookMappingViewSet
from authentik.events.api.notification_rule import NotificationRuleViewSet
from authentik.events.api.notification_transport import NotificationTransportViewSet
from authentik.flows.api.bindings import FlowStageBindingViewSet
@@ -98,6 +99,7 @@ from authentik.stages.user_write.api import UserWriteStageViewSet
from authentik.tenants.api import TenantViewSet

router = routers.DefaultRouter()
router.include_format_suffixes = False

router.register("admin/system_tasks", TaskViewSet, basename="admin_system_tasks")
router.register("admin/apps", AppsViewSet, basename="apps")
@@ -159,6 +161,7 @@ router.register("propertymappings/all", PropertyMappingViewSet)
router.register("propertymappings/ldap", LDAPPropertyMappingViewSet)
router.register("propertymappings/saml", SAMLPropertyMappingViewSet)
router.register("propertymappings/scope", ScopeMappingViewSet)
router.register("propertymappings/notification", NotificationWebhookMappingViewSet)

router.register("authenticators/duo", DuoDeviceViewSet)
router.register("authenticators/static", StaticDeviceViewSet)
@@ -225,7 +228,7 @@ urlpatterns = (
        FlowExecutorView.as_view(),
        name="flow-executor",
    ),
    path("sentry/", csrf_exempt(SentryTunnelView.as_view()), name="sentry"),
    path("schema/", SpectacularAPIView.as_view(), name="schema"),
    path("sentry/", SentryTunnelView.as_view(), name="sentry"),
    path("schema/", cache_page(86400)(SpectacularAPIView.as_view()), name="schema"),
]
)
@@ -67,7 +67,7 @@ class ApplicationSerializer(ModelSerializer):
class ApplicationViewSet(UsedByMixin, ModelViewSet):
    """Application Viewset"""

    queryset = Application.objects.all()
    queryset = Application.objects.all().prefetch_related("provider")
    serializer_class = ApplicationSerializer
    search_fields = [
        "name",
@ -11,6 +11,7 @@ from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
from ua_parser import user_agent_parser
|
||||
|
||||
from authentik.api.authorization import OwnerSuperuserPermissions
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.models import AuthenticatedSession
|
||||
from authentik.events.geo import GEOIP_READER, GeoIPDict
|
||||
@ -102,11 +103,8 @@ class AuthenticatedSessionViewSet(
|
||||
search_fields = ["user__username", "last_ip", "last_user_agent"]
|
||||
filterset_fields = ["user__username", "last_ip", "last_user_agent"]
|
||||
ordering = ["user__username"]
|
||||
filter_backends = [
|
||||
DjangoFilterBackend,
|
||||
OrderingFilter,
|
||||
SearchFilter,
|
||||
]
|
||||
permission_classes = [OwnerSuperuserPermissions]
|
||||
filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
|
||||
|
||||
def get_queryset(self):
|
||||
user = self.request.user if self.request else get_anonymous_user()
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
from django.db.models.query import QuerySet
|
||||
from django_filters.filters import ModelMultipleChoiceFilter
|
||||
from django_filters.filterset import FilterSet
|
||||
from rest_framework.fields import BooleanField, CharField, JSONField
|
||||
from rest_framework.fields import CharField, JSONField
|
||||
from rest_framework.serializers import ListSerializer, ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
from rest_framework_guardian.filters import ObjectPermissionsFilter
|
||||
@ -15,7 +15,6 @@ from authentik.core.models import Group, User
|
||||
class GroupMemberSerializer(ModelSerializer):
|
||||
"""Stripped down user serializer to show relevant users for groups"""
|
||||
|
||||
is_superuser = BooleanField(read_only=True)
|
||||
avatar = CharField(read_only=True)
|
||||
attributes = JSONField(validators=[is_dict], required=False)
|
||||
uid = CharField(read_only=True)
|
||||
@ -29,7 +28,6 @@ class GroupMemberSerializer(ModelSerializer):
|
||||
"name",
|
||||
"is_active",
|
||||
"last_login",
|
||||
"is_superuser",
|
||||
"email",
|
||||
"avatar",
|
||||
"attributes",
|
||||
@ -81,7 +79,7 @@ class GroupFilter(FilterSet):
|
||||
class GroupViewSet(UsedByMixin, ModelViewSet):
|
||||
"""Group Viewset"""
|
||||
|
||||
queryset = Group.objects.all()
|
||||
queryset = Group.objects.all().select_related("parent").prefetch_related("users")
|
||||
serializer_class = GroupSerializer
|
||||
search_fields = ["name", "is_superuser"]
|
||||
filterset_class = GroupFilter
|
||||
|
||||
@ -95,7 +95,9 @@ class SourceViewSet(
|
||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
||||
def user_settings(self, request: Request) -> Response:
|
||||
"""Get all sources the user can configure"""
|
||||
_all_sources: Iterable[Source] = Source.objects.filter(enabled=True).select_subclasses()
|
||||
_all_sources: Iterable[Source] = (
|
||||
Source.objects.filter(enabled=True).select_subclasses().order_by("name")
|
||||
)
|
||||
matching_sources: list[UserSettingSerializer] = []
|
||||
for source in _all_sources:
|
||||
user_settings = source.ui_user_settings
|
||||
|
||||
@ -2,15 +2,19 @@
|
||||
from typing import Any
|
||||
|
||||
from django.http.response import Http404
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from drf_spectacular.utils import OpenApiResponse, extend_schema
|
||||
from guardian.shortcuts import get_anonymous_user
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.fields import CharField
|
||||
from rest_framework.filters import OrderingFilter, SearchFilter
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.api.authorization import OwnerSuperuserPermissions
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.users import UserSerializer
|
||||
@ -79,13 +83,23 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
|
||||
"expires",
|
||||
"expiring",
|
||||
]
|
||||
ordering = ["expires"]
|
||||
ordering = ["identifier", "expires"]
|
||||
permission_classes = [OwnerSuperuserPermissions]
|
||||
filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
|
||||
|
||||
def get_queryset(self):
|
||||
user = self.request.user if self.request else get_anonymous_user()
|
||||
if user.is_superuser:
|
||||
return super().get_queryset()
|
||||
return super().get_queryset().filter(user=user.pk)
|
||||
|
||||
def perform_create(self, serializer: TokenSerializer):
|
||||
serializer.save(
|
||||
if not self.request.user.is_superuser:
|
||||
return serializer.save(
|
||||
user=self.request.user,
|
||||
expiring=self.request.user.attributes.get(USER_ATTRIBUTE_TOKEN_EXPIRING, True),
|
||||
)
|
||||
return super().perform_create(serializer)
|
||||
|
||||
@permission_required("authentik_core.view_token_key")
|
||||
@extend_schema(
|
||||
|
||||
@ -1,4 +1,5 @@
|
||||
"""User API Views"""
|
||||
from datetime import timedelta
|
||||
from json import loads
|
||||
from typing import Optional
|
||||
|
||||
@ -7,6 +8,8 @@ from django.db.transaction import atomic
|
||||
from django.db.utils import IntegrityError
|
||||
from django.urls import reverse_lazy
|
||||
from django.utils.http import urlencode
|
||||
from django.utils.text import slugify
|
||||
from django.utils.timezone import now
|
||||
from django.utils.translation import gettext as _
|
||||
from django_filters.filters import BooleanFilter, CharFilter, ModelMultipleChoiceFilter
|
||||
from django_filters.filterset import FilterSet
|
||||
@ -271,9 +274,10 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
||||
)
|
||||
group.users.add(user)
|
||||
token = Token.objects.create(
|
||||
identifier=f"service-account-{username}-password",
|
||||
identifier=slugify(f"service-account-{username}-password"),
|
||||
intent=TokenIntents.INTENT_APP_PASSWORD,
|
||||
user=user,
|
||||
expires=now() + timedelta(days=360),
|
||||
)
|
||||
return Response({"username": user.username, "token": token.key})
|
||||
except (IntegrityError) as exc:
|
||||
|
||||
@ -184,7 +184,7 @@ class SourceFlowManager:
|
||||
# Ensure redirect is carried through when user was trying to
|
||||
# authorize application
|
||||
final_redirect = self.request.session.get(SESSION_KEY_GET, {}).get(
|
||||
NEXT_ARG_NAME, "authentik_core:if-admin"
|
||||
NEXT_ARG_NAME, "authentik_core:if-user"
|
||||
)
|
||||
kwargs.update(
|
||||
{
|
||||
@ -243,9 +243,9 @@ class SourceFlowManager:
|
||||
return self.handle_auth_user(connection)
|
||||
return redirect(
|
||||
reverse(
|
||||
"authentik_core:if-admin",
|
||||
"authentik_core:if-user",
|
||||
)
|
||||
+ f"#/user;page-{self.source.slug}"
|
||||
+ f"#/settings;page-{self.source.slug}"
|
||||
)
|
||||
|
||||
def handle_enroll(
|
||||
|
||||
@ -28,3 +28,7 @@ class PostUserEnrollmentStage(StageView):
|
||||
source=connection.source,
|
||||
).from_http(self.request)
|
||||
return self.executor.stage_ok()
|
||||
|
||||
def post(self, request: HttpRequest) -> HttpResponse:
|
||||
"""Wrapper for post requests"""
|
||||
return self.get(request)
|
||||
|
||||
@ -8,16 +8,15 @@
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
|
||||
<title>{% block title %}{% trans title|default:tenant.branding_title %}{% endblock %}</title>
|
||||
<link rel="shortcut icon" type="image/png" href="{% static 'dist/assets/icons/icon.png' %}?v={{ ak_version }}">
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly-base.css' %}?v={{ ak_version }}">
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/page.css' %}?v={{ ak_version }}">
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/empty-state.css' %}?v={{ ak_version }}">
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/spinner.css' %}?v={{ ak_version }}">
|
||||
<link rel="shortcut icon" type="image/png" href="{% static 'dist/assets/icons/icon.png' %}">
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly-base.css' %}">
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/page.css' %}">
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/empty-state.css' %}">
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/spinner.css' %}">
|
||||
{% block head_before %}
|
||||
{% endblock %}
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}?v={{ ak_version }}">
|
||||
<script src="{% static 'dist/poly.js' %}?v={{ ak_version }}" type="module"></script>
|
||||
<script>window["polymerSkipLoadingFontRoboto"] = true;</script>
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
|
||||
<script src="{% static 'dist/poly.js' %}" type="module"></script>
|
||||
{% block head %}
|
||||
{% endblock %}
|
||||
</head>
|
||||
|
||||
@ -4,7 +4,7 @@
|
||||
{% load i18n %}
|
||||
|
||||
{% block head %}
|
||||
<script src="{% static 'dist/AdminInterface.js' %}?v={{ ak_version }}" type="module"></script>
|
||||
<script src="{% static 'dist/AdminInterface.js' %}" type="module"></script>
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
|
||||
@ -21,7 +21,7 @@ You've logged out of {{ application }}.
|
||||
{% endblocktrans %}
|
||||
</p>
|
||||
|
||||
<a id="ak-back-home" href="{% url 'authentik_core:if-admin' %}" class="pf-c-button pf-m-primary">{% trans 'Go back to overview' %}</a>
|
||||
<a id="ak-back-home" href="{% url 'authentik_core:root-redirect' %}" class="pf-c-button pf-m-primary">{% trans 'Go back to overview' %}</a>
|
||||
|
||||
<a id="logout" href="{% url 'authentik_flows:default-invalidation' %}" class="pf-c-button pf-m-secondary">{% trans 'Log out of authentik' %}</a>
|
||||
|
||||
|
||||
@ -4,13 +4,14 @@
|
||||
{% load i18n %}
|
||||
|
||||
{% block head_before %}
|
||||
{{ block.super }}
|
||||
{% if flow.compatibility_mode %}
|
||||
<script>ShadyDOM = { force: !navigator.webdriver };</script>
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
<script src="{% static 'dist/FlowInterface.js' %}?v={{ ak_version }}" type="module"></script>
|
||||
<script src="{% static 'dist/FlowInterface.js' %}" type="module"></script>
|
||||
<style>
|
||||
.pf-c-background-image::before {
|
||||
--ak-flow-background: url("{{ flow.background_url }}");
|
||||
|
||||
28
authentik/core/templates/if/user.html
Normal file
@ -0,0 +1,28 @@
|
||||
{% extends "base/skeleton.html" %}
|
||||
|
||||
{% load static %}
|
||||
{% load i18n %}
|
||||
|
||||
{% block head %}
|
||||
<script src="{% static 'dist/UserInterface.js' %}" type="module"></script>
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
<ak-message-container></ak-message-container>
|
||||
<ak-interface-user>
|
||||
<section class="ak-static-page pf-c-page__main-section pf-m-no-padding-mobile pf-m-xl">
|
||||
<div class="pf-c-empty-state" style="height: 100vh;">
|
||||
<div class="pf-c-empty-state__content">
|
||||
<span class="pf-c-spinner pf-m-xl pf-c-empty-state__icon" role="progressbar" aria-valuetext="{% trans 'Loading...' %}">
|
||||
<span class="pf-c-spinner__clipper"></span>
|
||||
<span class="pf-c-spinner__lead-ball"></span>
|
||||
<span class="pf-c-spinner__tail-ball"></span>
|
||||
</span>
|
||||
<h1 class="pf-c-title pf-m-lg">
|
||||
{% trans "Loading..." %}
|
||||
</h1>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</ak-interface-user>
|
||||
{% endblock %}
|
||||
@ -4,7 +4,7 @@
|
||||
{% load i18n %}
|
||||
|
||||
{% block head_before %}
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}?v={{ ak_version }}">
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}">
|
||||
{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
|
||||
@ -58,4 +58,4 @@ class TestImpersonation(TestCase):
|
||||
self.client.force_login(self.other_user)
|
||||
|
||||
response = self.client.get(reverse("authentik_core:impersonate-end"))
|
||||
self.assertRedirects(response, reverse("authentik_core:if-admin"))
|
||||
self.assertRedirects(response, reverse("authentik_core:if-user"))
|
||||
|
||||
@ -1,4 +1,6 @@
|
||||
"""Test token API"""
|
||||
from json import loads
|
||||
|
||||
from django.urls.base import reverse
|
||||
from django.utils.timezone import now
|
||||
from guardian.shortcuts import get_anonymous_user
|
||||
@ -13,7 +15,8 @@ class TestTokenAPI(APITestCase):
|
||||
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
self.user = User.objects.get(username="akadmin")
|
||||
self.user = User.objects.create(username="testuser")
|
||||
self.admin = User.objects.get(username="akadmin")
|
||||
self.client.force_login(self.user)
|
||||
|
||||
def test_token_create(self):
|
||||
@ -55,3 +58,29 @@ class TestTokenAPI(APITestCase):
|
||||
clean_expired_models.delay().get()
|
||||
token.refresh_from_db()
|
||||
self.assertNotEqual(key, token.key)
|
||||
|
||||
def test_list(self):
|
||||
"""Test Token List (Test normal authentication)"""
|
||||
token_should: Token = Token.objects.create(
|
||||
identifier="test", expiring=False, user=self.user
|
||||
)
|
||||
Token.objects.create(identifier="test-2", expiring=False, user=get_anonymous_user())
|
||||
response = self.client.get(reverse(("authentik_api:token-list")))
|
||||
body = loads(response.content)
|
||||
self.assertEqual(len(body["results"]), 1)
|
||||
self.assertEqual(body["results"][0]["identifier"], token_should.identifier)
|
||||
|
||||
def test_list_admin(self):
|
||||
"""Test Token List (Test with admin auth)"""
|
||||
self.client.force_login(self.admin)
|
||||
token_should: Token = Token.objects.create(
|
||||
identifier="test", expiring=False, user=self.user
|
||||
)
|
||||
token_should_not: Token = Token.objects.create(
|
||||
identifier="test-2", expiring=False, user=get_anonymous_user()
|
||||
)
|
||||
response = self.client.get(reverse(("authentik_api:token-list")))
|
||||
body = loads(response.content)
|
||||
self.assertEqual(len(body["results"]), 2)
|
||||
self.assertEqual(body["results"][0]["identifier"], token_should.identifier)
|
||||
self.assertEqual(body["results"][1]["identifier"], token_should_not.identifier)
|
||||
|
||||
@ -12,7 +12,7 @@ from authentik.core.views.session import EndSessionView
|
||||
urlpatterns = [
|
||||
path(
|
||||
"",
|
||||
login_required(RedirectView.as_view(pattern_name="authentik_core:if-admin")),
|
||||
login_required(RedirectView.as_view(pattern_name="authentik_core:if-user")),
|
||||
name="root-redirect",
|
||||
),
|
||||
# Impersonation
|
||||
@ -32,6 +32,11 @@ urlpatterns = [
|
||||
ensure_csrf_cookie(TemplateView.as_view(template_name="if/admin.html")),
|
||||
name="if-admin",
|
||||
),
|
||||
path(
|
||||
"if/user/",
|
||||
ensure_csrf_cookie(TemplateView.as_view(template_name="if/user.html")),
|
||||
name="if-user",
|
||||
),
|
||||
path(
|
||||
"if/flow/<slug:flow_slug>/",
|
||||
ensure_csrf_cookie(FlowInterfaceView.as_view()),
|
||||
|
||||
@ -28,7 +28,7 @@ class ImpersonateInitView(View):
|
||||
|
||||
Event.new(EventAction.IMPERSONATION_STARTED).from_http(request, user_to_be)
|
||||
|
||||
return redirect("authentik_core:if-admin")
|
||||
return redirect("authentik_core:if-user")
|
||||
|
||||
|
||||
class ImpersonateEndView(View):
|
||||
@ -41,7 +41,7 @@ class ImpersonateEndView(View):
|
||||
or SESSION_IMPERSONATE_ORIGINAL_USER not in request.session
|
||||
):
|
||||
LOGGER.debug("Can't end impersonation", user=request.user)
|
||||
return redirect("authentik_core:if-admin")
|
||||
return redirect("authentik_core:if-user")
|
||||
|
||||
original_user = request.session[SESSION_IMPERSONATE_ORIGINAL_USER]
|
||||
|
||||
|
||||
@ -78,9 +78,7 @@ class CertificateKeyPair(CreatedUpdatedModel):
|
||||
@property
|
||||
def kid(self):
|
||||
"""Get Key ID used for JWKS"""
|
||||
return "{0}".format(
|
||||
md5(self.key_data.encode("utf-8")).hexdigest() if self.key_data else "" # nosec
|
||||
)
|
||||
return md5(self.key_data.encode("utf-8")).hexdigest() if self.key_data else "" # nosec
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Certificate-Key Pair {self.name}"
|
||||
|
||||
@ -1,8 +1,13 @@
|
||||
"""Notification API Views"""
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import OpenApiResponse, extend_schema
|
||||
from rest_framework import mixins
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import ReadOnlyField
|
||||
from rest_framework.filters import OrderingFilter, SearchFilter
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
|
||||
@ -53,3 +58,18 @@ class NotificationViewSet(
|
||||
]
|
||||
permission_classes = [OwnerPermissions]
|
||||
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
|
||||
|
||||
@extend_schema(
|
||||
request=OpenApiTypes.NONE,
|
||||
responses={
|
||||
204: OpenApiResponse(description="Marked tasks as read successfully."),
|
||||
},
|
||||
)
|
||||
@action(detail=False, methods=["post"])
|
||||
def mark_all_seen(self, request: Request) -> Response:
|
||||
"""Mark all the user's notifications as seen"""
|
||||
notifications = Notification.objects.filter(user=request.user)
|
||||
for notification in notifications:
|
||||
notification.seen = True
|
||||
Notification.objects.bulk_update(notifications, ["seen"])
|
||||
return Response({}, status=204)
|
||||
|
||||
28
authentik/events/api/notification_mapping.py
Normal file
@ -0,0 +1,28 @@
|
||||
"""NotificationWebhookMapping API Views"""
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.events.models import NotificationWebhookMapping
|
||||
|
||||
|
||||
class NotificationWebhookMappingSerializer(ModelSerializer):
|
||||
"""NotificationWebhookMapping Serializer"""
|
||||
|
||||
class Meta:
|
||||
|
||||
model = NotificationWebhookMapping
|
||||
fields = [
|
||||
"pk",
|
||||
"name",
|
||||
"expression",
|
||||
]
|
||||
|
||||
|
||||
class NotificationWebhookMappingViewSet(UsedByMixin, ModelViewSet):
|
||||
"""NotificationWebhookMapping Viewset"""
|
||||
|
||||
queryset = NotificationWebhookMapping.objects.all()
|
||||
serializer_class = NotificationWebhookMappingSerializer
|
||||
filterset_fields = ["name"]
|
||||
ordering = ["name"]
|
||||
@ -38,6 +38,7 @@ class NotificationTransportSerializer(ModelSerializer):
|
||||
"mode",
|
||||
"mode_verbose",
|
||||
"webhook_url",
|
||||
"webhook_mapping",
|
||||
"send_once",
|
||||
]
|
||||
|
||||
|
||||
@ -1,10 +1,7 @@
|
||||
"""authentik events app"""
|
||||
from datetime import timedelta
|
||||
from importlib import import_module
|
||||
|
||||
from django.apps import AppConfig
|
||||
from django.db import ProgrammingError
|
||||
from django.utils.timezone import now
|
||||
|
||||
|
||||
class AuthentikEventsConfig(AppConfig):
|
||||
@ -16,12 +13,3 @@ class AuthentikEventsConfig(AppConfig):
|
||||
|
||||
def ready(self):
|
||||
import_module("authentik.events.signals")
|
||||
try:
|
||||
from authentik.events.models import Event
|
||||
|
||||
date_from = now() - timedelta(days=1)
|
||||
|
||||
for event in Event.objects.filter(created__gte=date_from):
|
||||
event._set_prom_metrics()
|
||||
except ProgrammingError:
|
||||
pass
|
||||
|
||||
46
authentik/events/migrations/0018_auto_20210911_2217.py
Normal file
@ -0,0 +1,46 @@
|
||||
# Generated by Django 3.2.6 on 2021-09-11 22:17
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_core", "0028_alter_token_intent"),
|
||||
("authentik_events", "0017_alter_event_action"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="NotificationWebhookMapping",
|
||||
fields=[
|
||||
(
|
||||
"propertymapping_ptr",
|
||||
models.OneToOneField(
|
||||
auto_created=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
parent_link=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
to="authentik_core.propertymapping",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Notification Webhook Mapping",
|
||||
"verbose_name_plural": "Notification Webhook Mappings",
|
||||
},
|
||||
bases=("authentik_core.propertymapping",),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="notificationtransport",
|
||||
name="webhook_mapping",
|
||||
field=models.ForeignKey(
|
||||
default=None,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_DEFAULT,
|
||||
to="authentik_events.notificationwebhookmapping",
|
||||
),
|
||||
),
|
||||
]
|
||||
@ -2,25 +2,25 @@
|
||||
from datetime import timedelta
|
||||
from inspect import getmodule, stack
|
||||
from smtplib import SMTPException
|
||||
from typing import Optional, Union
|
||||
from typing import TYPE_CHECKING, Optional, Type, Union
|
||||
from uuid import uuid4
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import models
|
||||
from django.http import HttpRequest
|
||||
from django.http.request import QueryDict
|
||||
from django.utils.timezone import now
|
||||
from django.utils.translation import gettext as _
|
||||
from prometheus_client import Gauge
|
||||
from requests import RequestException, post
|
||||
from requests import RequestException
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik import __version__
|
||||
from authentik.core.middleware import SESSION_IMPERSONATE_ORIGINAL_USER, SESSION_IMPERSONATE_USER
|
||||
from authentik.core.models import ExpiringModel, Group, User
|
||||
from authentik.core.models import ExpiringModel, Group, PropertyMapping, User
|
||||
from authentik.events.geo import GEOIP_READER
|
||||
from authentik.events.utils import cleanse_dict, get_user, model_to_dict, sanitize_dict
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.lib.utils.http import get_client_ip
|
||||
from authentik.lib.utils.http import get_client_ip, get_http_session
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
from authentik.policies.models import PolicyBindingModel
|
||||
from authentik.stages.email.utils import TemplateEmailMessage
|
||||
@ -28,11 +28,8 @@ from authentik.tenants.models import Tenant
|
||||
from authentik.tenants.utils import DEFAULT_TENANT
|
||||
|
||||
LOGGER = get_logger("authentik.events")
|
||||
GAUGE_EVENTS = Gauge(
|
||||
"authentik_events",
|
||||
"Events in authentik",
|
||||
["action", "user_username", "app", "client_ip"],
|
||||
)
|
||||
if TYPE_CHECKING:
|
||||
from rest_framework.serializers import Serializer
|
||||
|
||||
|
||||
def default_event_duration():
|
||||
@ -143,8 +140,9 @@ class Event(ExpiringModel):
|
||||
`user` arguments optionally overrides user from requests."""
|
||||
if request:
|
||||
self.context["http_request"] = {
|
||||
"path": request.get_full_path(),
|
||||
"path": request.path,
|
||||
"method": request.method,
|
||||
"args": QueryDict(request.META.get("QUERY_STRING", "")),
|
||||
}
|
||||
if hasattr(request, "tenant"):
|
||||
tenant: Tenant = request.tenant
|
||||
@ -182,14 +180,6 @@ class Event(ExpiringModel):
|
||||
return
|
||||
self.context["geo"] = city
|
||||
|
||||
def _set_prom_metrics(self):
|
||||
GAUGE_EVENTS.labels(
|
||||
action=self.action,
|
||||
user_username=self.user.get("username"),
|
||||
app=self.app,
|
||||
client_ip=self.client_ip,
|
||||
).set(self.created.timestamp())
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if self._state.adding:
|
||||
LOGGER.debug(
|
||||
@ -200,7 +190,6 @@ class Event(ExpiringModel):
|
||||
user=self.user,
|
||||
)
|
||||
super().save(*args, **kwargs)
|
||||
self._set_prom_metrics()
|
||||
|
||||
@property
|
||||
def summary(self) -> str:
|
||||
@ -235,6 +224,9 @@ class NotificationTransport(models.Model):
|
||||
mode = models.TextField(choices=TransportMode.choices)
|
||||
|
||||
webhook_url = models.TextField(blank=True)
|
||||
webhook_mapping = models.ForeignKey(
|
||||
"NotificationWebhookMapping", on_delete=models.SET_DEFAULT, null=True, default=None
|
||||
)
|
||||
send_once = models.BooleanField(
|
||||
default=False,
|
||||
help_text=_(
|
||||
@ -254,15 +246,22 @@ class NotificationTransport(models.Model):
|
||||
|
||||
def send_webhook(self, notification: "Notification") -> list[str]:
|
||||
"""Send notification to generic webhook"""
|
||||
try:
|
||||
response = post(
|
||||
self.webhook_url,
|
||||
json={
|
||||
default_body = {
|
||||
"body": notification.body,
|
||||
"severity": notification.severity,
|
||||
"user_email": notification.user.email,
|
||||
"user_username": notification.user.username,
|
||||
},
|
||||
}
|
||||
if self.webhook_mapping:
|
||||
default_body = self.webhook_mapping.evaluate(
|
||||
user=notification.user,
|
||||
request=None,
|
||||
notification=notification,
|
||||
)
|
||||
try:
|
||||
response = get_http_session().post(
|
||||
self.webhook_url,
|
||||
json=default_body,
|
||||
)
|
||||
response.raise_for_status()
|
||||
except RequestException as exc:
|
||||
@ -312,7 +311,7 @@ class NotificationTransport(models.Model):
|
||||
if notification.event:
|
||||
body["attachments"][0]["title"] = notification.event.action
|
||||
try:
|
||||
response = post(self.webhook_url, json=body)
|
||||
response = get_http_session().post(self.webhook_url, json=body)
|
||||
response.raise_for_status()
|
||||
except RequestException as exc:
|
||||
text = exc.response.text if exc.response else str(exc)
@@ -429,3 +428,25 @@ class NotificationRule(PolicyBindingModel):

verbose_name = _("Notification Rule")
verbose_name_plural = _("Notification Rules")


class NotificationWebhookMapping(PropertyMapping):
"""Modify the schema and layout of the webhook being sent"""

@property
def component(self) -> str:
return "ak-property-mapping-notification-form"

@property
def serializer(self) -> Type["Serializer"]:
from authentik.events.api.notification_mapping import NotificationWebhookMappingSerializer

return NotificationWebhookMappingSerializer

def __str__(self):
return f"Notification Webhook Mapping {self.name}"

class Meta:

verbose_name = _("Notification Webhook Mapping")
verbose_name_plural = _("Notification Webhook Mappings")
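The new `NotificationWebhookMapping` lets the webhook payload be produced by a property-mapping expression; `send_webhook` in the earlier hunk uses `webhook_mapping.evaluate(user=..., request=None, notification=...)` as the JSON body when a mapping is set. A hedged sketch of such an expression (stored in the mapping's `expression` field; the exact evaluation context beyond `notification` is an assumption):

```python
# Illustrative mapping expression body, not a standalone module:
# it must evaluate to the dict that send_webhook() posts as JSON.
return {
    "text": f"[{notification.severity}] {notification.body}",
    "user": {
        "username": notification.user.username,
        "email": notification.user.email,
    },
}
```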
|
||||
|
||||
@ -3,7 +3,6 @@ from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from timeit import default_timer
|
||||
from traceback import format_tb
|
||||
from typing import Any, Optional
|
||||
|
||||
from celery import Task
|
||||
@ -42,8 +41,7 @@ class TaskResult:
|
||||
|
||||
def with_error(self, exc: Exception) -> "TaskResult":
|
||||
"""Since errors might not always be pickle-able, set the traceback"""
|
||||
self.messages.extend(format_tb(exc.__traceback__))
|
||||
self.messages.append(str(exc))
|
||||
self.messages.extend(exception_to_string(exc).splitlines())
|
||||
return self
|
||||
|
||||
|
||||
|
||||
@ -4,15 +4,15 @@ from django.urls import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.events.models import Event, EventAction, Notification, NotificationSeverity
|
||||
|
||||
|
||||
class TestEventsAPI(APITestCase):
|
||||
"""Test Event API"""
|
||||
|
||||
def setUp(self) -> None:
|
||||
user = User.objects.get(username="akadmin")
|
||||
self.client.force_login(user)
|
||||
self.user = User.objects.get(username="akadmin")
|
||||
self.client.force_login(self.user)
|
||||
|
||||
def test_top_n(self):
|
||||
"""Test top_per_user"""
|
||||
@ -30,3 +30,14 @@ class TestEventsAPI(APITestCase):
|
||||
reverse("authentik_api:event-actions"),
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_notifications(self):
|
||||
"""Test notifications"""
|
||||
notification = Notification.objects.create(
|
||||
user=self.user, severity=NotificationSeverity.ALERT, body="", seen=False
|
||||
)
|
||||
self.client.post(
|
||||
reverse("authentik_api:notification-mark-all-seen"),
|
||||
)
|
||||
notification.refresh_from_db()
|
||||
self.assertTrue(notification.seen)
|
||||
|
||||
@ -86,7 +86,7 @@ class StageViewSet(
|
||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
||||
def user_settings(self, request: Request) -> Response:
|
||||
"""Get all stages the user can configure"""
|
||||
_all_stages: Iterable[Stage] = Stage.objects.all().select_subclasses()
|
||||
_all_stages: Iterable[Stage] = Stage.objects.all().select_subclasses().order_by("name")
|
||||
matching_stages: list[dict] = []
|
||||
for stage in _all_stages:
|
||||
user_settings = stage.ui_user_settings
|
||||
|
||||
31
authentik/flows/tests/test_stage_views.py
Normal file
@ -0,0 +1,31 @@
|
||||
"""stage view tests"""
|
||||
from typing import Callable, Type
|
||||
|
||||
from django.test import RequestFactory, TestCase
|
||||
|
||||
from authentik.flows.stage import StageView
|
||||
from authentik.flows.views import FlowExecutorView
|
||||
from authentik.lib.utils.reflection import all_subclasses
|
||||
|
||||
|
||||
class TestViews(TestCase):
|
||||
"""Generic model properties tests"""
|
||||
|
||||
def setUp(self) -> None:
|
||||
self.factory = RequestFactory()
|
||||
self.exec = FlowExecutorView(request=self.factory.get("/"))
|
||||
|
||||
|
||||
def view_tester_factory(view_class: Type[StageView]) -> Callable:
|
||||
"""Test a form"""
|
||||
|
||||
def tester(self: TestViews):
|
||||
model_class = view_class(self.exec)
|
||||
self.assertIsNotNone(model_class.post)
|
||||
self.assertIsNotNone(model_class.get)
|
||||
|
||||
return tester
|
||||
|
||||
|
||||
for view in all_subclasses(StageView):
|
||||
setattr(TestViews, f"test_view_{view.__name__}", view_tester_factory(view))
|
||||
@ -14,12 +14,7 @@ from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.clickjacking import xframe_options_sameorigin
|
||||
from django.views.generic import View
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import (
|
||||
OpenApiParameter,
|
||||
OpenApiResponse,
|
||||
PolymorphicProxySerializer,
|
||||
extend_schema,
|
||||
)
|
||||
from drf_spectacular.utils import OpenApiParameter, PolymorphicProxySerializer, extend_schema
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.views import APIView
|
||||
from sentry_sdk import capture_exception
|
||||
@ -213,9 +208,6 @@ class FlowExecutorView(APIView):
|
||||
serializers=challenge_types(),
|
||||
resource_type_field_name="component",
|
||||
),
|
||||
404: OpenApiResponse(
|
||||
description="No Token found"
|
||||
), # This error can be raised by the email stage
|
||||
},
|
||||
request=OpenApiTypes.NONE,
|
||||
parameters=[
|
||||
|
||||
@ -9,7 +9,9 @@ postgresql:
|
||||
web:
|
||||
listen: 0.0.0.0:9000
|
||||
listen_tls: 0.0.0.0:9443
|
||||
listen_metrics: 0.0.0.0:9300
|
||||
load_local_files: false
|
||||
outpost_port_offset: 0
|
||||
|
||||
redis:
|
||||
host: localhost
|
||||
@ -54,6 +56,7 @@ outposts:
|
||||
# %(build_hash)s: Build hash if you're running a beta version
|
||||
docker_image_base: "ghcr.io/goauthentik/%(type)s:%(version)s"
|
||||
|
||||
disable_update_check: false
|
||||
avatars: env://AUTHENTIK_AUTHENTIK__AVATARS?gravatar
|
||||
geoip: "./GeoLite2-City.mmdb"
|
||||
|
||||
|
||||
@ -4,13 +4,13 @@ from textwrap import indent
|
||||
from typing import Any, Iterable, Optional
|
||||
|
||||
from django.core.exceptions import FieldError
|
||||
from requests import Session
|
||||
from rest_framework.serializers import ValidationError
|
||||
from sentry_sdk.hub import Hub
|
||||
from sentry_sdk.tracing import Span
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.lib.utils.http import get_http_session
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
@ -35,7 +35,7 @@ class BaseEvaluator:
|
||||
"ak_is_group_member": BaseEvaluator.expr_is_group_member,
|
||||
"ak_user_by": BaseEvaluator.expr_user_by,
|
||||
"ak_logger": get_logger(),
|
||||
"requests": Session(),
|
||||
"requests": get_http_session(),
|
||||
}
|
||||
self._context = {}
|
||||
self._filename = "BaseEvaluator"
|
||||
|
||||
@ -93,6 +93,7 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
|
||||
if "exc_info" in hint:
|
||||
_, exc_value, _ = hint["exc_info"]
|
||||
if isinstance(exc_value, ignored_classes):
|
||||
LOGGER.debug("dropping exception", exception=exc_value)
|
||||
return None
|
||||
if "logger" in event:
|
||||
if event["logger"] in [
|
||||
|
||||
@ -1,9 +1,13 @@
|
||||
"""http helpers"""
|
||||
from os import environ
|
||||
from typing import Any, Optional
|
||||
|
||||
from django.http import HttpRequest
|
||||
from requests.sessions import Session
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik import ENV_GIT_HASH_KEY, __version__
|
||||
|
||||
OUTPOST_REMOTE_IP_HEADER = "HTTP_X_AUTHENTIK_REMOTE_IP"
|
||||
OUTPOST_TOKEN_HEADER = "HTTP_X_AUTHENTIK_OUTPOST_TOKEN" # nosec
|
||||
DEFAULT_IP = "255.255.255.255"
|
||||
@ -60,3 +64,16 @@ def get_client_ip(request: Optional[HttpRequest]) -> str:
|
||||
if override:
|
||||
return override
|
||||
return _get_client_ip_from_meta(request.META)


def authentik_user_agent() -> str:
"""Get a common user agent"""
build = environ.get(ENV_GIT_HASH_KEY, "tagged")
return f"authentik@{__version__} (build={build})"


def get_http_session() -> Session:
"""Get a requests session with common headers"""
session = Session()
session.headers["User-Agent"] = authentik_user_agent()
return session
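Call sites throughout this changeset (webhook transports, the HaveIBeenPwned policy, expression evaluators) swap bare `requests` calls for this shared session. A short usage sketch, assuming the helper is imported as shown in the other hunks:

```python
from authentik.lib.utils.http import get_http_session

session = get_http_session()
# Every request made through the session carries the common authentik User-Agent.
response = session.get("https://api.pwnedpasswords.com/range/ABCDE")
response.raise_for_status()
```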
@ -1,5 +1,6 @@
|
||||
"""authentik lib reflection utilities"""
|
||||
from importlib import import_module
|
||||
from typing import Union
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
@ -19,12 +20,12 @@ def all_subclasses(cls, sort=True):
|
||||
return classes
|
||||
|
||||
|
||||
def class_to_path(cls):
|
||||
def class_to_path(cls: type) -> str:
|
||||
"""Turn Class (Class or instance) into module path"""
|
||||
return f"{cls.__module__}.{cls.__name__}"
|
||||
|
||||
|
||||
def path_to_class(path):
|
||||
def path_to_class(path: Union[str, None]) -> Union[type, None]:
|
||||
"""Import module and return class"""
|
||||
if not path:
|
||||
return None
|
||||
|
||||
@ -15,7 +15,7 @@ from authentik.core.channels import AuthJsonConsumer
|
||||
from authentik.outposts.models import OUTPOST_HELLO_INTERVAL, Outpost, OutpostState
|
||||
|
||||
GAUGE_OUTPOSTS_CONNECTED = Gauge(
|
||||
"authentik_outposts_connected", "Currently connected outposts", ["outpost", "uid"]
|
||||
"authentik_outposts_connected", "Currently connected outposts", ["outpost", "uid", "expected"]
|
||||
)
|
||||
GAUGE_OUTPOSTS_LAST_UPDATE = Gauge(
|
||||
"authentik_outposts_last_update",
|
||||
@ -76,6 +76,7 @@ class OutpostConsumer(AuthJsonConsumer):
|
||||
GAUGE_OUTPOSTS_CONNECTED.labels(
|
||||
outpost=self.outpost.name,
|
||||
uid=self.last_uid,
|
||||
expected=self.outpost.config.kubernetes_replicas,
|
||||
).dec()
|
||||
LOGGER.debug(
|
||||
"removed outpost instance from cache",
|
||||
@ -100,6 +101,7 @@ class OutpostConsumer(AuthJsonConsumer):
|
||||
GAUGE_OUTPOSTS_CONNECTED.labels(
|
||||
outpost=self.outpost.name,
|
||||
uid=self.last_uid,
|
||||
expected=self.outpost.config.kubernetes_replicas,
|
||||
).inc()
|
||||
LOGGER.debug(
|
||||
"added outpost instace to cache",
|
||||
|
||||
@ -164,11 +164,9 @@ class DockerController(BaseController):
|
||||
self.down()
|
||||
return self.up(depth + 1)
|
||||
# Check that container is healthy
|
||||
if (
|
||||
container.status == "running"
|
||||
and container.attrs.get("State", {}).get("Health", {}).get("Status", "")
|
||||
!= "healthy"
|
||||
):
|
||||
if container.status == "running" and container.attrs.get("State", {}).get(
|
||||
"Health", {}
|
||||
).get("Status", "") not in ["healthy", "starting"]:
|
||||
# At this point we know the config is correct, but the container isn't healthy,
|
||||
# so we just restart it with the same config
|
||||
if has_been_created:
|
||||
|
||||
@ -3,8 +3,8 @@ from typing import TYPE_CHECKING
|
||||
|
||||
from kubernetes.client import CoreV1Api, V1Service, V1ServicePort, V1ServiceSpec
|
||||
|
||||
from authentik.outposts.controllers.base import FIELD_MANAGER, DeploymentPort
|
||||
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler, NeedsUpdate
|
||||
from authentik.outposts.controllers.base import FIELD_MANAGER
|
||||
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler, NeedsRecreate
|
||||
from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@ -21,44 +21,13 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
|
||||
def reconcile(self, current: V1Service, reference: V1Service):
|
||||
super().reconcile(current, reference)
|
||||
if len(current.spec.ports) != len(reference.spec.ports):
|
||||
raise NeedsUpdate()
|
||||
raise NeedsRecreate()
|
||||
for port in reference.spec.ports:
|
||||
if port not in current.spec.ports:
|
||||
raise NeedsUpdate()
|
||||
|
||||
def get_embedded_reference_object(self) -> V1Service:
|
||||
"""Get Service for embedded outpost"""
|
||||
selector_labels = {
|
||||
"app.kubernetes.io/name": "authentik",
|
||||
"app.kubernetes.io/component": "server",
|
||||
}
|
||||
meta = self.get_object_meta(name=self.name)
|
||||
ports = []
|
||||
for port in [
|
||||
DeploymentPort(9000, "http", "tcp"),
|
||||
DeploymentPort(9443, "https", "tcp"),
|
||||
]:
|
||||
ports.append(
|
||||
V1ServicePort(
|
||||
name=port.name,
|
||||
port=port.port,
|
||||
protocol=port.protocol.upper(),
|
||||
target_port=port.inner_port or port.port,
|
||||
)
|
||||
)
|
||||
return V1Service(
|
||||
metadata=meta,
|
||||
spec=V1ServiceSpec(
|
||||
ports=ports,
|
||||
selector=selector_labels,
|
||||
type=self.controller.outpost.config.kubernetes_service_type,
|
||||
),
|
||||
)
|
||||
raise NeedsRecreate()
|
||||
|
||||
def get_reference_object(self) -> V1Service:
|
||||
"""Get deployment object for outpost"""
|
||||
if self.is_embedded:
|
||||
return self.get_embedded_reference_object()
|
||||
meta = self.get_object_meta(name=self.name)
|
||||
ports = []
|
||||
for port in self.controller.deployment_ports:
|
||||
@ -70,6 +39,12 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
|
||||
target_port=port.inner_port or port.port,
|
||||
)
|
||||
)
|
||||
if self.is_embedded:
|
||||
selector_labels = {
|
||||
"app.kubernetes.io/name": "authentik",
|
||||
"app.kubernetes.io/component": "server",
|
||||
}
|
||||
else:
|
||||
selector_labels = DeploymentReconciler(self.controller).get_pod_meta()
|
||||
return V1Service(
|
||||
metadata=meta,
|
||||
|
||||
150
authentik/outposts/controllers/k8s/service_monitor.py
Normal file
@ -0,0 +1,150 @@
|
||||
"""Kubernetes Prometheus ServiceMonitor Reconciler"""
|
||||
from dataclasses import asdict, dataclass, field
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from dacite import from_dict
|
||||
from kubernetes.client import ApiextensionsV1Api, CustomObjectsApi
|
||||
|
||||
from authentik.outposts.controllers.base import FIELD_MANAGER
|
||||
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from authentik.outposts.controllers.kubernetes import KubernetesController
|
||||
|
||||
|
||||
@dataclass
|
||||
class PrometheusServiceMonitorSpecEndpoint:
|
||||
"""Prometheus ServiceMonitor endpoint spec"""
|
||||
|
||||
port: str
|
||||
path: str = field(default="/metrics")
|
||||
|
||||
|
||||
@dataclass
|
||||
class PrometheusServiceMonitorSpecSelector:
|
||||
"""Prometheus ServiceMonitor selector spec"""
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
matchLabels: dict
|
||||
|
||||
|
||||
@dataclass
|
||||
class PrometheusServiceMonitorSpec:
|
||||
"""Prometheus ServiceMonitor spec"""
|
||||
|
||||
endpoints: list[PrometheusServiceMonitorSpecEndpoint]
|
||||
# pylint: disable=invalid-name
|
||||
selector: PrometheusServiceMonitorSpecSelector
|
||||
|
||||
|
||||
@dataclass
|
||||
class PrometheusServiceMonitorMetadata:
|
||||
"""Prometheus ServiceMonitor metadata"""
|
||||
|
||||
name: str
|
||||
namespace: str
|
||||
labels: dict = field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclass
|
||||
class PrometheusServiceMonitor:
|
||||
"""Prometheus ServiceMonitor"""
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
apiVersion: str
|
||||
kind: str
|
||||
metadata: PrometheusServiceMonitorMetadata
|
||||
spec: PrometheusServiceMonitorSpec
|
||||
|
||||
|
||||
CRD_NAME = "servicemonitors.monitoring.coreos.com"
|
||||
CRD_GROUP = "monitoring.coreos.com"
|
||||
CRD_VERSION = "v1"
|
||||
CRD_PLURAL = "servicemonitors"
|
||||
|
||||
|
||||
class PrometheusServiceMonitorReconciler(KubernetesObjectReconciler[PrometheusServiceMonitor]):
|
||||
"""Kubernetes Prometheus ServiceMonitor Reconciler"""
|
||||
|
||||
def __init__(self, controller: "KubernetesController") -> None:
|
||||
super().__init__(controller)
|
||||
self.api_ex = ApiextensionsV1Api(controller.client)
|
||||
self.api = CustomObjectsApi(controller.client)
|
||||
|
||||
@property
|
||||
def noop(self) -> bool:
|
||||
return (not self._crd_exists()) or (self.is_embedded)
|
||||
|
||||
def _crd_exists(self) -> bool:
|
||||
"""Check if the Prometheus ServiceMonitor exists"""
|
||||
return bool(
|
||||
len(
|
||||
self.api_ex.list_custom_resource_definition(
|
||||
field_selector=f"metadata.name={CRD_NAME}"
|
||||
).items
|
||||
)
|
||||
)
|
||||
|
||||
def get_reference_object(self) -> PrometheusServiceMonitor:
|
||||
"""Get service monitor object for outpost"""
|
||||
return PrometheusServiceMonitor(
|
||||
apiVersion=f"{CRD_GROUP}/{CRD_VERSION}",
|
||||
kind="ServiceMonitor",
|
||||
metadata=PrometheusServiceMonitorMetadata(
|
||||
name=self.name,
|
||||
namespace=self.namespace,
|
||||
labels=self.get_object_meta().labels,
|
||||
),
|
||||
spec=PrometheusServiceMonitorSpec(
|
||||
endpoints=[
|
||||
PrometheusServiceMonitorSpecEndpoint(
|
||||
port="http-metrics",
|
||||
)
|
||||
],
|
||||
selector=PrometheusServiceMonitorSpecSelector(
|
||||
matchLabels=self.get_object_meta(name=self.name).labels,
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
def create(self, reference: PrometheusServiceMonitor):
|
||||
return self.api.create_namespaced_custom_object(
|
||||
group=CRD_GROUP,
|
||||
version=CRD_VERSION,
|
||||
plural=CRD_PLURAL,
|
||||
namespace=self.namespace,
|
||||
body=asdict(reference),
|
||||
field_manager=FIELD_MANAGER,
|
||||
)
|
||||
|
||||
def delete(self, reference: PrometheusServiceMonitor):
|
||||
return self.api.delete_namespaced_custom_object(
|
||||
group=CRD_GROUP,
|
||||
version=CRD_VERSION,
|
||||
namespace=self.namespace,
|
||||
plural=CRD_PLURAL,
|
||||
name=self.name,
|
||||
)
|
||||
|
||||
def retrieve(self) -> PrometheusServiceMonitor:
|
||||
return from_dict(
|
||||
PrometheusServiceMonitor,
|
||||
self.api.get_namespaced_custom_object(
|
||||
group=CRD_GROUP,
|
||||
version=CRD_VERSION,
|
||||
namespace=self.namespace,
|
||||
plural=CRD_PLURAL,
|
||||
name=self.name,
|
||||
),
|
||||
)
|
||||
|
||||
def update(self, current: PrometheusServiceMonitor, reference: PrometheusServiceMonitor):
|
||||
return self.api.patch_namespaced_custom_object(
|
||||
group=CRD_GROUP,
|
||||
version=CRD_VERSION,
|
||||
namespace=self.namespace,
|
||||
plural=CRD_PLURAL,
|
||||
name=self.name,
|
||||
body=asdict(reference),
|
||||
field_manager=FIELD_MANAGER,
|
||||
)
|
||||
@ -13,6 +13,7 @@ from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
|
||||
from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler
|
||||
from authentik.outposts.controllers.k8s.secret import SecretReconciler
|
||||
from authentik.outposts.controllers.k8s.service import ServiceReconciler
|
||||
from authentik.outposts.controllers.k8s.service_monitor import PrometheusServiceMonitorReconciler
|
||||
from authentik.outposts.models import KubernetesServiceConnection, Outpost, ServiceConnectionInvalid
|
||||
|
||||
|
||||
@ -32,8 +33,9 @@ class KubernetesController(BaseController):
|
||||
"secret": SecretReconciler,
|
||||
"deployment": DeploymentReconciler,
|
||||
"service": ServiceReconciler,
|
||||
"prometheus servicemonitor": PrometheusServiceMonitorReconciler,
|
||||
}
|
||||
self.reconcile_order = ["secret", "deployment", "service"]
|
||||
self.reconcile_order = ["secret", "deployment", "service", "prometheus servicemonitor"]
|
||||
|
||||
def up(self):
|
||||
try:
|
||||
|
||||
@ -100,7 +100,7 @@ def outpost_controller(
|
||||
if from_cache:
|
||||
outpost: Outpost = cache.get(CACHE_KEY_OUTPOST_DOWN % outpost_pk)
|
||||
else:
|
||||
outpost: Outpost = Outpost.objects.get(pk=outpost_pk)
|
||||
outpost: Outpost = Outpost.objects.filter(pk=outpost_pk).first()
|
||||
if not outpost:
|
||||
return
|
||||
self.set_uid(slugify(outpost.name))
|
||||
@ -148,10 +148,7 @@ def outpost_post_save(model_class: str, model_pk: Any):
|
||||
return
|
||||
|
||||
if isinstance(instance, Outpost):
|
||||
LOGGER.debug("Ensuring token and permissions for outpost", instance=instance)
|
||||
_ = instance.token
|
||||
_ = instance.user
|
||||
LOGGER.debug("Trigger reconcile for outpost")
|
||||
LOGGER.debug("Trigger reconcile for outpost", instance=instance)
|
||||
outpost_controller.delay(instance.pk)
|
||||
|
||||
if isinstance(instance, (OutpostModel, Outpost)):
|
||||
|
||||
@ -81,11 +81,11 @@ class PolicyEngine:
|
||||
.iterator()
|
||||
)
|
||||
|
||||
def _check_policy_type(self, policy: Policy):
|
||||
def _check_policy_type(self, binding: PolicyBinding):
|
||||
"""Check policy type, make sure it's not the root class as that has no logic implemented"""
|
||||
# pyright: reportGeneralTypeIssues=false
|
||||
if policy.__class__ == Policy:
|
||||
raise TypeError(f"Policy '{policy}' is root type")
|
||||
if binding.policy is not None and binding.policy.__class__ == Policy:
|
||||
raise TypeError(f"Policy '{binding.policy}' is root type")
|
||||
|
||||
def build(self) -> "PolicyEngine":
|
||||
"""Build wrapper which monitors performance"""
|
||||
@ -102,7 +102,7 @@ class PolicyEngine:
|
||||
for binding in self._iter_bindings():
|
||||
self.__expected_result_count += 1
|
||||
|
||||
self._check_policy_type(binding.policy)
|
||||
self._check_policy_type(binding)
|
||||
key = cache_key(binding, self.request)
|
||||
cached_policy = cache.get(key, None)
|
||||
if cached_policy and self.use_cache:
|
||||
|
||||
@ -3,8 +3,10 @@ from ipaddress import ip_address, ip_network
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from django.http import HttpRequest
|
||||
from django_otp import devices_for_user
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.flows.planner import PLAN_CONTEXT_SSO
|
||||
from authentik.lib.expression.evaluator import BaseEvaluator
|
||||
from authentik.lib.utils.http import get_client_ip
|
||||
@ -28,6 +30,7 @@ class PolicyEvaluator(BaseEvaluator):
|
||||
self._messages = []
|
||||
self._context["ak_logger"] = get_logger(policy_name)
|
||||
self._context["ak_message"] = self.expr_func_message
|
||||
self._context["ak_user_has_authenticator"] = self.expr_func_user_has_authenticator
|
||||
self._context["ip_address"] = ip_address
|
||||
self._context["ip_network"] = ip_network
|
||||
self._filename = policy_name or "PolicyEvaluator"
@@ -36,6 +39,19 @@
"""Wrapper to append to messages list, which is returned with PolicyResult"""
self._messages.append(message)

def expr_func_user_has_authenticator(
self, user: User, device_type: Optional[str] = None
) -> bool:
"""Check if a user has any authenticator devices, optionally matching *device_type*"""
user_devices = devices_for_user(user)
if device_type:
for device in user_devices:
device_class = device.__class__.__name__.lower().replace("device", "")
if device_class == device_type:
return True
return False
return len(user_devices) > 0

def set_policy_request(self, request: PolicyRequest):
"""Update context based on policy request (if http request is given, update that too)"""
# update website/docs/expressions/_objects.md
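`ak_user_has_authenticator` is registered into the expression context a few lines up, so it becomes available to policy expressions. A hedged sketch of an expression using it (the `request.user` binding follows the existing policy context; the "webauthn" device-type string is an assumption derived from the class-name lowering above):

```python
# Illustrative policy expression body: prefer a WebAuthn authenticator,
# otherwise accept any registered device.
if ak_user_has_authenticator(request.user, "webauthn"):
    return True
ak_message("Consider registering a WebAuthn authenticator.")
return ak_user_has_authenticator(request.user)
```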
|
||||
|
||||
@ -3,10 +3,10 @@ from hashlib import sha1
|
||||
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext as _
|
||||
from requests import get
|
||||
from rest_framework.serializers import BaseSerializer
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.lib.utils.http import get_http_session
|
||||
from authentik.policies.models import Policy, PolicyResult
|
||||
from authentik.policies.types import PolicyRequest
|
||||
|
||||
@ -49,7 +49,7 @@ class HaveIBeenPwendPolicy(Policy):
|
||||
|
||||
pw_hash = sha1(password.encode("utf-8")).hexdigest() # nosec
|
||||
url = f"https://api.pwnedpasswords.com/range/{pw_hash[:5]}"
|
||||
result = get(url).text
|
||||
result = get_http_session().get(url).text
|
||||
final_count = 0
|
||||
for line in result.split("\r\n"):
|
||||
full_hash, count = line.split(":")
|
||||
|
||||
@ -59,18 +59,21 @@ class PasswordPolicy(Policy):
|
||||
password = request.context[PLAN_CONTEXT_PROMPT][self.password_field]
|
||||
|
||||
if len(password) < self.length_min:
|
||||
LOGGER.debug("password failed", reason="length", p=password)
|
||||
LOGGER.debug("password failed", reason="length")
|
||||
return PolicyResult(False, self.error_message)
|
||||
|
||||
if self.amount_lowercase > 0 and len(RE_LOWER.findall(password)) < self.amount_lowercase:
|
||||
LOGGER.debug("password failed", reason="amount_lowercase", p=password)
|
||||
LOGGER.debug("password failed", reason="amount_lowercase")
|
||||
return PolicyResult(False, self.error_message)
|
||||
if self.amount_uppercase > 0 and len(RE_UPPER.findall(password)) < self.amount_lowercase:
|
||||
LOGGER.debug("password failed", reason="amount_uppercase", p=password)
|
||||
LOGGER.debug("password failed", reason="amount_uppercase")
|
||||
return PolicyResult(False, self.error_message)
|
||||
regex = re.compile(r"[%s]" % self.symbol_charset)
|
||||
if self.amount_symbols > 0 and len(regex.findall(password)) < self.amount_symbols:
|
||||
LOGGER.debug("password failed", reason="amount_symbols", p=password)
|
||||
if self.amount_symbols > 0:
|
||||
count = 0
|
||||
for symbol in self.symbol_charset:
|
||||
count += password.count(symbol)
|
||||
if count < self.amount_symbols:
|
||||
LOGGER.debug("password failed", reason="amount_symbols")
|
||||
return PolicyResult(False, self.error_message)
|
||||
|
||||
return PolicyResult(True)
@@ -29,7 +29,19 @@ class LDAPProviderViewSet(UsedByMixin, ModelViewSet):

queryset = LDAPProvider.objects.all()
serializer_class = LDAPProviderSerializer
filterset_fields = "__all__"
filterset_fields = {
"application": ["isnull"],
"name": ["iexact"],
"authorization_flow__slug": ["iexact"],
"base_dn": ["iexact"],
"search_group__group_uuid": ["iexact"],
"search_group__name": ["iexact"],
"certificate__kp_uuid": ["iexact"],
"certificate__name": ["iexact"],
"tls_server_name": ["iexact"],
"uid_start_number": ["iexact"],
"gid_start_number": ["iexact"],
}
ordering = ["name"]
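Replacing `filterset_fields = "__all__"` with an explicit lookup map limits filtering to known fields and keeps the generated API schema stable. With django-filter each entry becomes a `<field>__<lookup>` query parameter; a hedged usage sketch (URL prefix and auth header are assumptions, not part of this diff):

```python
from urllib.parse import urlencode

params = urlencode({
    "name__iexact": "internal directory",  # hypothetical provider name
    "application__isnull": "false",
})
url = f"https://authentik.example.com/api/v3/providers/ldap/?{params}"
# e.g. get_http_session().get(url, headers={"Authorization": "Bearer <token>"})
```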
|
||||
|
||||
|
||||
|
||||
@ -12,4 +12,5 @@ class LDAPKubernetesController(KubernetesController):
|
||||
self.deployment_ports = [
|
||||
DeploymentPort(389, "ldap", "tcp", 3389),
|
||||
DeploymentPort(636, "ldaps", "tcp", 6636),
|
||||
DeploymentPort(9300, "http-metrics", "tcp", 9300),
|
||||
]
|
||||
|
||||
@ -1,6 +1,8 @@
|
||||
"""OAuth2Provider API Views"""
|
||||
from django_filters.filters import AllValuesMultipleFilter
|
||||
from django_filters.filterset import FilterSet
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import extend_schema_field
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.core.api.propertymappings import PropertyMappingSerializer
|
||||
@ -23,7 +25,7 @@ class ScopeMappingSerializer(PropertyMappingSerializer):
|
||||
class ScopeMappingFilter(FilterSet):
|
||||
"""Filter for ScopeMapping"""
|
||||
|
||||
managed = AllValuesMultipleFilter(field_name="managed")
|
||||
managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))
|
||||
|
||||
class Meta:
|
||||
model = ScopeMapping
|
||||
|
||||
@ -151,12 +151,13 @@ class AuthorizeError(OAuth2Error):
|
||||
# http://openid.net/specs/openid-connect-core-1_0.html#ImplicitAuthError
|
||||
hash_or_question = "#" if self.grant_type == GrantTypes.IMPLICIT else "?"
|
||||
|
||||
uri = "{0}{1}error={2}&error_description={3}".format(
|
||||
self.redirect_uri, hash_or_question, self.error, description
|
||||
uri = (
|
||||
f"{self.redirect_uri}{hash_or_question}error="
|
||||
f"{self.error}&error_description={description}"
|
||||
)
|
||||
|
||||
# Add state if present.
|
||||
uri = uri + ("&state={0}".format(self.state) if self.state else "")
|
||||
uri = uri + (f"&state={self.state}" if self.state else "")
|
||||
|
||||
return uri
|
||||
|
||||
|
||||
@ -0,0 +1,24 @@
|
||||
# Generated by Django 3.2.6 on 2021-09-08 15:12
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
import authentik.lib.utils.time
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_providers_oauth2", "0016_alter_authorizationcode_nonce"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="oauth2provider",
|
||||
name="token_validity",
|
||||
field=models.TextField(
|
||||
default="days=30",
|
||||
help_text="Tokens not valid on or after current time + this value (Format: hours=1;minutes=2;seconds=3).",
|
||||
validators=[authentik.lib.utils.time.timedelta_string_validator],
|
||||
),
|
||||
),
|
||||
]
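The default `token_validity` moves from `minutes=10` to `days=30` in the model change below, using the `hours=1;minutes=2;seconds=3` format described in the help text. A short sketch relating that format to the `expires_in` values asserted in the updated token tests (assuming `timedelta_from_string` parses the format exactly as documented):

```python
from authentik.lib.utils.time import timedelta_from_string

# "days=30" -> 2592000 seconds, the new expires_in asserted in the token tests
assert int(timedelta_from_string("days=30").total_seconds()) == 2592000

# "minutes=10" -> 600 seconds, the previous default and old expires_in value
assert int(timedelta_from_string("minutes=10").total_seconds()) == 600
```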
|
||||
@ -182,7 +182,7 @@ class OAuth2Provider(Provider):
|
||||
),
|
||||
)
|
||||
token_validity = models.TextField(
|
||||
default="minutes=10",
|
||||
default="days=30",
|
||||
validators=[timedelta_string_validator],
|
||||
help_text=_(
|
||||
(
|
||||
|
||||
@@ -247,7 +247,7 @@ class TestAuthorize(OAuthTestCase):
                "to": (
                    f"http://localhost#access_token={token.access_token}"
                    f"&id_token={provider.encode(token.id_token.to_dict())}&token_type=bearer"
                    f"&expires_in=600&state={state}"
                    f"&expires_in=60&state={state}"
                ),
            },
        )

@@ -141,7 +141,7 @@ class TestToken(OAuthTestCase):
                "access_token": new_token.access_token,
                "refresh_token": new_token.refresh_token,
                "token_type": "bearer",
                "expires_in": 600,
                "expires_in": 2592000,
                "id_token": provider.encode(
                    new_token.id_token.to_dict(),
                ),
@@ -190,7 +190,7 @@ class TestToken(OAuthTestCase):
                "access_token": new_token.access_token,
                "refresh_token": new_token.refresh_token,
                "token_type": "bearer",
                "expires_in": 600,
                "expires_in": 2592000,
                "id_token": provider.encode(
                    new_token.id_token.to_dict(),
                ),
@@ -236,7 +236,7 @@ class TestToken(OAuthTestCase):
                "access_token": new_token.access_token,
                "refresh_token": new_token.refresh_token,
                "token_type": "bearer",
                "expires_in": 600,
                "expires_in": 2592000,
                "id_token": provider.encode(
                    new_token.id_token.to_dict(),
                ),
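The new default of days=30 is exactly the 2,592,000 seconds asserted in the updated token tests above (30 × 86,400). A minimal sketch of the hours=1;minutes=2;seconds=3 format named in the migration's help_text; authentik ships its own parser and validator in authentik.lib.utils.time, so this helper is purely illustrative:

from datetime import timedelta

def parse_validity(value: str) -> timedelta:
    """Parse "days=30" or "hours=1;minutes=2;seconds=3" into a timedelta (illustrative only)."""
    kwargs = {}
    for pair in value.split(";"):
        key, _, raw = pair.partition("=")
        kwargs[key.strip()] = float(raw)
    return timedelta(**kwargs)

assert parse_validity("days=30").total_seconds() == 2592000  # matches expires_in above
assert parse_validity("minutes=10").total_seconds() == 600   # the old default
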
@@ -238,6 +238,10 @@ class OAuthFulfillmentStage(StageView):
        parsed = urlparse(uri)
        return HttpResponseRedirectScheme(uri, allowed_schemes=[parsed.scheme])

    def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
        """Wrapper when this stage gets hit with a post request"""
        return self.get(request, *args, **kwargs)

    # pylint: disable=unused-argument
    def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
        """final Stage of an OAuth2 Flow"""
@@ -367,7 +371,7 @@ class OAuthFulfillmentStage(StageView):

        query_fragment["token_type"] = "bearer"
        query_fragment["expires_in"] = int(
            timedelta_from_string(self.provider.token_validity).total_seconds()
            timedelta_from_string(self.provider.access_code_validity).total_seconds()
        )
        query_fragment["state"] = self.params.state if self.params.state else ""
@@ -1,7 +1,7 @@
"""ProxyProvider API Views"""
from typing import Any

from drf_spectacular.utils import extend_schema_field, extend_schema_serializer
from drf_spectacular.utils import extend_schema_field
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, ListField, SerializerMethodField
from rest_framework.serializers import ModelSerializer
@@ -72,6 +72,7 @@ class ProxyProviderSerializer(ProviderSerializer):
            "mode",
            "redirect_uris",
            "cookie_domain",
            "token_validity",
        ]
@@ -80,11 +81,27 @@ class ProxyProviderViewSet(UsedByMixin, ModelViewSet):

    queryset = ProxyProvider.objects.all()
    serializer_class = ProxyProviderSerializer
    filterset_fields = "__all__"
    filterset_fields = {
        "application": ["isnull"],
        "name": ["iexact"],
        "authorization_flow__slug": ["iexact"],
        "property_mappings": ["iexact"],
        "internal_host": ["iexact"],
        "external_host": ["iexact"],
        "internal_host_ssl_validation": ["iexact"],
        "certificate__kp_uuid": ["iexact"],
        "certificate__name": ["iexact"],
        "skip_path_regex": ["iexact"],
        "basic_auth_enabled": ["iexact"],
        "basic_auth_password_attribute": ["iexact"],
        "basic_auth_user_attribute": ["iexact"],
        "mode": ["iexact"],
        "redirect_uris": ["iexact"],
        "cookie_domain": ["iexact"],
    }
    ordering = ["name"]
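Switching filterset_fields from "__all__" to an explicit dict makes django-filter generate one query parameter per (field, lookup) pair, so the proxy provider list can be filtered with requests such as /api/v3/providers/proxy/?name__iexact=grafana&application__isnull=true (path and values illustrative). Roughly the FilterSet that django-filter derives from two of those entries:

from django_filters import rest_framework as filters

class ProxyProviderFilter(filters.FilterSet):
    # generated filter names follow the "<field>__<lookup>" pattern
    name__iexact = filters.CharFilter(field_name="name", lookup_expr="iexact")
    application__isnull = filters.BooleanFilter(field_name="application", lookup_expr="isnull")
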
@extend_schema_serializer(deprecate_fields=["forward_auth_mode"])
class ProxyOutpostConfigSerializer(ModelSerializer):
    """Proxy provider serializer for outposts"""
@@ -13,8 +13,8 @@ class ProxyDockerController(DockerController):
    def __init__(self, outpost: Outpost, connection: DockerServiceConnection):
        super().__init__(outpost, connection)
        self.deployment_ports = [
            DeploymentPort(4180, "http", "tcp"),
            DeploymentPort(4443, "https", "tcp"),
            DeploymentPort(9000, "http", "tcp"),
            DeploymentPort(9443, "https", "tcp"),
        ]

    def _get_labels(self) -> dict[str, str]:
@@ -30,5 +30,5 @@ class ProxyDockerController(DockerController):
        labels[f"traefik.http.routers.{traefik_name}-router.tls"] = "true"
        labels[f"traefik.http.routers.{traefik_name}-router.service"] = f"{traefik_name}-service"
        labels[f"traefik.http.services.{traefik_name}-service.loadbalancer.healthcheck.path"] = "/"
        labels[f"traefik.http.services.{traefik_name}-service.loadbalancer.server.port"] = "4180"
        labels[f"traefik.http.services.{traefik_name}-service.loadbalancer.server.port"] = "9000"
        return labels
@@ -96,7 +96,6 @@ class TraefikMiddlewareReconciler(KubernetesObjectReconciler[TraefikMiddleware]):

    def get_reference_object(self) -> TraefikMiddleware:
        """Get deployment object for outpost"""
        port = 9000 if self.is_embedded else 4180
        return TraefikMiddleware(
            apiVersion=f"{CRD_GROUP}/{CRD_VERSION}",
            kind="Middleware",
@@ -107,7 +106,7 @@ class TraefikMiddlewareReconciler(KubernetesObjectReconciler[TraefikMiddleware]):
            ),
            spec=TraefikMiddlewareSpec(
                forwardAuth=TraefikMiddlewareSpecForwardAuth(
                    address=f"http://{self.name}.{self.namespace}:{port}/akprox/auth?traefik",
                    address=f"http://{self.name}.{self.namespace}:9000/akprox/auth/traefik",
                    authResponseHeaders=[
                        "Set-Cookie",
                        "X-Auth-Username",
@@ -12,8 +12,9 @@ class ProxyKubernetesController(KubernetesController):
    def __init__(self, outpost: Outpost, connection: KubernetesServiceConnection):
        super().__init__(outpost, connection)
        self.deployment_ports = [
            DeploymentPort(4180, "http", "tcp"),
            DeploymentPort(4443, "https", "tcp"),
            DeploymentPort(9000, "http", "tcp"),
            DeploymentPort(9300, "http-metrics", "tcp"),
            DeploymentPort(9443, "https", "tcp"),
        ]
        self.reconcilers["ingress"] = IngressReconciler
        self.reconcilers["traefik middleware"] = TraefikMiddlewareReconciler
authentik/providers/proxy/migrations/0014_proxy_v2.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# Generated by Django 3.2.6 on 2021-09-09 11:24

from django.apps.registry import Apps
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor


def migrate_defaults(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    from authentik.providers.proxy.models import JWTAlgorithms, ProxyProvider

    db_alias = schema_editor.connection.alias
    for provider in ProxyProvider.objects.using(db_alias).filter(jwt_alg=JWTAlgorithms.RS256):
        provider.set_oauth_defaults()
        provider.save()


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_providers_proxy", "0013_mode"),
    ]

    operations = [migrations.RunPython(migrate_defaults)]
@@ -128,8 +128,8 @@ class ProxyProvider(OutpostModel, OAuth2Provider):
    def set_oauth_defaults(self):
        """Ensure all OAuth2-related settings are correct"""
        self.client_type = ClientTypes.CONFIDENTIAL
        self.jwt_alg = JWTAlgorithms.RS256
        self.rsa_key = CertificateKeyPair.objects.exclude(key_data__iexact="").first()
        self.jwt_alg = JWTAlgorithms.HS256
        self.rsa_key = None
        scopes = ScopeMapping.objects.filter(
            scope_name__in=[
                SCOPE_OPENID,
@@ -139,12 +139,7 @@ class ProxyProvider(OutpostModel, OAuth2Provider):
            ]
        )
        self.property_mappings.set(scopes)
        self.redirect_uris = "\n".join(
            [
                _get_callback_url(self.external_host),
                _get_callback_url(self.internal_host),
            ]
        )
        self.redirect_uris = _get_callback_url(self.external_host)

    def __str__(self):
        return f"Proxy Provider {self.name}"
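Defaulting proxy providers to HS256 with no rsa_key means the issued JWTs are signed with the provider's client secret instead of an RSA certificate, so the outpost can verify them with nothing but the shared secret (and the new 0014_proxy_v2 migration re-applies these defaults to existing RS256 providers). An illustration of that symmetric flow using PyJWT, which is not necessarily what authentik uses internally:

import jwt  # PyJWT, illustrative only

client_secret = "the-provider-client-secret"  # shared between provider and outpost
token = jwt.encode({"sub": "akadmin"}, client_secret, algorithm="HS256")

# The verifying side needs no certificate material, only the same secret:
claims = jwt.decode(token, client_secret, algorithms=["HS256"])
assert claims["sub"] == "akadmin"
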
@@ -9,7 +9,12 @@ from django.utils.translation import gettext_lazy as _
from django_filters.filters import AllValuesMultipleFilter
from django_filters.filterset import FilterSet
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
from drf_spectacular.utils import (
    OpenApiParameter,
    OpenApiResponse,
    extend_schema,
    extend_schema_field,
)
from rest_framework.decorators import action
from rest_framework.fields import CharField, FileField, SerializerMethodField
from rest_framework.parsers import MultiPartParser
@@ -185,7 +190,7 @@ class SAMLPropertyMappingSerializer(PropertyMappingSerializer):
class SAMLPropertyMappingFilter(FilterSet):
    """Filter for SAMLPropertyMapping"""

    managed = AllValuesMultipleFilter(field_name="managed")
    managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))

    class Meta:
        model = SAMLPropertyMapping
@@ -59,6 +59,10 @@ class AuthNRequestParser:
    ) -> AuthNRequest:
        root = ElementTree.fromstring(decoded_xml)

        if "AssertionConsumerServiceURL" not in root.attrib:
            msg = "Missing 'AssertionConsumerServiceURL' attribute"
            LOGGER.warning(msg)
            raise CannotHandleAssertion(msg)
        request_acs_url = root.attrib["AssertionConsumerServiceURL"]

        if self.provider.acs_url.lower() != request_acs_url.lower():
@@ -66,7 +70,7 @@ class AuthNRequestParser:
                f"ACS URL of {request_acs_url} doesn't match Provider "
                f"ACS URL of {self.provider.acs_url}."
            )
            LOGGER.info(msg)
            LOGGER.warning(msg)
            raise CannotHandleAssertion(msg)

        auth_n_request = AuthNRequest(id=root.attrib["ID"], relay_state=relay_state)
@@ -4,6 +4,7 @@ from getpass import getuser

from django.core.management.base import BaseCommand
from django.urls import reverse
from django.utils.text import slugify
from django.utils.timezone import now
from django.utils.translation import gettext as _
from structlog.stdlib import get_logger
@@ -42,7 +43,7 @@ class Command(BaseCommand):
            user=user,
            intent=TokenIntents.INTENT_RECOVERY,
            description=f"Recovery Token generated by {getuser()} on {_now}",
            identifier=f"ak-recovery-{user}-{_now}",
            identifier=slugify(f"ak-recovery-{user}-{_now}"),
        )
        self.stdout.write(
            (f"Store this link safely, as it will allow" f" anyone to access authentik as {user}.")
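Wrapping the identifier in slugify strips the spaces and colons that the timestamp would otherwise put into the token identifier; the input below is illustrative:

from django.utils.text import slugify

slugify("ak-recovery-akadmin-2021-09-09 11:24:00")
# -> "ak-recovery-akadmin-2021-09-09-112400"
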
@@ -22,4 +22,4 @@ class UseTokenView(View):
        login(request, token.user, backend=BACKEND_INBUILT)
        token.delete()
        messages.warning(request, _("Used recovery-link to authenticate."))
        return redirect("authentik_core:if-admin")
        return redirect("authentik_core:if-user")
@@ -13,7 +13,6 @@ from defusedxml import defuse_stdlib
from django.core.asgi import get_asgi_application
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

from authentik.root.asgi.error_handler import ASGIErrorHandler
from authentik.root.asgi.logger import ASGILogger

# DJANGO_SETTINGS_MODULE is set in gunicorn.conf.py
@@ -24,8 +23,7 @@ django.setup()
# pylint: disable=wrong-import-position
from authentik.root import websocket # noqa # isort:skip

application = ASGIErrorHandler(
    ASGILogger(
application = ASGILogger(
    guarantee_single_callable(
        SentryAsgiMiddleware(
            ProtocolTypeRouter(
@@ -37,4 +35,3 @@ application = ASGIErrorHandler(
            )
        )
    )
)
@@ -1,38 +0,0 @@
"""ASGI Error handler"""
from structlog.stdlib import get_logger

from authentik.root.asgi.types import ASGIApp, Receive, Scope, Send

LOGGER = get_logger("authentik.asgi")


class ASGIErrorHandler:
    """ASGI Error handler"""

    app: ASGIApp

    def __init__(self, app: ASGIApp):
        self.app = app

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        try:
            return await self.app(scope, receive, send)
        except Exception as exc: # pylint: disable=broad-except
            LOGGER.warning("Fatal ASGI exception", exc=exc)
            return await self.error_handler(scope, send)

    async def error_handler(self, scope: Scope, send: Send) -> None:
        """Return a generic error message"""
        if scope.get("scheme", "http") == "http":
            return await send(
                {
                    "type": "http.request",
                    "body": b"Internal server error",
                    "more_body": False,
                }
            )
        return await send(
            {
                "type": "websocket.close",
            }
        )
@@ -19,22 +19,24 @@ class ASGILogger:

    app: ASGIApp

    status_code: int
    start: float

    def __init__(self, app: ASGIApp):
        self.app = app

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        content_length = 0
        status_code = 0
        request_id = ""
        location = ""
        start = time()

        async def send_hooked(message: Message) -> None:
            """Hooked send method, which records status code and content-length, and for the final
            requests logs it"""

            headers = dict(message.get("headers", []))
            if "status" in message:
                self.status_code = message["status"]
                nonlocal status_code
                status_code = message["status"]

            if b"Content-Length" in headers:
                nonlocal content_length
@@ -43,23 +45,27 @@ class ASGILogger:
            if message["type"] == "http.response.start":
                response_headers = dict(message["headers"])
                nonlocal request_id
                nonlocal location
                request_id = response_headers.get(RESPONSE_HEADER_ID.encode(), b"").decode()
                location = response_headers.get(b"Location", b"").decode()

            if message["type"] == "http.response.body" and not message.get("more_body", True):
                runtime = int((time() - self.start) * 1000)
                self.log(scope, runtime, content_length, request_id=request_id)
                nonlocal start
                runtime = int((time() - start) * 1000)
                kwargs = {"request_id": request_id}
                if location != "":
                    kwargs["location"] = location
                self.log(scope, runtime, content_length, status_code, **kwargs)
            await send(message)

        self.start = time()
        if scope["type"] == "lifespan":
            # https://code.djangoproject.com/ticket/31508
            # https://github.com/encode/uvicorn/issues/266
            return
        return await self.app(scope, receive, send_hooked)

    def _get_ip(self, scope: Scope) -> str:
    def _get_ip(self, headers: dict[bytes, bytes], scope: Scope) -> str:
        client_ip = None
        headers = dict(scope.get("headers", []))
        for header in ASGI_IP_HEADERS:
            if header in headers:
                client_ip = headers[header].decode()
@@ -68,9 +74,10 @@ class ASGILogger:
        # Check if header has multiple values, and use the first one
        return client_ip.split(", ")[0]

    def log(self, scope: Scope, content_length: int, runtime: float, **kwargs):
    def log(self, scope: Scope, content_length: int, runtime: float, status_code: int, **kwargs):
        """Outpot access logs in a structured format"""
        host = self._get_ip(scope)
        headers = dict(scope.get("headers", []))
        host = self._get_ip(headers, scope)
        query_string = ""
        if scope.get("query_string", b"") != b"":
            query_string = f"?{scope.get('query_string').decode()}"
@@ -79,8 +86,9 @@ class ASGILogger:
            host=host,
            method=scope.get("method", ""),
            scheme=scope.get("scheme", ""),
            status=self.status_code,
            status=status_code,
            size=content_length / 1000 if content_length > 0 else 0,
            runtime=runtime,
            user_agent=headers.get(b"user-agent", b"").decode(),
            **kwargs,
        )
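The refactor moves status_code, start and friends from attributes on the single shared ASGILogger instance into locals captured by send_hooked, so concurrent requests no longer overwrite each other's timing and status. A stripped-down middleware showing the same closure pattern (not authentik's code):

import time

class TimingMiddleware:
    """Minimal ASGI middleware sketch: per-request state lives in the closure."""

    def __init__(self, app):
        self.app = app

    async def __call__(self, scope, receive, send):
        start = time.time()   # request-local, unlike a shared self.start
        status_code = 0       # request-local, unlike a shared self.status_code

        async def send_hooked(message):
            nonlocal status_code
            if message["type"] == "http.response.start":
                status_code = message["status"]
            if message["type"] == "http.response.body" and not message.get("more_body", False):
                print(f"{status_code} in {int((time.time() - start) * 1000)}ms")
            await send(message)

        await self.app(scope, receive, send_hooked)
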
@@ -3,10 +3,20 @@ import os
from logging.config import dictConfig

from celery import Celery
from celery.signals import after_task_publish, setup_logging, task_postrun, task_prerun
from celery.signals import (
    after_task_publish,
    setup_logging,
    task_failure,
    task_internal_error,
    task_postrun,
    task_prerun,
)
from django.conf import settings
from structlog.stdlib import get_logger

from authentik.lib.sentry import before_send
from authentik.lib.utils.errors import exception_to_string

# set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "authentik.root.settings")

@@ -43,6 +53,18 @@ def task_postrun_hook(task_id, task, *args, retval=None, state=None, **kwargs):
    LOGGER.debug("Task finished", task_id=task_id, task_name=task.__name__, state=state)


# pylint: disable=unused-argument
@task_failure.connect
@task_internal_error.connect
def task_error_hook(task_id, exception: Exception, traceback, *args, **kwargs):
    """Create system event for failed task"""
    from authentik.events.models import Event, EventAction

    LOGGER.warning("Task failure", exception=exception)
    if before_send({}, {"exc_info": (None, exception, None)}) is not None:
        Event.new(EventAction.SYSTEM_EXCEPTION, message=exception_to_string(exception)).save()


# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
@@ -24,7 +24,7 @@ class SessionMiddleware(UpstreamSessionMiddleware):
        # Since go does not consider localhost with http a secure origin
        # we can't set the secure flag.
        user_agent = request.META.get("HTTP_USER_AGENT", "")
        if user_agent.startswith("authentik-outpost@"):
        if user_agent.startswith("authentik-outpost@") or "safari" in user_agent.lower():
            return False
        return True
        return False
@@ -154,10 +154,10 @@ SPECTACULAR_SETTINGS = {
    "SCHEMA_PATH_PREFIX_TRIM": True,
    "SERVERS": [
        {
            "url": "http://authentik.tld/api/v3/",
            "url": "/api/v3/",
        },
        {
            "url": "http://authentik.tld/api/v2beta/",
            "url": "/api/v2beta/",
        },
    ],
    "CONTACT": {
@@ -372,7 +372,7 @@ CELERY_RESULT_BACKEND = (
# Database backup
DBBACKUP_STORAGE = "django.core.files.storage.FileSystemStorage"
DBBACKUP_STORAGE_OPTIONS = {"location": "./backups" if DEBUG else "/backups"}
DBBACKUP_FILENAME_TEMPLATE = "authentik-backup-{datetime}.sql"
DBBACKUP_FILENAME_TEMPLATE = f"authentik-backup-{__version__}-{{datetime}}.sql"
DBBACKUP_CONNECTOR_MAPPING = {
    "django_prometheus.db.backends.postgresql": "dbbackup.db.postgresql.PgDumpConnector",
}
@@ -529,7 +529,7 @@ for _app in INSTALLED_APPS:
    if "apps" in _app:
        _app = ".".join(_app.split(".")[:-2])
    try:
        app_settings = importlib.import_module("%s.settings" % _app)
        app_settings = importlib.import_module(f"{_app}.settings")
        INSTALLED_APPS.extend(getattr(app_settings, "INSTALLED_APPS", []))
        MIDDLEWARE.extend(getattr(app_settings, "MIDDLEWARE", []))
        AUTHENTICATION_BACKENDS.extend(getattr(app_settings, "AUTHENTICATION_BACKENDS", []))
@@ -2,6 +2,7 @@
from django.conf import settings

from authentik.lib.config import CONFIG
from tests.e2e.utils import get_docker_tag


class PytestTestRunner: # pragma: no cover
@@ -17,7 +18,7 @@ class PytestTestRunner: # pragma: no cover
        CONFIG.y_set("authentik.geoip", "tests/GeoLite2-City-Test.mmdb")
        CONFIG.y_set(
            "outposts.docker_image_base",
            "beryju.org/authentik/outpost-%(type)s:gh-master",
            f"beryju.org/authentik/outpost-%(type)s:{get_docker_tag()}",
        )

    def run_tests(self, test_labels):
@@ -1,11 +1,11 @@
"""Source API Views"""
from typing import Any

from django.http.response import Http404
from django.utils.text import slugify
from django_filters.filters import AllValuesMultipleFilter
from django_filters.filterset import FilterSet
from drf_spectacular.utils import OpenApiResponse, extend_schema
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema, extend_schema_field
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.request import Request
@@ -18,6 +18,9 @@ from authentik.core.api.sources import SourceSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.events.monitored_tasks import TaskInfo
from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource
from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer
from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer
from authentik.sources.ldap.sync.users import UserLDAPSynchronizer


class LDAPSourceSerializer(SourceSerializer):
@@ -94,19 +97,24 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):

    @extend_schema(
        responses={
            200: TaskSerializer(many=False),
            404: OpenApiResponse(description="Task not found"),
            200: TaskSerializer(many=True),
        }
    )
    @action(methods=["GET"], detail=True)
    @action(methods=["GET"], detail=True, pagination_class=None, filter_backends=[])
    # pylint: disable=unused-argument
    def sync_status(self, request: Request, slug: str) -> Response:
        """Get source's sync status"""
        source = self.get_object()
        task = TaskInfo.by_name(f"ldap_sync_{slugify(source.name)}")
        if not task:
            raise Http404
        return Response(TaskSerializer(task, many=False).data)
        results = []
        for sync_class in [
            UserLDAPSynchronizer,
            GroupLDAPSynchronizer,
            MembershipLDAPSynchronizer,
        ]:
            task = TaskInfo.by_name(f"ldap_sync_{slugify(source.name)}-{sync_class.__name__}")
            if task:
                results.append(task)
        return Response(TaskSerializer(results, many=True).data)
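With many=True and pagination disabled, the sync_status action now answers with a plain JSON list holding one TaskInfo entry per synchronizer class instead of a single object or a 404. The task names follow the ldap_sync_<slug>-<SyncClass> pattern from the code above; the path and field names below are illustrative:

# GET /api/v3/sources/ldap/my-source/sync_status/  (illustrative)
[
    {"task_name": "ldap_sync_my-source-UserLDAPSynchronizer", "status": "SUCCESSFUL", "messages": []},
    {"task_name": "ldap_sync_my-source-GroupLDAPSynchronizer", "status": "SUCCESSFUL", "messages": []},
    {"task_name": "ldap_sync_my-source-MembershipLDAPSynchronizer", "status": "SUCCESSFUL", "messages": []},
]
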
class LDAPPropertyMappingSerializer(PropertyMappingSerializer):
@@ -122,7 +130,7 @@ class LDAPPropertyMappingSerializer(PropertyMappingSerializer):
class LDAPPropertyMappingFilter(FilterSet):
    """Filter for LDAPPropertyMapping"""

    managed = AllValuesMultipleFilter(field_name="managed")
    managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))

    class Meta:
        model = LDAPPropertyMapping
@@ -4,7 +4,7 @@ from celery.schedules import crontab
CELERY_BEAT_SCHEDULE = {
    "sources_ldap_sync": {
        "task": "authentik.sources.ldap.tasks.ldap_sync_all",
        "schedule": crontab(minute="*/60"), # Run every hour
        "schedule": crontab(minute="*/120"), # Run every other hour
        "options": {"queue": "authentik_scheduled"},
    }
}
@@ -11,8 +11,12 @@ from authentik.core.models import User
from authentik.core.signals import password_changed
from authentik.events.models import Event, EventAction
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
from authentik.lib.utils.reflection import class_to_path
from authentik.sources.ldap.models import LDAPSource
from authentik.sources.ldap.password import LDAPPasswordChanger
from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer
from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer
from authentik.sources.ldap.sync.users import UserLDAPSynchronizer
from authentik.sources.ldap.tasks import ldap_sync
from authentik.stages.prompt.signals import password_validate

@@ -22,7 +26,12 @@ from authentik.stages.prompt.signals import password_validate
def sync_ldap_source_on_save(sender, instance: LDAPSource, **_):
    """Ensure that source is synced on save (if enabled)"""
    if instance.enabled:
        ldap_sync.delay(instance.pk)
        for sync_class in [
            UserLDAPSynchronizer,
            GroupLDAPSynchronizer,
            MembershipLDAPSynchronizer,
        ]:
            ldap_sync.delay(instance.pk, class_to_path(sync_class))


@receiver(password_validate)
@@ -17,11 +17,18 @@ class BaseLDAPSynchronizer:

    _source: LDAPSource
    _logger: BoundLogger
    _messages: list[str]

    def __init__(self, source: LDAPSource):
        self._source = source
        self._messages = []
        self._logger = get_logger().bind(source=source, syncer=self.__class__.__name__)

    @property
    def messages(self) -> list[str]:
        """Get all UI messages"""
        return self._messages

    @property
    def base_dn_users(self) -> str:
        """Shortcut to get full base_dn for user lookups"""
@@ -36,6 +43,11 @@ class BaseLDAPSynchronizer:
        return f"{self._source.additional_group_dn},{self._source.base_dn}"
        return self._source.base_dn

    def message(self, *args, **kwargs):
        """Add message that is later added to the System Task and shown to the user"""
        self._messages.append(" ".join(args))
        self._logger.warning(*args, **kwargs)

    def sync(self) -> int:
        """Sync function, implemented in subclass"""
        raise NotImplementedError()
@@ -15,7 +15,7 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
    def sync(self) -> int:
        """Iterate over all LDAP Groups and create authentik_core.Group instances"""
        if not self._source.sync_groups:
            self._logger.warning("Group syncing is disabled for this Source")
            self.message("Group syncing is disabled for this Source")
            return -1
        groups = self._source.connection.extend.standard.paged_search(
            search_base=self.base_dn_groups,
@@ -28,8 +28,8 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
            attributes = group.get("attributes", {})
            group_dn = self._flatten(self._flatten(group.get("entryDN", group.get("dn"))))
            if self._source.object_uniqueness_field not in attributes:
                self._logger.warning(
                    "Cannot find uniqueness Field in attributes",
                self.message(
                    f"Cannot find uniqueness field in attributes: '{group_dn}",
                    attributes=attributes.keys(),
                    dn=group_dn,
                )
Some files were not shown because too many files have changed in this diff.