Compare commits
429 Commits
version/20
...
version/20
SHA1 | Author | Date | |
---|---|---|---|
3665e2fefa | |||
3dbe35cf9e | |||
c7f0ea8a4b | |||
0620324702 | |||
5a802bcf83 | |||
00c8054893 | |||
dc2538f59d | |||
5a0e78c698 | |||
fd4e8a59f4 | |||
dd1a6a81c8 | |||
84dfbcaaae | |||
e649e9fb03 | |||
266ef66a6f | |||
842fdb0b0c | |||
a270a84aae | |||
36f7cad23b | |||
e441ac1e43 | |||
24f2932777 | |||
a6c6f22221 | |||
abd5db8ad4 | |||
124ce80694 | |||
4352960f83 | |||
4e2443d60b | |||
34a8408a4f | |||
17b65adcc5 | |||
6f8d129dea | |||
59f339beda | |||
ce1c400022 | |||
c99afe0ad4 | |||
ff9ff18c11 | |||
4d11d82c6e | |||
b4d750174f | |||
fd44765ff4 | |||
190ebb27e4 | |||
fb3c04d0c7 | |||
3ba8de61e0 | |||
d4d2be84a3 | |||
96ea7ae09c | |||
172bfceb31 | |||
932b19999e | |||
0f1cc86e71 | |||
788fd00390 | |||
f602e202b8 | |||
9b60fcb08b | |||
a293a14f2a | |||
65bfa589eb | |||
defca51d24 | |||
d862028134 | |||
c19d7c37aa | |||
6fb3102d25 | |||
51e3453dca | |||
6f58fdf158 | |||
5d4051f547 | |||
219b8d1a57 | |||
c7d4e69669 | |||
cd629dfbaa | |||
8eaaaae2a7 | |||
3d0a853449 | |||
c2f8ff55cf | |||
4b52697cfe | |||
80fae44f47 | |||
afd7af557d | |||
73eb97ca6e | |||
ebe90d8886 | |||
a1a1b113b1 | |||
9adf8e88ba | |||
72d87ee51d | |||
9654285535 | |||
6e47e69c62 | |||
1ba89a02ee | |||
1fb3642701 | |||
847d97b813 | |||
253060def2 | |||
2e70ea799a | |||
7364914ae8 | |||
1f1d322958 | |||
e4841ce1a4 | |||
af30b781b6 | |||
5f490c563e | |||
e33a5528f7 | |||
d4de243e3b | |||
317117ee68 | |||
40d03a6124 | |||
9cfeeb35ba | |||
b7d828702d | |||
19dfeec782 | |||
07eef2869f | |||
f7fd31cc84 | |||
465d9c2b93 | |||
04aae8f584 | |||
bbca90c93a | |||
dda1d4e0fb | |||
f072c600cc | |||
65b8a5bb8d | |||
92537a6c8d | |||
72836ecd9d | |||
251a97c77e | |||
7f7046f0e4 | |||
20e59158c2 | |||
9a9e55ae32 | |||
481260a5ca | |||
436adcce2e | |||
cd3f02fd3b | |||
7abfd24150 | |||
d3feab9463 | |||
189427609f | |||
d76a9c211a | |||
ef7d9c4d35 | |||
70c25692eb | |||
71b31a2812 | |||
d4493c0ee9 | |||
3208358a03 | |||
a6a8eddf7c | |||
8c0a87b710 | |||
2f88c435fb | |||
5cad59a9f8 | |||
5ac6a6910e | |||
d751a7fc4c | |||
f1fd223bc7 | |||
e75712fa09 | |||
1b87375661 | |||
545a114450 | |||
02b06838e2 | |||
6868b7722c | |||
1e303b515b | |||
34a9a6a389 | |||
7a1935b4e2 | |||
bf60b33d03 | |||
9bb50fd556 | |||
5e7521915a | |||
7b0cda3a6a | |||
db5279f952 | |||
9fc072e4df | |||
55ea9afeec | |||
9485f0b8cc | |||
fabdb6448f | |||
e629079352 | |||
e6dfa8294e | |||
e5a5a5c603 | |||
4d07da5ffa | |||
5b4f34fd5f | |||
2e05047151 | |||
459a6ea437 | |||
ea7f9f291f | |||
241d790e69 | |||
83e08f12ae | |||
6526659b51 | |||
6c3b7c8d3e | |||
d51ecc4554 | |||
ef63e35ad2 | |||
4e9176ed2e | |||
d1296e9cc7 | |||
d85e0593f1 | |||
20c1f15dc0 | |||
c864f4e312 | |||
202ad1a3ac | |||
979a5f800e | |||
c151faeff6 | |||
b3a3852a54 | |||
e401b4e74e | |||
9538ad5710 | |||
49bf82a0a4 | |||
e6fdec4c8e | |||
73b87a5e3d | |||
303b847cdc | |||
0386c0dd7b | |||
7f1b9cdeb2 | |||
252bb04dd3 | |||
3fbcfb48fb | |||
69f7198976 | |||
c74c8b2083 | |||
63d4f598e4 | |||
ded6b6f937 | |||
225099b1a1 | |||
6b7a32548d | |||
c71d415456 | |||
c03f0d1d7c | |||
ac9cac302c | |||
701c140cfd | |||
ca5761652c | |||
553872e8dd | |||
adc9b67a9c | |||
fa2ff5fc2b | |||
d5cab5d580 | |||
9e3b5d313b | |||
be8b2bf6f6 | |||
3f8cd7ff13 | |||
b266a2cdfb | |||
9a15a66d85 | |||
446f104c90 | |||
2cad9a3d07 | |||
ee48b8c225 | |||
a91649a7d4 | |||
ca89201bd8 | |||
e3a8fc0746 | |||
5e3a6b802b | |||
e8d9f992b9 | |||
260b2c8ca8 | |||
751e77fa9e | |||
86c2a5d69d | |||
1a02049104 | |||
32934fcd38 | |||
d84d7c26ca | |||
2f6e6a3123 | |||
36b674349a | |||
038ef67745 | |||
53831fa354 | |||
be39673f29 | |||
0f8dbfcc9c | |||
ba57bf4fa2 | |||
b1c9126832 | |||
e674f03064 | |||
08451c15f4 | |||
99d161e212 | |||
940ccf9ea8 | |||
08cce2ca4e | |||
4acbda2b77 | |||
83cfb5f8c2 | |||
0d370ef0a9 | |||
a335ca0895 | |||
8a666535a8 | |||
e6431593f7 | |||
928c2bf0d6 | |||
68388e9551 | |||
5d26fa0403 | |||
42f9ba8efe | |||
0440ad7c09 | |||
3ebc531ae2 | |||
ca3b5fa2a2 | |||
0f0a5b0621 | |||
51835887ab | |||
09bcbcc2ac | |||
8a76d6a21b | |||
48ab436444 | |||
18a53a9e23 | |||
6725569ba8 | |||
812be495a5 | |||
dbc3df1f63 | |||
07b001bc2b | |||
c012bed379 | |||
d330e9ee7f | |||
be21a5d172 | |||
ea2f623955 | |||
6fc38436f4 | |||
35faf269db | |||
e56c3fc54c | |||
5891fb3ad6 | |||
1041718e27 | |||
2507c0eec9 | |||
5ea9601062 | |||
c0e6a6c614 | |||
4523550422 | |||
988cf15b71 | |||
6ae660aea4 | |||
f201ce8059 | |||
59624ed45c | |||
3e78baf2d7 | |||
08c67b2a2c | |||
01d29134b9 | |||
55250e88e5 | |||
f1b100c8a5 | |||
19708bc67b | |||
40a885aaaa | |||
c529340d6c | |||
c317efa14c | |||
379fcf9c1f | |||
e10a7b48b7 | |||
3e666de91d | |||
333758d91f | |||
50678a9e2e | |||
eb8f52b870 | |||
3ee90712b2 | |||
e4eadf8080 | |||
26ebaf16fc | |||
d0ed372af0 | |||
cc8b2d7dfe | |||
61a212371f | |||
9ce49c2089 | |||
34c45900c2 | |||
bf7d110af3 | |||
4e5eeacf0a | |||
e7b498e8b4 | |||
b55cb2b40c | |||
25c001f2cd | |||
2a409215d3 | |||
ad8ee83697 | |||
d1d28722d2 | |||
1efd09fcd5 | |||
35f0e6b88d | |||
a6e528d209 | |||
bb2c4423b0 | |||
ad9f29566b | |||
e76bb6bc13 | |||
a68642779d | |||
3c04fcaa9f | |||
5955d28073 | |||
a6fb6161d7 | |||
6b0e0610c6 | |||
2c70301f56 | |||
07b9923bf6 | |||
8b3923200d | |||
3dcd67c1a3 | |||
2a9feafb90 | |||
580e88c6fc | |||
d82c01aa61 | |||
1af3357826 | |||
ed49d7824e | |||
378402fcf0 | |||
50f0c11c0b | |||
58712828a4 | |||
b2b9093c95 | |||
afa2afe1d4 | |||
d7631e8af0 | |||
6e625f7400 | |||
f54ead2b45 | |||
c4e4e17f93 | |||
43c87f87c3 | |||
4da0c81f44 | |||
9b70aaa717 | |||
5769eb277c | |||
26f60b3e85 | |||
7d8ed06539 | |||
4d858c64e0 | |||
6f0792ccfe | |||
04f06e00ff | |||
776c3128b8 | |||
e9e0992dce | |||
69af788b0f | |||
ceace0282b | |||
ccef7b4233 | |||
cad6c42fdd | |||
d2abe6d455 | |||
68d120b3b4 | |||
48c0c0baca | |||
7b29a1e485 | |||
fe28d216fe | |||
e36fb6641e | |||
972471ce79 | |||
38edd76949 | |||
cd07c12c1b | |||
3ce8b836dc | |||
d27dfcc1e3 | |||
1d5958a78f | |||
b6e0a1d8f4 | |||
2a122845d9 | |||
21c7787eed | |||
fae4d34131 | |||
7ff7bfeb58 | |||
983604265b | |||
f8d6daa928 | |||
6fc26aca72 | |||
29da7dd8d6 | |||
91ca90f700 | |||
b3c8ffb96c | |||
b35d9ae8b0 | |||
302b047f1a | |||
dcd80c6d63 | |||
d741ed430a | |||
8436738b0f | |||
5b150657f5 | |||
f89479caf3 | |||
2f3bf5efe7 | |||
5fb07acf54 | |||
99d0d4e8de | |||
afc5dc5543 | |||
9341787fe7 | |||
6c9b3ebd2b | |||
a525d6c3a9 | |||
b59b9314e4 | |||
7687b744cc | |||
9fb41b8d10 | |||
51ffdcb5cb | |||
4d6cd4c57d | |||
41c5f01422 | |||
e567cd5580 | |||
5f81909bab | |||
d03b43605e | |||
ea187d4e81 | |||
502ac51fa7 | |||
4bc6fd28d4 | |||
820c9e7d06 | |||
e5a8714e6a | |||
d56d6ea3a9 | |||
5f58a4566c | |||
d616bdd5d6 | |||
5112ef9331 | |||
7a49377caf | |||
5b3941a425 | |||
c1ab5c5556 | |||
3282b34431 | |||
392d9bb10b | |||
82f6c515ea | |||
d67d5f73c5 | |||
799d186510 | |||
3983b7fbe4 | |||
d75284a587 | |||
71e4936dc3 | |||
9d3b6f7a4d | |||
003df44a34 | |||
a7598c6ee5 | |||
0891e43040 | |||
1f49aea48d | |||
499b52df6a | |||
b8a566f4a0 | |||
aa0e8edb8b | |||
0e35bb18c7 | |||
4a06ebf4f9 | |||
11584af425 | |||
a31da9e1d3 | |||
8d6d49834b | |||
2825710262 | |||
7346ccf2b7 | |||
57072dd6ce | |||
fec098a823 | |||
73950b72e5 | |||
b40afb9b7d | |||
1f783dfc01 | |||
7ccf8bcdc8 | |||
76131e40ec | |||
5955394c1d | |||
a8998a6356 | |||
dc75d7b7f0 | |||
34a191f216 | |||
299931985e | |||
b946fbf9e7 | |||
5db3409efc | |||
649db054a6 | |||
4f5e1fb86b | |||
15d5b91642 |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2021.4.3
+current_version = 2021.5.1-rc8
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)
@@ -19,20 +19,14 @@ values =
 
 [bumpversion:file:website/docs/installation/docker-compose.md]
 
-[bumpversion:file:website/docs/installation/kubernetes.md]
-
 [bumpversion:file:docker-compose.yml]
 
-[bumpversion:file:helm/values.yaml]
-
-[bumpversion:file:helm/README.md]
-
-[bumpversion:file:helm/Chart.yaml]
-
 [bumpversion:file:.github/workflows/release.yml]
 
 [bumpversion:file:authentik/__init__.py]
 
+[bumpversion:file:internal/constants/constants.go]
+
 [bumpversion:file:outpost/pkg/version.go]
 
 [bumpversion:file:web/src/constants.ts]
.github/dependabot.yml (8, vendored)

@@ -1,5 +1,13 @@
 version: 2
 updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: daily
+      time: "04:00"
+    open-pull-requests-limit: 10
+    assignees:
+      - BeryJu
   - package-ecosystem: gomod
     directory: "/outpost"
     schedule:
.github/workflows/release.yml (155, vendored)

@@ -3,32 +3,49 @@ name: authentik-on-release
 on:
   release:
     types: [published, created]
+  push:
+    branches:
+      - version-*
 
 jobs:
   # Build
   build-server:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v1
+      - uses: actions/checkout@v2
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1.1.0
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
       - name: Docker Login Registry
-        env:
-          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
-        run: docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWORD
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v1
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: prepare ts api client
+        run: |
+          docker run --rm -v $(pwd):/local openapitools/openapi-generator-cli generate -i /local/swagger.yaml -g typescript-fetch -o /local/web/api --additional-properties=typescriptThreePlus=true,supportsES6=true,npmName=authentik-api,npmVersion=1.0.0
       - name: Building Docker Image
-        run: docker build
-          --no-cache
-          -t beryju/authentik:2021.4.3
-          -t beryju/authentik:latest
-          -f Dockerfile .
-      - name: Push Docker Container to Registry (versioned)
-        run: docker push beryju/authentik:2021.4.3
-      - name: Push Docker Container to Registry (latest)
-        run: docker push beryju/authentik:latest
+        uses: docker/build-push-action@v2
+        with:
+          push: ${{ github.event_name == 'release' }}
+          tags: |
+            beryju/authentik:2021.5.1-rc8,
+            beryju/authentik:latest,
+            ghcr.io/goauthentik/server:2021.5.1-rc8,
+            ghcr.io/goauthentik/server:latest
+          platforms: linux/amd64,linux/arm64
+          context: .
   build-proxy:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v1
+      - uses: actions/checkout@v2
      - uses: actions/setup-go@v2
         with:
           go-version: "^1.15"
@@ -37,56 +54,83 @@ jobs:
           cd outpost
           go get -u github.com/go-swagger/go-swagger/cmd/swagger
           swagger generate client -f ../swagger.yaml -A authentik -t pkg/
-          go build -v .
+          go build -v ./cmd/proxy/server.go
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1.1.0
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
       - name: Docker Login Registry
-        env:
-          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
-        run: docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWORD
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v1
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
       - name: Building Docker Image
-        run: |
-          cd outpost/
-          docker build \
-          --no-cache \
-          -t beryju/authentik-proxy:2021.4.3 \
-          -t beryju/authentik-proxy:latest \
-          -f proxy.Dockerfile .
-      - name: Push Docker Container to Registry (versioned)
-        run: docker push beryju/authentik-proxy:2021.4.3
-      - name: Push Docker Container to Registry (latest)
-        run: docker push beryju/authentik-proxy:latest
-  build-static:
+        uses: docker/build-push-action@v2
+        with:
+          push: ${{ github.event_name == 'release' }}
+          tags: |
+            beryju/authentik-proxy:2021.5.1-rc8,
+            beryju/authentik-proxy:latest,
+            ghcr.io/goauthentik/proxy:2021.5.1-rc8,
+            ghcr.io/goauthentik/proxy:latest
+          context: outpost/
+          file: outpost/proxy.Dockerfile
+          platforms: linux/amd64,linux/arm64
+  build-ldap:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v1
-      - name: prepare ts api client
+      - uses: actions/checkout@v2
+      - uses: actions/setup-go@v2
+        with:
+          go-version: "^1.15"
+      - name: prepare go api client
         run: |
-          docker run --rm -v $(pwd):/local openapitools/openapi-generator-cli generate -i /local/swagger.yaml -g typescript-fetch -o /local/web/api --additional-properties=typescriptThreePlus=true,supportsES6=true,npmName=authentik-api,npmVersion=1.0.0
+          cd outpost
+          go get -u github.com/go-swagger/go-swagger/cmd/swagger
+          swagger generate client -f ../swagger.yaml -A authentik -t pkg/
+          go build -v ./cmd/ldap/server.go
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1.1.0
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
       - name: Docker Login Registry
-        env:
-          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
-        run: docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWORD
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v1
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
       - name: Building Docker Image
-        run: |
-          cd web/
-          docker build \
-          --no-cache \
-          -t beryju/authentik-static:2021.4.3 \
-          -t beryju/authentik-static:latest \
-          -f Dockerfile .
-      - name: Push Docker Container to Registry (versioned)
-        run: docker push beryju/authentik-static:2021.4.3
-      - name: Push Docker Container to Registry (latest)
-        run: docker push beryju/authentik-static:latest
+        uses: docker/build-push-action@v2
+        with:
+          push: ${{ github.event_name == 'release' }}
+          tags: |
+            beryju/authentik-ldap:2021.5.1-rc8,
+            beryju/authentik-ldap:latest,
+            ghcr.io/goauthentik/ldap:2021.5.1-rc8,
+            ghcr.io/goauthentik/ldap:latest
+          context: outpost/
+          file: outpost/ldap.Dockerfile
+          platforms: linux/amd64,linux/arm64
   test-release:
+    if: ${{ github.event_name == 'release' }}
     needs:
       - build-server
-      - build-static
       - build-proxy
+      - build-ldap
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v1
+      - uses: actions/checkout@v2
       - name: Run test suite in final docker images
         run: |
           sudo apt-get install -y pwgen
@@ -97,18 +141,19 @@ jobs:
           docker-compose start postgresql redis
           docker-compose run -u root --entrypoint /bin/bash server -c "pip install --no-cache -r requirements-dev.txt && ./manage.py test authentik"
   sentry-release:
+    if: ${{ github.event_name == 'release' }}
     needs:
       - test-release
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v1
+      - uses: actions/checkout@v2
       - name: Create a Sentry.io release
-        uses: tclindner/sentry-releases-action@v1.2.0
+        uses: getsentry/action-release@v1
         env:
           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
           SENTRY_ORG: beryjuorg
           SENTRY_PROJECT: authentik
           SENTRY_URL: https://sentry.beryju.org
         with:
-          tagName: 2021.4.3
+          version: authentik@2021.5.1-rc8
           environment: beryjuorg-prod
.github/workflows/tag.yml (24, vendored)

@@ -10,7 +10,10 @@ jobs:
     name: Create Release from Tag
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@master
+      - uses: actions/checkout@v2
+      - name: prepare ts api client
+        run: |
+          docker run --rm -v $(pwd):/local openapitools/openapi-generator-cli generate -i /local/swagger.yaml -g typescript-fetch -o /local/web/api --additional-properties=typescriptThreePlus=true,supportsES6=true,npmName=authentik-api,npmVersion=1.0.0
       - name: Pre-release test
         run: |
           sudo apt-get install -y pwgen
@@ -25,15 +28,6 @@ jobs:
           docker-compose up --no-start
           docker-compose start postgresql redis
           docker-compose run -u root --entrypoint /bin/bash server -c "pip install --no-cache -r requirements-dev.txt && ./manage.py test authentik"
-      - name: Install Helm
-        run: |
-          apt update && apt install -y curl
-          curl https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 | bash
-      - name: Helm package
-        run: |
-          helm dependency update helm/
-          helm package helm/
-          mv authentik-*.tgz authentik-chart.tgz
       - name: Extract version number
         id: get_version
         uses: actions/github-script@0.2.0
@@ -51,13 +45,3 @@ jobs:
           release_name: Release ${{ steps.get_version.outputs.result }}
           draft: true
           prerelease: false
-      - name: Upload packaged Helm Chart
-        id: upload-release-asset
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: ./authentik-chart.tgz
-          asset_name: authentik-chart.tgz
-          asset_content_type: application/gzip
.gitignore (2, vendored)

@@ -202,3 +202,5 @@ selenium_screenshots/
 backups/
 media/
 *mmdb
+
+.idea/
Dockerfile (39)

@@ -1,3 +1,4 @@
+# Stage 1: Lock python dependencies
 FROM python:3.9-slim-buster as locker
 
 COPY ./Pipfile /app/
@@ -9,6 +10,34 @@ RUN pip install pipenv && \
     pipenv lock -r > requirements.txt && \
     pipenv lock -rd > requirements-dev.txt
 
+# Stage 2: Build webui
+FROM node as npm-builder
+
+COPY ./web /static/
+
+ENV NODE_ENV=production
+RUN cd /static && npm i --production=false && npm run build
+
+# Stage 3: Build go proxy
+FROM golang:1.16.4 AS builder
+
+WORKDIR /work
+
+COPY --from=npm-builder /static/robots.txt /work/web/robots.txt
+COPY --from=npm-builder /static/security.txt /work/web/security.txt
+COPY --from=npm-builder /static/dist/ /work/web/dist/
+COPY --from=npm-builder /static/authentik/ /work/web/authentik/
+
+# RUN ls /work/web/static/authentik/ && exit 1
+COPY ./cmd /work/cmd
+COPY ./web/static.go /work/web/static.go
+COPY ./internal /work/internal
+COPY ./go.mod /work/go.mod
+COPY ./go.sum /work/go.sum
+
+RUN go build -o /work/authentik ./cmd/server/main.go
+
+# Stage 4: Run
 FROM python:3.9-slim-buster
 
 WORKDIR /
@@ -19,15 +48,16 @@ ARG GIT_BUILD_HASH
 ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
 
 RUN apt-get update && \
-    apt-get install -y --no-install-recommends curl ca-certificates gnupg && \
+    apt-get install -y --no-install-recommends curl ca-certificates gnupg git && \
     curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
     echo "deb http://apt.postgresql.org/pub/repos/apt buster-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
     apt-get update && \
-    apt-get install -y --no-install-recommends postgresql-client-12 postgresql-client-11 build-essential libxmlsec1-dev pkg-config libmaxminddb0 && \
-    apt-get clean && \
+    apt-get install -y --no-install-recommends libpq-dev postgresql-client build-essential libxmlsec1-dev pkg-config libmaxminddb0 && \
     pip install -r /requirements.txt --no-cache-dir && \
-    apt-get remove --purge -y build-essential && \
+    apt-get remove --purge -y build-essential git && \
     apt-get autoremove --purge -y && \
+    apt-get clean && \
+    rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
     # This is quite hacky, but docker has no guaranteed Group ID
     # we could instead check for the GID of the socket and add the user dynamically,
     # but then we have to drop permmissions later
@@ -44,6 +74,7 @@ COPY ./pyproject.toml /
 COPY ./xml /xml
 COPY ./manage.py /
 COPY ./lifecycle/ /lifecycle
+COPY --from=builder /work/authentik /authentik-proxy
 
 USER authentik
 STOPSIGNAL SIGINT
Makefile (23)

@@ -1,4 +1,7 @@
-all: lint-fix lint coverage gen
+.SHELLFLAGS += -x -e
+PWD = $(shell pwd)
+
+all: lint-fix lint test gen
 
 test-integration:
 	k3d cluster create || exit 0
@@ -8,7 +11,7 @@ test-integration:
 test-e2e:
 	coverage run manage.py test --failfast -v 3 tests/e2e
 
-coverage:
+test:
 	coverage run manage.py test -v 3 authentik
 	coverage html
 	coverage report
@@ -22,8 +25,16 @@ lint:
 	bandit -r authentik tests lifecycle -x node_modules
 	pylint authentik tests lifecycle
 
-gen: coverage
+gen:
 	./manage.py generate_swagger -o swagger.yaml -f yaml
+	docker run \
+		--rm -v ${PWD}:/local \
+		openapitools/openapi-generator-cli generate \
+		-i /local/swagger.yaml \
+		-g typescript-fetch \
+		-o /local/web/api \
+		--additional-properties=typescriptThreePlus=true,supportsES6=true,npmName=authentik-api,npmVersion=1.0.0
+	cd web/api && npx tsc
 
 local-stack:
 	export AUTHENTIK_TAG=testing
@@ -31,7 +42,5 @@ local-stack:
 	docker-compose up -d
 	docker-compose run --rm server migrate
 
-build-static:
-	docker-compose -f scripts/ci.docker-compose.yml up -d
-	docker build -t beryju/authentik-static -f static.Dockerfile --network=scripts_default .
-	docker-compose -f scripts/ci.docker-compose.yml down -v
+run:
+	go run -v cmd/server/main.go
Pipfile (7)

@@ -11,7 +11,7 @@ channels-redis = "*"
 dacite = "*"
 defusedxml = "*"
 django = "*"
-django-dbbackup = "*"
+django-dbbackup = { git = 'https://github.com/django-dbbackup/django-dbbackup.git', ref = '9d1909c30a3271c8c9c8450add30d6e0b996e145' }
 django-filter = "*"
 django-guardian = "*"
 django-model-utils = "*"
@@ -32,7 +32,7 @@ lxml = ">=4.6.3"
 packaging = "*"
 psycopg2-binary = "*"
 pycryptodome = "*"
-pyjwkest = "*"
+pyjwt = "*"
 pyyaml = "*"
 requests-oauthlib = "*"
 sentry-sdk = "*"
@@ -50,7 +50,7 @@ python_version = "3.9"
 
 [dev-packages]
 bandit = "*"
-black = "==20.8b1"
+black = "==21.5b1"
 bump2version = "*"
 colorama = "*"
 coverage = "*"
@@ -59,3 +59,4 @@ pylint-django = "*"
 pytest = "*"
 pytest-django = "*"
 selenium = "*"
+requests-mock = "*"
Pipfile.lock (512, generated): file diff suppressed because it is too large.
README.md (11)

@@ -5,12 +5,13 @@
 ---
 
 [](https://discord.gg/jg33eMhnj6)
-[](https://dev.azure.com/beryjuorg/authentik/_build?definitionId=1)
-[](https://dev.azure.com/beryjuorg/authentik/_build?definitionId=1)
-[](https://codecov.io/gh/BeryJu/authentik)
+[](https://dev.azure.com/beryjuorg/authentik/_build?definitionId=6)
+[](https://dev.azure.com/beryjuorg/authentik/_build?definitionId=6)
+[](https://codecov.io/gh/goauthentik/authentik)
 
 
 
+[Transifex](https://www.transifex.com/beryjuorg/authentik/)
 
 ## What is authentik?
 
@@ -31,7 +32,7 @@ Light | Dark
 
 ## Development
 
-See [Development Documentation](https://goauthentik.io/docs/development/local-dev-environment)
+See [Development Documentation](https://goauthentik.io/developer-docs/)
 
 ## Security
 
@@ -4,8 +4,8 @@
 
 | Version | Supported |
 | ---------- | ------------------ |
-| 2021.3.x | :white_check_mark: |
 | 2021.4.x | :white_check_mark: |
+| 2021.5.x | :white_check_mark: |
 
 ## Reporting a Vulnerability
 
@@ -1,3 +1,3 @@
 """authentik"""
-__version__ = "2021.4.3"
+__version__ = "2021.5.1-rc8"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@@ -4,7 +4,7 @@ from celery.schedules import crontab
 CELERY_BEAT_SCHEDULE = {
     "admin_latest_version": {
         "task": "authentik.admin.tasks.update_latest_version",
-        "schedule": crontab(minute=0),  # Run every hour
+        "schedule": crontab(minute="*/60"),  # Run every hour
         "options": {"queue": "authentik_scheduled"},
     }
 }
@@ -23,7 +23,9 @@ URL_FINDER = URLValidator.regex.pattern[1:]
 def update_latest_version(self: MonitoredTask):
     """Update latest version info"""
     try:
-        response = get("https://api.github.com/repos/beryju/authentik/releases/latest")
+        response = get(
+            "https://api.github.com/repos/goauthentik/authentik/releases/latest"
+        )
         response.raise_for_status()
         data = response.json()
         tag_name = data.get("tag_name")
@@ -7,6 +7,7 @@ from django.urls import reverse
 from authentik import __version__
 from authentik.core.models import Group, User
 from authentik.core.tasks import clean_expired_models
+from authentik.events.monitored_tasks import TaskResultStatus
 
 
 class TestAdminAPI(TestCase):
@@ -30,6 +31,26 @@ class TestAdminAPI(TestCase):
             any(task["task_name"] == "clean_expired_models" for task in body)
         )
 
+    def test_tasks_single(self):
+        """Test Task API (read single)"""
+        clean_expired_models.delay()
+        response = self.client.get(
+            reverse(
+                "authentik_api:admin_system_tasks-detail",
+                kwargs={"pk": "clean_expired_models"},
+            )
+        )
+        self.assertEqual(response.status_code, 200)
+        body = loads(response.content)
+        self.assertEqual(body["status"], TaskResultStatus.SUCCESSFUL.name)
+        self.assertEqual(body["task_name"], "clean_expired_models")
+        response = self.client.get(
+            reverse(
+                "authentik_api:admin_system_tasks-detail", kwargs={"pk": "qwerqwer"}
+            )
+        )
+        self.assertEqual(response.status_code, 404)
+
     def test_tasks_retry(self):
         """Test Task API (retry)"""
         clean_expired_models.delay()
@@ -1,5 +1,5 @@
 """API Authentication"""
-from base64 import b64decode, b64encode
+from base64 import b64decode
 from binascii import Error
 from typing import Any, Optional, Union
 
@@ -19,14 +19,6 @@ def token_from_header(raw_header: bytes) -> Optional[Token]:
     auth_credentials = raw_header.decode()
     if auth_credentials == "":
         return None
-    # Legacy, accept basic auth thats fully encoded (2021.3 outposts)
-    if " " not in auth_credentials:
-        try:
-            plain = b64decode(auth_credentials.encode()).decode()
-            auth_type, body = plain.split()
-            auth_credentials = f"{auth_type} {b64encode(body.encode()).decode()}"
-        except (UnicodeDecodeError, Error):
-            raise AuthenticationFailed("Malformed header")
     auth_type, auth_credentials = auth_credentials.split()
     if auth_type.lower() not in ["basic", "bearer"]:
         LOGGER.debug("Unsupported authentication type, denying", type=auth_type.lower())
@@ -62,4 +54,4 @@ class AuthentikTokenAuthentication(BaseAuthentication):
         if not token:
             return None
 
-        return (token.user, None)
+        return (token.user, None)  # pragma: no cover
@@ -3,7 +3,7 @@
 {% load static %}
 
 {% block title %}
-authentik API Browser
+API Browser - {{ config.authentik.branding.title }}
 {% endblock %}
 
 {% block head %}
authentik/api/tests/test_config.py (16, new file)

@@ -0,0 +1,16 @@
+"""Test config API"""
+from json import loads
+
+from django.urls import reverse
+from rest_framework.test import APITestCase
+
+
+class TestConfig(APITestCase):
+    """Test config API"""
+
+    def test_config(self):
+        """Test YAML generation"""
+        response = self.client.get(
+            reverse("authentik_api:configs-list"),
+        )
+        self.assertTrue(loads(response.content.decode()))
authentik/api/tests/test_decorators.py (33, new file)

@@ -0,0 +1,33 @@
+"""test decorators api"""
+from django.urls import reverse
+from guardian.shortcuts import assign_perm
+from rest_framework.test import APITestCase
+
+from authentik.core.models import Application, User
+
+
+class TestAPIDecorators(APITestCase):
+    """test decorators api"""
+
+    def setUp(self) -> None:
+        super().setUp()
+        self.user = User.objects.create(username="test-user")
+
+    def test_obj_perm_denied(self):
+        """Test object perm denied"""
+        self.client.force_login(self.user)
+        app = Application.objects.create(name="denied", slug="denied")
+        response = self.client.get(
+            reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
+        )
+        self.assertEqual(response.status_code, 403)
+
+    def test_other_perm_denied(self):
+        """Test other perm denied"""
+        self.client.force_login(self.user)
+        app = Application.objects.create(name="denied", slug="denied")
+        assign_perm("authentik_core.view_application", self.user, app)
+        response = self.client.get(
+            reverse("authentik_api:application-metrics", kwargs={"slug": app.slug})
+        )
+        self.assertEqual(response.status_code, 403)
@@ -22,3 +22,10 @@ class TestSwaggerGeneration(APITestCase):
             reverse("authentik_api:schema-json", kwargs={"format": ".json"}),
         )
         self.assertTrue(loads(response.content.decode()))
+
+    def test_browser(self):
+        """Test API Browser"""
+        response = self.client.get(
+            reverse("authentik_api:swagger"),
+        )
+        self.assertEqual(response.status_code, 200)
@@ -47,6 +47,7 @@ from authentik.policies.reputation.api import (
     ReputationPolicyViewSet,
     UserReputationViewSet,
 )
+from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet
 from authentik.providers.oauth2.api.provider import OAuth2ProviderViewSet
 from authentik.providers.oauth2.api.scope import ScopeMappingViewSet
 from authentik.providers.oauth2.api.tokens import (
@@ -63,6 +64,7 @@ from authentik.sources.oauth.api.source import OAuthSourceViewSet
 from authentik.sources.oauth.api.source_connection import (
     UserOAuthSourceConnectionViewSet,
 )
+from authentik.sources.plex.api import PlexSourceViewSet
 from authentik.sources.saml.api import SAMLSourceViewSet
 from authentik.stages.authenticator_static.api import (
     AuthenticatorStaticStageViewSet,
@@ -120,6 +122,7 @@ router.register(
     "outposts/service_connections/kubernetes", KubernetesServiceConnectionViewSet
 )
 router.register("outposts/proxy", ProxyOutpostConfigViewSet)
+router.register("outposts/ldap", LDAPOutpostConfigViewSet)
 
 router.register("flows/instances", FlowViewSet)
 router.register("flows/bindings", FlowStageBindingViewSet)
@@ -136,6 +139,7 @@ router.register("sources/oauth_user_connections", UserOAuthSourceConnectionViewS
 router.register("sources/ldap", LDAPSourceViewSet)
 router.register("sources/saml", SAMLSourceViewSet)
 router.register("sources/oauth", OAuthSourceViewSet)
+router.register("sources/plex", PlexSourceViewSet)
 
 router.register("policies/all", PolicyViewSet)
 router.register("policies/bindings", PolicyBindingViewSet)
@@ -149,6 +153,7 @@ router.register("policies/reputation/ips", IPReputationViewSet)
 router.register("policies/reputation", ReputationPolicyViewSet)
 
 router.register("providers/all", ProviderViewSet)
+router.register("providers/ldap", LDAPProviderViewSet)
 router.register("providers/proxy", ProxyProviderViewSet)
 router.register("providers/oauth2", OAuth2ProviderViewSet)
 router.register("providers/saml", SAMLProviderViewSet)
@@ -196,7 +201,8 @@ info = openapi.Info(
     default_version="v2beta",
     contact=openapi.Contact(email="hello@beryju.org"),
     license=openapi.License(
-        name="GNU GPLv3", url="https://github.com/BeryJu/authentik/blob/master/LICENSE"
+        name="GNU GPLv3",
+        url="https://github.com/goauthentik/authentik/blob/master/LICENSE",
     ),
 )
 SchemaView = get_schema_view(info, public=True, permission_classes=(AllowAny,))
@@ -4,6 +4,7 @@ from typing import Optional
 from django.core.cache import cache
 from django.db.models import QuerySet
 from django.http.response import HttpResponseBadRequest
+from django.shortcuts import get_object_or_404
 from drf_yasg import openapi
 from drf_yasg.utils import no_body, swagger_auto_schema
 from rest_framework.decorators import action
@@ -91,6 +92,25 @@ class ApplicationViewSet(ModelViewSet):
             applications.append(application)
         return applications
 
+    @swagger_auto_schema(
+        responses={
+            204: "Access granted",
+            403: "Access denied",
+        }
+    )
+    @action(detail=True, methods=["GET"])
+    # pylint: disable=unused-argument
+    def check_access(self, request: Request, slug: str) -> Response:
+        """Check access to a single application by slug"""
+        # Don't use self.get_object as that checks for view_application permission
+        # which the user might not have, even if they have access
+        application = get_object_or_404(Application, slug=slug)
+        engine = PolicyEngine(application, self.request.user, self.request)
+        engine.build()
+        if engine.passing:
+            return Response(status=204)
+        return Response(status=403)
+
     @swagger_auto_schema(
         manual_parameters=[
             openapi.Parameter(
@@ -1,7 +1,9 @@
 """Groups API Viewset"""
+from django.db.models.query import QuerySet
 from rest_framework.fields import JSONField
 from rest_framework.serializers import ModelSerializer
 from rest_framework.viewsets import ModelViewSet
+from rest_framework_guardian.filters import ObjectPermissionsFilter
 
 from authentik.core.api.utils import is_dict
 from authentik.core.models import Group
@@ -26,3 +28,16 @@ class GroupViewSet(ModelViewSet):
     search_fields = ["name", "is_superuser"]
     filterset_fields = ["name", "is_superuser"]
     ordering = ["name"]
+
+    def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
+        """Custom filter_queryset method which ignores guardian, but still supports sorting"""
+        for backend in list(self.filter_backends):
+            if backend == ObjectPermissionsFilter:
+                continue
+            queryset = backend().filter_queryset(self.request, queryset, self)
+        return queryset
+
+    def filter_queryset(self, queryset):
+        if self.request.user.has_perm("authentik_core.view_group"):
+            return self._filter_queryset_for_list(queryset)
+        return super().filter_queryset(queryset)
@@ -45,6 +45,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
             "verbose_name",
             "verbose_name_plural",
             "policy_engine_mode",
+            "user_matching_mode",
         ]
 
 
@@ -1,18 +1,30 @@
 """User API Views"""
+from json import loads
+
+from django.db.models.query import QuerySet
 from django.http.response import Http404
 from django.urls import reverse_lazy
 from django.utils.http import urlencode
+from django_filters.filters import BooleanFilter, CharFilter
+from django_filters.filterset import FilterSet
 from drf_yasg.utils import swagger_auto_schema, swagger_serializer_method
 from guardian.utils import get_anonymous_user
 from rest_framework.decorators import action
 from rest_framework.fields import CharField, JSONField, SerializerMethodField
 from rest_framework.request import Request
 from rest_framework.response import Response
-from rest_framework.serializers import BooleanField, ModelSerializer
+from rest_framework.serializers import (
+    BooleanField,
+    ListSerializer,
+    ModelSerializer,
+    ValidationError,
+)
 from rest_framework.viewsets import ModelViewSet
+from rest_framework_guardian.filters import ObjectPermissionsFilter
 
 from authentik.admin.api.metrics import CoordinateSerializer, get_events_per_1h
 from authentik.api.decorators import permission_required
+from authentik.core.api.groups import GroupSerializer
 from authentik.core.api.utils import LinkSerializer, PassiveSerializer, is_dict
 from authentik.core.middleware import (
     SESSION_IMPERSONATE_ORIGINAL_USER,
@@ -29,6 +41,8 @@ class UserSerializer(ModelSerializer):
     is_superuser = BooleanField(read_only=True)
     avatar = CharField(read_only=True)
     attributes = JSONField(validators=[is_dict], required=False)
+    groups = ListSerializer(child=GroupSerializer(), read_only=True, source="ak_groups")
+    uid = CharField(read_only=True)
 
     class Meta:
 
@@ -40,9 +54,11 @@ class UserSerializer(ModelSerializer):
             "is_active",
             "last_login",
             "is_superuser",
+            "groups",
             "email",
             "avatar",
             "attributes",
+            "uid",
         ]
 
 
@@ -84,13 +100,44 @@ class UserMetricsSerializer(PassiveSerializer):
     )
 
 
+class UsersFilter(FilterSet):
+    """Filter for users"""
+
+    attributes = CharFilter(
+        field_name="attributes",
+        lookup_expr="",
+        label="Attributes",
+        method="filter_attributes",
+    )
+
+    is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser")
+
+    # pylint: disable=unused-argument
+    def filter_attributes(self, queryset, name, value):
+        """Filter attributes by query args"""
+        try:
+            value = loads(value)
+        except ValueError:
+            raise ValidationError(detail="filter: failed to parse JSON")
+        if not isinstance(value, dict):
+            raise ValidationError(detail="filter: value must be key:value mapping")
+        qs = {}
+        for key, _value in value.items():
+            qs[f"attributes__{key}"] = _value
+        return queryset.filter(**qs)
+
+    class Meta:
+        model = User
+        fields = ["username", "name", "is_active", "is_superuser", "attributes"]
+
+
 class UserViewSet(ModelViewSet):
     """User Viewset"""
 
     queryset = User.objects.none()
     serializer_class = UserSerializer
     search_fields = ["username", "name", "is_active"]
-    filterset_fields = ["username", "name", "is_active"]
+    filterset_class = UsersFilter
 
     def get_queryset(self):
         return User.objects.all().exclude(pk=get_anonymous_user().pk)
@@ -144,3 +191,16 @@ class UserViewSet(ModelViewSet):
             reverse_lazy("authentik_flows:default-recovery") + f"?{querystring}"
         )
         return Response({"link": link})
+
+    def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
+        """Custom filter_queryset method which ignores guardian, but still supports sorting"""
+        for backend in list(self.filter_backends):
+            if backend == ObjectPermissionsFilter:
+                continue
+            queryset = backend().filter_queryset(self.request, queryset, self)
+        return queryset
+
+    def filter_queryset(self, queryset):
+        if self.request.user.has_perm("authentik_core.view_group"):
+            return self._filter_queryset_for_list(queryset)
+        return super().filter_queryset(queryset)
@@ -20,10 +20,12 @@ def is_dict(value: Any):
 class PassiveSerializer(Serializer):
     """Base serializer class which doesn't implement create/update methods"""
 
-    def create(self, validated_data: dict) -> Model:
+    def create(self, validated_data: dict) -> Model:  # pragma: no cover
         return Model()
 
-    def update(self, instance: Model, validated_data: dict) -> Model:
+    def update(
+        self, instance: Model, validated_data: dict
+    ) -> Model:  # pragma: no cover
         return Model()
 
 
authentik/core/migrations/0020_source_user_matching_mode.py (40, new file)

@@ -0,0 +1,40 @@
+# Generated by Django 3.2 on 2021-05-03 17:06
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_core", "0019_source_managed"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="source",
+            name="user_matching_mode",
+            field=models.TextField(
+                choices=[
+                    ("identifier", "Use the source-specific identifier"),
+                    (
+                        "email_link",
+                        "Link to a user with identical email address. Can have security implications when a source doesn't validate email addresses.",
+                    ),
+                    (
+                        "email_deny",
+                        "Use the user's email address, but deny enrollment when the email address already exists.",
+                    ),
+                    (
+                        "username_link",
+                        "Link to a user with identical username address. Can have security implications when a username is used with another source.",
+                    ),
+                    (
+                        "username_deny",
+                        "Use the user's username, but deny enrollment when the username already exists.",
+                    ),
+                ],
+                default="identifier",
+                help_text="How the source determines if an existing user should be authenticated or a new user enrolled.",
+            ),
+        ),
+    ]
@@ -34,6 +34,7 @@ from authentik.policies.models import PolicyBindingModel
LOGGER = get_logger()
USER_ATTRIBUTE_DEBUG = "goauthentik.io/user/debug"
USER_ATTRIBUTE_SA = "goauthentik.io/user/service-account"
+USER_ATTRIBUTE_SOURCES = "goauthentik.io/user/sources"
GRAVATAR_URL = "https://secure.gravatar.com"
DEFAULT_AVATAR = static("dist/assets/images/user_default.png")

@@ -240,6 +241,30 @@ class Application(PolicyBindingModel):
        verbose_name_plural = _("Applications")


+class SourceUserMatchingModes(models.TextChoices):
+    """Different modes a source can handle new/returning users"""
+
+    IDENTIFIER = "identifier", _("Use the source-specific identifier")
+    EMAIL_LINK = "email_link", _(
+        (
+            "Link to a user with identical email address. Can have security implications "
+            "when a source doesn't validate email addresses."
+        )
+    )
+    EMAIL_DENY = "email_deny", _(
+        "Use the user's email address, but deny enrollment when the email address already exists."
+    )
+    USERNAME_LINK = "username_link", _(
+        (
+            "Link to a user with identical username address. Can have security implications "
+            "when a username is used with another source."
+        )
+    )
+    USERNAME_DENY = "username_deny", _(
+        "Use the user's username, but deny enrollment when the username already exists."
+    )
+
+
class Source(ManagedModel, SerializerModel, PolicyBindingModel):
    """Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server"""

@@ -272,6 +297,17 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
        related_name="source_enrollment",
    )

+    user_matching_mode = models.TextField(
+        choices=SourceUserMatchingModes.choices,
+        default=SourceUserMatchingModes.IDENTIFIER,
+        help_text=_(
+            (
+                "How the source determines if an existing user should be authenticated or "
+                "a new user enrolled."
+            )
+        ),
+    )
+
    objects = InheritanceManager()

    @property
@@ -301,6 +337,8 @@ class UserSourceConnection(CreatedUpdatedModel):
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    source = models.ForeignKey(Source, on_delete=models.CASCADE)

+    objects = InheritanceManager()
+
    class Meta:

        unique_together = (("user", "source"),)
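For orientation, the new field is a plain model field, so the matching behaviour can be switched per source. A minimal sketch, assuming a source with the slug "github" already exists (the slug is illustrative only):

# Hypothetical shell snippet; "github" is an example slug.
from authentik.core.models import Source, SourceUserMatchingModes

source = Source.objects.get(slug="github")
source.user_matching_mode = SourceUserMatchingModes.EMAIL_LINK
source.save()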
authentik/core/sources/__init__.py (new file, empty)
authentik/core/sources/flow_manager.py (new file, 286 lines)
@@ -0,0 +1,286 @@
"""Source decision helper"""
from enum import Enum
from typing import Any, Optional, Type

from django.contrib import messages
from django.db import IntegrityError
from django.db.models.query_utils import Q
from django.http import HttpRequest, HttpResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.translation import gettext as _
from structlog.stdlib import get_logger

from authentik.core.models import (
    Source,
    SourceUserMatchingModes,
    User,
    UserSourceConnection,
)
from authentik.core.sources.stage import (
    PLAN_CONTEXT_SOURCES_CONNECTION,
    PostUserEnrollmentStage,
)
from authentik.events.models import Event, EventAction
from authentik.flows.models import Flow, Stage, in_memory_stage
from authentik.flows.planner import (
    PLAN_CONTEXT_PENDING_USER,
    PLAN_CONTEXT_REDIRECT,
    PLAN_CONTEXT_SOURCE,
    PLAN_CONTEXT_SSO,
    FlowPlanner,
)
from authentik.flows.views import NEXT_ARG_NAME, SESSION_KEY_GET, SESSION_KEY_PLAN
from authentik.lib.utils.urls import redirect_with_qs
from authentik.policies.utils import delete_none_keys
from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT


class Action(Enum):
    """Actions that can be decided based on the request
    and source settings"""

    LINK = "link"
    AUTH = "auth"
    ENROLL = "enroll"
    DENY = "deny"


class SourceFlowManager:
    """Help sources decide what they should do after authorization. Based on source settings and
    previous connections, authenticate the user, enroll a new user, link to an existing user
    or deny the request."""

    source: Source
    request: HttpRequest

    identifier: str

    connection_type: Type[UserSourceConnection] = UserSourceConnection

    def __init__(
        self,
        source: Source,
        request: HttpRequest,
        identifier: str,
        enroll_info: dict[str, Any],
    ) -> None:
        self.source = source
        self.request = request
        self.identifier = identifier
        self.enroll_info = enroll_info
        self._logger = get_logger().bind(source=source, identifier=identifier)

    # pylint: disable=too-many-return-statements
    def get_action(self, **kwargs) -> tuple[Action, Optional[UserSourceConnection]]:
        """decide which action should be taken"""
        new_connection = self.connection_type(
            source=self.source, identifier=self.identifier
        )
        # When request is authenticated, always link
        if self.request.user.is_authenticated:
            new_connection.user = self.request.user
            new_connection = self.update_connection(new_connection, **kwargs)
            new_connection.save()
            return Action.LINK, new_connection

        existing_connections = self.connection_type.objects.filter(
            source=self.source, identifier=self.identifier
        )
        if existing_connections.exists():
            connection = existing_connections.first()
            return Action.AUTH, self.update_connection(connection, **kwargs)
        # No connection exists, but we match on identifier, so enroll
        if self.source.user_matching_mode == SourceUserMatchingModes.IDENTIFIER:
            # We don't save the connection here cause it doesn't have a user assigned yet
            return Action.ENROLL, self.update_connection(new_connection, **kwargs)

        # Check for existing users with matching attributes
        query = Q()
        # Either query existing user based on email or username
        if self.source.user_matching_mode in [
            SourceUserMatchingModes.EMAIL_LINK,
            SourceUserMatchingModes.EMAIL_DENY,
        ]:
            if not self.enroll_info.get("email", None):
                self._logger.warning("Refusing to use none email", source=self.source)
                return Action.DENY, None
            query = Q(email__exact=self.enroll_info.get("email", None))
        if self.source.user_matching_mode in [
            SourceUserMatchingModes.USERNAME_LINK,
            SourceUserMatchingModes.USERNAME_DENY,
        ]:
            if not self.enroll_info.get("username", None):
                self._logger.warning(
                    "Refusing to use none username", source=self.source
                )
                return Action.DENY, None
            query = Q(username__exact=self.enroll_info.get("username", None))
        self._logger.debug("trying to link with existing user", query=query)
        matching_users = User.objects.filter(query)
        # No matching users, always enroll
        if not matching_users.exists():
            self._logger.debug("no matching users found, enrolling")
            return Action.ENROLL, self.update_connection(new_connection, **kwargs)

        user = matching_users.first()
        if self.source.user_matching_mode in [
            SourceUserMatchingModes.EMAIL_LINK,
            SourceUserMatchingModes.USERNAME_LINK,
        ]:
            new_connection.user = user
            new_connection = self.update_connection(new_connection, **kwargs)
            new_connection.save()
            return Action.LINK, new_connection
        if self.source.user_matching_mode in [
            SourceUserMatchingModes.EMAIL_DENY,
            SourceUserMatchingModes.USERNAME_DENY,
        ]:
            self._logger.info("denying source because user exists", user=user)
            return Action.DENY, None
        # Should never get here as default enroll case is returned above.
        return Action.DENY, None

    def update_connection(
        self, connection: UserSourceConnection, **kwargs
    ) -> UserSourceConnection:
        """Optionally make changes to the connection after it is looked up/created."""
        return connection

    def get_flow(self, **kwargs) -> HttpResponse:
        """Get the flow response based on user_matching_mode"""
        try:
            action, connection = self.get_action(**kwargs)
        except IntegrityError as exc:
            self._logger.warning("failed to get action", exc=exc)
            return redirect("/")
        self._logger.debug("get_action() says", action=action, connection=connection)
        if connection:
            if action == Action.LINK:
                self._logger.debug("Linking existing user")
                return self.handle_existing_user_link(connection)
            if action == Action.AUTH:
                self._logger.debug("Handling auth user")
                return self.handle_auth_user(connection)
            if action == Action.ENROLL:
                self._logger.debug("Handling enrollment of new user")
                return self.handle_enroll(connection)
        # Default case, assume deny
        messages.error(
            self.request,
            _(
                (
                    "Request to authenticate with %(source)s has been denied. Please authenticate "
                    "with the source you've previously signed up with."
                )
                % {"source": self.source.name}
            ),
        )
        return redirect("/")

    # pylint: disable=unused-argument
    def get_stages_to_append(self, flow: Flow) -> list[Stage]:
        """Hook to override stages which are appended to the flow"""
        if flow.slug == self.source.enrollment_flow.slug:
            return [
                in_memory_stage(PostUserEnrollmentStage),
            ]
        return []

    def _handle_login_flow(self, flow: Flow, **kwargs) -> HttpResponse:
        """Prepare Authentication Plan, redirect user FlowExecutor"""
        # Ensure redirect is carried through when user was trying to
        # authorize application
        final_redirect = self.request.session.get(SESSION_KEY_GET, {}).get(
            NEXT_ARG_NAME, "authentik_core:if-admin"
        )
        kwargs.update(
            {
                # Since we authenticate the user by their token, they have no backend set
                PLAN_CONTEXT_AUTHENTICATION_BACKEND: "django.contrib.auth.backends.ModelBackend",
                PLAN_CONTEXT_SSO: True,
                PLAN_CONTEXT_SOURCE: self.source,
                PLAN_CONTEXT_REDIRECT: final_redirect,
            }
        )
        if not flow:
            return HttpResponseBadRequest()
        # We run the Flow planner here so we can pass the Pending user in the context
        planner = FlowPlanner(flow)
        plan = planner.plan(self.request, kwargs)
        for stage in self.get_stages_to_append(flow):
            plan.append(stage)
        self.request.session[SESSION_KEY_PLAN] = plan
        return redirect_with_qs(
            "authentik_core:if-flow",
            self.request.GET,
            flow_slug=flow.slug,
        )

    # pylint: disable=unused-argument
    def handle_auth_user(
        self,
        connection: UserSourceConnection,
    ) -> HttpResponse:
        """Login user and redirect."""
        messages.success(
            self.request,
            _(
                "Successfully authenticated with %(source)s!"
                % {"source": self.source.name}
            ),
        )
        flow_kwargs = {PLAN_CONTEXT_PENDING_USER: connection.user}
        return self._handle_login_flow(self.source.authentication_flow, **flow_kwargs)

    def handle_existing_user_link(
        self,
        connection: UserSourceConnection,
    ) -> HttpResponse:
        """Handler when the user was already authenticated and linked an external source
        to their account."""
        # Connection has already been saved
        Event.new(
            EventAction.SOURCE_LINKED,
            message="Linked Source",
            source=self.source,
        ).from_http(self.request)
        messages.success(
            self.request,
            _("Successfully linked %(source)s!" % {"source": self.source.name}),
        )
        # When request isn't authenticated we jump straight to auth
        if not self.request.user.is_authenticated:
            return self.handle_auth_user(connection)
        return redirect(
            reverse(
                "authentik_core:if-admin",
            )
            + f"#/user;page-{self.source.slug}"
        )

    def handle_enroll(
        self,
        connection: UserSourceConnection,
    ) -> HttpResponse:
        """User was not authenticated and previous request was not authenticated."""
        messages.success(
            self.request,
            _(
                "Successfully authenticated with %(source)s!"
                % {"source": self.source.name}
            ),
        )

        # We run the Flow planner here so we can pass the Pending user in the context
        if not self.source.enrollment_flow:
            self._logger.warning("source has no enrollment flow")
            return HttpResponseBadRequest()
        return self._handle_login_flow(
            self.source.enrollment_flow,
            **{
                PLAN_CONTEXT_PROMPT: delete_none_keys(self.enroll_info),
                PLAN_CONTEXT_SOURCES_CONNECTION: connection,
            },
        )
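For orientation: concrete sources are expected to subclass SourceFlowManager, point connection_type at their own connection model, and return get_flow() from their callback view. A hedged sketch, with the OAuth names and the access_token field used purely as an illustration:

# Illustrative subclass; the field set on the connection is an assumption.
from authentik.core.sources.flow_manager import SourceFlowManager
from authentik.sources.oauth.models import UserOAuthSourceConnection


class OAuthSourceFlowManager(SourceFlowManager):
    """Decide link/auth/enroll/deny after an OAuth callback."""

    connection_type = UserOAuthSourceConnection

    def update_connection(self, connection, **kwargs):
        # Persist provider-specific data on the looked-up/created connection.
        connection.access_token = kwargs.get("access_token")
        return connection


# In the source's callback view, roughly:
#   return OAuthSourceFlowManager(source, request, identifier, enroll_info).get_flow(
#       access_token=token
#   )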
@@ -1,32 +1,30 @@
-"""OAuth Stages"""
+"""Source flow manager stages"""
from django.http import HttpRequest, HttpResponse

-from authentik.core.models import User
+from authentik.core.models import User, UserSourceConnection
from authentik.events.models import Event, EventAction
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
from authentik.flows.stage import StageView
-from authentik.sources.oauth.models import UserOAuthSourceConnection

-PLAN_CONTEXT_SOURCES_OAUTH_ACCESS = "sources_oauth_access"
+PLAN_CONTEXT_SOURCES_CONNECTION = "goauthentik.io/sources/connection"


class PostUserEnrollmentStage(StageView):
-    """Dynamically injected stage which saves the OAuth Connection after
+    """Dynamically injected stage which saves the Connection after
    the user has been enrolled."""

    # pylint: disable=unused-argument
    def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
        """Stage used after the user has been enrolled"""
-        access: UserOAuthSourceConnection = self.executor.plan.context[
-            PLAN_CONTEXT_SOURCES_OAUTH_ACCESS
+        connection: UserSourceConnection = self.executor.plan.context[
+            PLAN_CONTEXT_SOURCES_CONNECTION
        ]
        user: User = self.executor.plan.context[PLAN_CONTEXT_PENDING_USER]
-        access.user = user
-        access.save()
-        UserOAuthSourceConnection.objects.filter(pk=access.pk).update(user=user)
+        connection.user = user
+        connection.save()
        Event.new(
            EventAction.SOURCE_LINKED,
-            message="Linked OAuth Source",
-            source=access.source,
+            message="Linked Source",
+            source=connection.source,
        ).from_http(self.request)
        return self.executor.stage_ok()
@@ -14,6 +14,8 @@
<link rel="stylesheet" type="text/css" href="{% static 'dist/page.css' %}?v={{ ak_version }}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/empty-state.css' %}?v={{ ak_version }}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/spinner.css' %}?v={{ ak_version }}">
+{% block head_before %}
+{% endblock %}
<link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}?v={{ ak_version }}">
<script src="{% static 'dist/poly.js' %}?v={{ ak_version }}" type="module"></script>
<script>window["polymerSkipLoadingFontRoboto"] = true;</script>

@@ -12,7 +12,7 @@
{% endblock %}

{% block body %}
-<section class="pf-c-page__main-section pf-m-no-padding-mobile pf-m-xl">
+<section class="ak-static-page pf-c-page__main-section pf-m-no-padding-mobile pf-m-xl">
    <div class="pf-c-empty-state">
        <div class="pf-c-empty-state__content">
            <i class="fas fa-exclamation-circle pf-c-empty-state__icon" aria-hidden="true"></i>

@@ -10,7 +10,7 @@
{% block body %}
<ak-message-container></ak-message-container>
<ak-interface-admin>
-<section class="ak-initial-load pf-c-page__main-section pf-m-no-padding-mobile pf-m-xl">
+<section class="ak-static-page pf-c-page__main-section pf-m-no-padding-mobile pf-m-xl">
    <div class="pf-c-empty-state" style="height: 100vh;">
        <div class="pf-c-empty-state__content">
            <span class="pf-c-spinner pf-m-xl pf-c-empty-state__icon" role="progressbar" aria-valuetext="{% trans 'Loading...' %}">

@@ -3,6 +3,10 @@
{% load static %}
{% load i18n %}

+{% block head_before %}
+<script>ShadyDOM = { force: !navigator.webdriver };</script>
+{% endblock %}
+
{% block head %}
<script src="{% static 'dist/FlowInterface.js' %}?v={{ ak_version }}" type="module"></script>
{% endblock %}

@@ -10,7 +14,7 @@
{% block body %}
<ak-message-container></ak-message-container>
<ak-flow-executor>
-<section class="ak-initial-load pf-c-page__main-section pf-m-no-padding-mobile pf-m-xl">
+<section class="ak-static-page pf-c-page__main-section pf-m-no-padding-mobile pf-m-xl">
    <div class="pf-c-empty-state" style="height: 100vh;">
        <div class="pf-c-empty-state__content">
            <span class="pf-c-spinner pf-m-xl pf-c-empty-state__icon" role="progressbar" aria-valuetext="{% trans 'Loading...' %}">

@@ -3,6 +3,10 @@
{% load static %}
{% load i18n %}

+{% block head_before %}
+<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}?v={{ ak_version }}">
+{% endblock %}
+
{% block body %}
<div class="pf-c-background-image">
    <svg xmlns="http://www.w3.org/2000/svg" class="pf-c-background-image__filter" width="0" height="0">
authentik/core/tests/test_applications_api.py (new file, 125 lines)
@@ -0,0 +1,125 @@
"""Test Applications API"""
from django.urls import reverse
from django.utils.encoding import force_str
from rest_framework.test import APITestCase

from authentik.core.models import Application, User
from authentik.policies.dummy.models import DummyPolicy
from authentik.policies.models import PolicyBinding


class TestApplicationsAPI(APITestCase):
    """Test applications API"""

    def setUp(self) -> None:
        self.user = User.objects.get(username="akadmin")
        self.allowed = Application.objects.create(name="allowed", slug="allowed")
        self.denied = Application.objects.create(name="denied", slug="denied")
        PolicyBinding.objects.create(
            target=self.denied,
            policy=DummyPolicy.objects.create(
                name="deny", result=False, wait_min=1, wait_max=2
            ),
            order=0,
        )

    def test_check_access(self):
        """Test check_access operation"""
        self.client.force_login(self.user)
        response = self.client.get(
            reverse(
                "authentik_api:application-check-access",
                kwargs={"slug": self.allowed.slug},
            )
        )
        self.assertEqual(response.status_code, 204)
        response = self.client.get(
            reverse(
                "authentik_api:application-check-access",
                kwargs={"slug": self.denied.slug},
            )
        )
        self.assertEqual(response.status_code, 403)

    def test_list(self):
        """Test list operation without superuser_full_list"""
        self.client.force_login(self.user)
        response = self.client.get(reverse("authentik_api:application-list"))
        self.assertJSONEqual(
            force_str(response.content),
            {
                "pagination": {
                    "next": 0,
                    "previous": 0,
                    "count": 2,
                    "current": 1,
                    "total_pages": 1,
                    "start_index": 1,
                    "end_index": 2,
                },
                "results": [
                    {
                        "pk": str(self.allowed.pk),
                        "name": "allowed",
                        "slug": "allowed",
                        "provider": None,
                        "provider_obj": None,
                        "launch_url": None,
                        "meta_launch_url": "",
                        "meta_icon": None,
                        "meta_description": "",
                        "meta_publisher": "",
                        "policy_engine_mode": "any",
                    },
                ],
            },
        )

    def test_list_superuser_full_list(self):
        """Test list operation with superuser_full_list"""
        self.client.force_login(self.user)
        response = self.client.get(
            reverse("authentik_api:application-list") + "?superuser_full_list=true"
        )
        self.assertJSONEqual(
            force_str(response.content),
            {
                "pagination": {
                    "next": 0,
                    "previous": 0,
                    "count": 2,
                    "current": 1,
                    "total_pages": 1,
                    "start_index": 1,
                    "end_index": 2,
                },
                "results": [
                    {
                        "pk": str(self.allowed.pk),
                        "name": "allowed",
                        "slug": "allowed",
                        "provider": None,
                        "provider_obj": None,
                        "launch_url": None,
                        "meta_launch_url": "",
                        "meta_icon": None,
                        "meta_description": "",
                        "meta_publisher": "",
                        "policy_engine_mode": "any",
                    },
                    {
                        "launch_url": None,
                        "meta_description": "",
                        "meta_icon": None,
                        "meta_launch_url": "",
                        "meta_publisher": "",
                        "name": "denied",
                        "pk": str(self.denied.pk),
                        "policy_engine_mode": "any",
                        "provider": None,
                        "provider_obj": None,
                        "slug": "denied",
                    },
                ],
            },
        )
@@ -1,11 +1,14 @@
"""authentik core models tests"""
from time import sleep
+from typing import Callable, Type

from django.test import TestCase
from django.utils.timezone import now
from guardian.shortcuts import get_anonymous_user

-from authentik.core.models import Token
+from authentik.core.models import Provider, Source, Token
+from authentik.flows.models import Stage
+from authentik.lib.utils.reflection import all_subclasses


class TestModels(TestCase):
@@ -18,9 +21,46 @@ class TestModels(TestCase):
        self.assertTrue(token.is_expired)

    def test_token_expire_no_expire(self):
-        """Test token expiring with "expiring" set """
+        """Test token expiring with "expiring" set"""
        token = Token.objects.create(
            expires=now(), user=get_anonymous_user(), expiring=False
        )
        sleep(0.5)
        self.assertFalse(token.is_expired)
+
+
+def source_tester_factory(test_model: Type[Stage]) -> Callable:
+    """Test source"""
+
+    def tester(self: TestModels):
+        model_class = None
+        if test_model._meta.abstract:
+            model_class = test_model.__bases__[0]()
+        else:
+            model_class = test_model()
+        model_class.slug = "test"
+        self.assertIsNotNone(model_class.component)
+        _ = model_class.ui_login_button
+        _ = model_class.ui_user_settings
+
+    return tester
+
+
+def provider_tester_factory(test_model: Type[Stage]) -> Callable:
+    """Test provider"""
+
+    def tester(self: TestModels):
+        model_class = None
+        if test_model._meta.abstract:
+            model_class = test_model.__bases__[0]()
+        else:
+            model_class = test_model()
+        self.assertIsNotNone(model_class.component)
+
+    return tester
+
+
+for model in all_subclasses(Source):
+    setattr(TestModels, f"test_model_{model.__name__}", source_tester_factory(model))
+for model in all_subclasses(Provider):
+    setattr(TestModels, f"test_model_{model.__name__}", provider_tester_factory(model))
@@ -2,9 +2,10 @@
from dataclasses import dataclass
from typing import Optional

-from rest_framework.fields import CharField
+from rest_framework.fields import CharField, DictField

from authentik.core.api.utils import PassiveSerializer
+from authentik.flows.challenge import Challenge


@dataclass
@@ -14,8 +15,8 @@ class UILoginButton:
    # Name, ran through i18n
    name: str

-    # URL Which Button points to
-    url: str
+    # Challenge which is presented to the user when they click the button
+    challenge: Challenge

    # Icon URL, used as-is
    icon_url: Optional[str] = None
@@ -25,7 +26,7 @@ class UILoginButtonSerializer(PassiveSerializer):
    """Serializer for Login buttons of sources"""

    name = CharField()
-    url = CharField()
+    challenge = DictField()
    icon_url = CharField(required=False, allow_null=True)
@@ -3,7 +3,9 @@ import django_filters
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_pem_private_key
from cryptography.x509 import load_pem_x509_certificate
+from django.http.response import HttpResponse
from django.utils.translation import gettext_lazy as _
+from drf_yasg import openapi
from drf_yasg.utils import swagger_auto_schema
from rest_framework.decorators import action
from rest_framework.fields import (
@@ -39,7 +41,7 @@ class CertificateKeyPairSerializer(ModelSerializer):
        """Show if this keypair has a private key configured or not"""
        return instance.key_data != "" and instance.key_data is not None

-    def validate_certificate_data(self, value):
+    def validate_certificate_data(self, value: str) -> str:
        """Verify that input is a valid PEM x509 Certificate"""
        try:
            load_pem_x509_certificate(value.encode("utf-8"), default_backend())
@@ -47,7 +49,7 @@ class CertificateKeyPairSerializer(ModelSerializer):
            raise ValidationError("Unable to load certificate.")
        return value

-    def validate_key_data(self, value):
+    def validate_key_data(self, value: str) -> str:
        """Verify that input is a valid PEM RSA Key"""
        # Since this field is optional, data can be empty.
        if value != "":
@@ -57,8 +59,10 @@ class CertificateKeyPairSerializer(ModelSerializer):
                    password=None,
                    backend=default_backend(),
                )
-            except ValueError:
-                raise ValidationError("Unable to load private key.")
+            except (ValueError, TypeError):
+                raise ValidationError(
+                    "Unable to load private key (possibly encrypted?)."
+                )
        return value

    class Meta:
@@ -143,7 +147,16 @@ class CertificateKeyPairViewSet(ModelViewSet):
        serializer = self.get_serializer(instance)
        return Response(serializer.data)

-    @swagger_auto_schema(responses={200: CertificateDataSerializer(many=False)})
+    @swagger_auto_schema(
+        manual_parameters=[
+            openapi.Parameter(
+                name="download",
+                in_=openapi.IN_QUERY,
+                type=openapi.TYPE_BOOLEAN,
+            )
+        ],
+        responses={200: CertificateDataSerializer(many=False)},
+    )
    @action(detail=True, pagination_class=None, filter_backends=[])
    # pylint: disable=invalid-name, unused-argument
    def view_certificate(self, request: Request, pk: str) -> Response:
@@ -154,11 +167,29 @@ class CertificateKeyPairViewSet(ModelViewSet):
            secret=certificate,
            type="certificate",
        ).from_http(request)
+        if "download" in request._request.GET:
+            # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
+            response = HttpResponse(
+                certificate.certificate_data, content_type="application/x-pem-file"
+            )
+            response[
+                "Content-Disposition"
+            ] = f'attachment; filename="{certificate.name}_certificate.pem"'
+            return response
        return Response(
            CertificateDataSerializer({"data": certificate.certificate_data}).data
        )

-    @swagger_auto_schema(responses={200: CertificateDataSerializer(many=False)})
+    @swagger_auto_schema(
+        manual_parameters=[
+            openapi.Parameter(
+                name="download",
+                in_=openapi.IN_QUERY,
+                type=openapi.TYPE_BOOLEAN,
+            )
+        ],
+        responses={200: CertificateDataSerializer(many=False)},
+    )
    @action(detail=True, pagination_class=None, filter_backends=[])
    # pylint: disable=invalid-name, unused-argument
    def view_private_key(self, request: Request, pk: str) -> Response:
@@ -169,4 +200,13 @@ class CertificateKeyPairViewSet(ModelViewSet):
            secret=certificate,
            type="private_key",
        ).from_http(request)
+        if "download" in request._request.GET:
+            # Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
+            response = HttpResponse(
+                certificate.key_data, content_type="application/x-pem-file"
+            )
+            response[
+                "Content-Disposition"
+            ] = f'attachment; filename="{certificate.name}_private_key.pem"'
+            return response
        return Response(CertificateDataSerializer({"data": certificate.key_data}).data)
@@ -33,7 +33,7 @@ class CertificateBuilder:
    def save(self) -> Optional[CertificateKeyPair]:
        """Save generated certificate as model"""
        if not self.__certificate:
-            return None
+            raise ValueError("Certificated hasn't been built yet")
        return CertificateKeyPair.objects.create(
            name=self.common_name,
            certificate_data=self.certificate,
@@ -2,7 +2,9 @@
import datetime

from django.test import TestCase
+from django.urls import reverse

+from authentik.core.models import User
from authentik.crypto.api import CertificateKeyPairSerializer
from authentik.crypto.builder import CertificateBuilder
from authentik.crypto.models import CertificateKeyPair
@@ -37,6 +39,8 @@ class TestCrypto(TestCase):
        """Test Builder"""
        builder = CertificateBuilder()
        builder.common_name = "test-cert"
+        with self.assertRaises(ValueError):
+            builder.save()
        builder.build(
            subject_alt_names=[],
            validity_days=3,
@@ -45,3 +49,45 @@ class TestCrypto(TestCase):
        now = datetime.datetime.today()
        self.assertEqual(instance.name, "test-cert")
        self.assertEqual((instance.certificate.not_valid_after - now).days, 2)
+
+    def test_certificate_download(self):
+        """Test certificate export (download)"""
+        self.client.force_login(User.objects.get(username="akadmin"))
+        keypair = CertificateKeyPair.objects.first()
+        response = self.client.get(
+            reverse(
+                "authentik_api:certificatekeypair-view-certificate",
+                kwargs={"pk": keypair.pk},
+            )
+        )
+        self.assertEqual(200, response.status_code)
+        response = self.client.get(
+            reverse(
+                "authentik_api:certificatekeypair-view-certificate",
+                kwargs={"pk": keypair.pk},
+            )
+            + "?download",
+        )
+        self.assertEqual(200, response.status_code)
+        self.assertIn("Content-Disposition", response)
+
+    def test_private_key_download(self):
+        """Test private_key export (download)"""
+        self.client.force_login(User.objects.get(username="akadmin"))
+        keypair = CertificateKeyPair.objects.first()
+        response = self.client.get(
+            reverse(
+                "authentik_api:certificatekeypair-view-private-key",
+                kwargs={"pk": keypair.pk},
+            )
+        )
+        self.assertEqual(200, response.status_code)
+        response = self.client.get(
+            reverse(
+                "authentik_api:certificatekeypair-view-private-key",
+                kwargs={"pk": keypair.pk},
+            )
+            + "?download",
+        )
+        self.assertEqual(200, response.status_code)
+        self.assertIn("Content-Disposition", response)
@@ -8,10 +8,10 @@ from rest_framework.decorators import action
from rest_framework.fields import CharField, DictField, IntegerField
from rest_framework.request import Request
from rest_framework.response import Response
-from rest_framework.serializers import ModelSerializer, Serializer
+from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ReadOnlyModelViewSet

-from authentik.core.api.utils import TypeCreateSerializer
+from authentik.core.api.utils import PassiveSerializer, TypeCreateSerializer
from authentik.events.models import Event, EventAction


@@ -38,31 +38,19 @@ class EventSerializer(ModelSerializer):
    ]


-class EventTopPerUserParams(Serializer):
+class EventTopPerUserParams(PassiveSerializer):
    """Query params for top_per_user"""

    top_n = IntegerField(default=15)

-    def create(self, request: Request) -> Response:
-        raise NotImplementedError
-
-    def update(self, request: Request) -> Response:
-        raise NotImplementedError
-

-class EventTopPerUserSerializer(Serializer):
+class EventTopPerUserSerializer(PassiveSerializer):
    """Response object of Event's top_per_user"""

    application = DictField()
    counted_events = IntegerField()
    unique_users = IntegerField()

-    def create(self, request: Request) -> Response:
-        raise NotImplementedError
-
-    def update(self, request: Request) -> Response:
-        raise NotImplementedError
-

class EventsFilter(django_filters.FilterSet):
    """Filter for events"""
@@ -132,7 +120,7 @@ class EventViewSet(ReadOnlyModelViewSet):
    def top_per_user(self, request: Request):
        """Get the top_n events grouped by user count"""
        filtered_action = request.query_params.get("action", EventAction.LOGIN)
-        top_n = request.query_params.get("top_n", 15)
+        top_n = int(request.query_params.get("top_n", "15"))
        return Response(
            get_objects_for_user(request.user, "authentik_events.view_event")
            .filter(action=filtered_action)
@@ -1,6 +1,9 @@
"""Notification API Views"""
+from django_filters.rest_framework import DjangoFilterBackend
+from guardian.utils import get_anonymous_user
from rest_framework import mixins
from rest_framework.fields import ReadOnlyField
+from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import GenericViewSet

@@ -46,8 +49,12 @@ class NotificationViewSet(
        "event",
        "seen",
    ]
+    filter_backends = [
+        DjangoFilterBackend,
+        OrderingFilter,
+        SearchFilter,
+    ]

    def get_queryset(self):
-        if not self.request:
-            return super().get_queryset()
-        return Notification.objects.filter(user=self.request.user)
+        user = self.request.user if self.request else get_anonymous_user()
+        return Notification.objects.filter(user=user.pk)
@@ -1,6 +1,6 @@
"""Event notification tasks"""
from guardian.shortcuts import get_anonymous_user
-from structlog import get_logger
+from structlog.stdlib import get_logger

from authentik.core.models import User
from authentik.events.models import (
@@ -35,7 +35,10 @@ def event_trigger_handler(event_uuid: str, trigger_name: str):
        LOGGER.warning("event doesn't exist yet or anymore", event_uuid=event_uuid)
        return
    event: Event = events.first()
-    trigger: NotificationRule = NotificationRule.objects.get(name=trigger_name)
+    triggers: NotificationRule = NotificationRule.objects.filter(name=trigger_name)
+    if not triggers.exists():
+        return
+    trigger = triggers.first()

    if "policy_uuid" in event.context:
        policy_uuid = event.context["policy_uuid"]
@@ -58,7 +61,13 @@ def event_trigger_handler(event_uuid: str, trigger_name: str):
        return

    LOGGER.debug("e(trigger): checking if trigger applies", trigger=trigger)
-    user = User.objects.filter(pk=event.user.get("pk")).first() or get_anonymous_user()
+    try:
+        user = (
+            User.objects.filter(pk=event.user.get("pk")).first() or get_anonymous_user()
+        )
+    except User.DoesNotExist:
+        LOGGER.warning("e(trigger): failed to get user", trigger=trigger)
+        return
    policy_engine = PolicyEngine(trigger, user)
    policy_engine.mode = PolicyEngineMode.MODE_ANY
    policy_engine.empty_result = False
authentik/flows/tests/test_stage_model.py (new file, 32 lines)
@@ -0,0 +1,32 @@
"""base model tests"""
from typing import Callable, Type

from django.test import TestCase

from authentik.flows.models import Stage
from authentik.flows.stage import StageView
from authentik.lib.utils.reflection import all_subclasses


class TestModels(TestCase):
    """Generic model properties tests"""


def model_tester_factory(test_model: Type[Stage]) -> Callable:
    """Test a form"""

    def tester(self: TestModels):
        model_class = None
        if test_model._meta.abstract:
            model_class = test_model.__bases__[0]()
        else:
            model_class = test_model()
        self.assertTrue(issubclass(model_class.type, StageView))
        self.assertIsNotNone(test_model.component)
        _ = test_model.ui_user_settings

    return tester


for model in all_subclasses(Stage):
    setattr(TestModels, f"test_model_{model.__name__}", model_tester_factory(model))
@@ -13,7 +13,7 @@ from django.db.models.query_utils import Q
from django.db.utils import IntegrityError
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import BaseSerializer, Serializer
-from structlog import BoundLogger, get_logger
+from structlog.stdlib import BoundLogger, get_logger

from authentik.flows.models import Flow, FlowStageBinding, Stage
from authentik.flows.transfer.common import (
@@ -160,7 +160,7 @@ class FlowImporter:
        try:
            model: SerializerModel = apps.get_model(model_app_label, model_name)
        except LookupError:
-            self.logger.error(
+            self.logger.warning(
                "app or model does not exist", app=model_app_label, model=model_name
            )
            return False
@@ -168,7 +168,7 @@ class FlowImporter:
        try:
            serializer = self._validate_single(entry)
        except EntryInvalidError as exc:
-            self.logger.error("entry not valid", entry=entry, error=exc)
+            self.logger.warning("entry not valid", entry=entry, error=exc)
            return False

        model = serializer.save()
@@ -14,6 +14,7 @@ from drf_yasg import openapi
from drf_yasg.utils import no_body, swagger_auto_schema
from rest_framework.permissions import AllowAny
from rest_framework.views import APIView
+from sentry_sdk import capture_exception
from structlog.stdlib import BoundLogger, get_logger

from authentik.core.models import USER_ATTRIBUTE_DEBUG
@@ -152,7 +153,8 @@ class FlowExecutorView(APIView):
            stage_response = self.current_stage_view.get(request, *args, **kwargs)
            return to_stage_response(request, stage_response)
        except Exception as exc:  # pylint: disable=broad-except
-            self._logger.exception(exc)
+            capture_exception(exc)
+            self._logger.warning(exc)
            return to_stage_response(request, FlowErrorResponse(request, exc))

    @swagger_auto_schema(
@@ -180,7 +182,8 @@ class FlowExecutorView(APIView):
            stage_response = self.current_stage_view.post(request, *args, **kwargs)
            return to_stage_response(request, stage_response)
        except Exception as exc:  # pylint: disable=broad-except
-            self._logger.exception(exc)
+            capture_exception(exc)
+            self._logger.warning(exc)
            return to_stage_response(request, FlowErrorResponse(request, exc))

    def _initiate_plan(self) -> FlowPlan:
@@ -295,7 +298,7 @@ class CancelView(View):
        if SESSION_KEY_PLAN in request.session:
            del request.session[SESSION_KEY_PLAN]
            LOGGER.debug("Canceled current plan")
-        return redirect("authentik_core:root-redirect")
+        return redirect("authentik_core:default-invalidation")


class ToDefaultFlow(View):
@@ -86,6 +86,13 @@ class ConfigLoader:
        url = urlparse(value)
        if url.scheme == "env":
            value = os.getenv(url.netloc, url.query)
+        if url.scheme == "file":
+            try:
+                with open(url.netloc, "r") as _file:
+                    value = _file.read()
+            except OSError:
+                self._log("error", f"Failed to read config value from {url.netloc}")
+                value = url.query
        return value

    def update_from_file(self, path: str):
@@ -163,6 +170,7 @@ class ConfigLoader:
        # Walk each component of the path
        path_parts = path.split(sep)
        for comp in path_parts[:-1]:
+            # pyright: reportGeneralTypeIssues=false
            if comp not in root:
                root[comp] = {}
            root = root.get(comp)
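For orientation, the effect of the new file:// branch can be shown in isolation; urlparse() puts the part before the first "/" or "?" into netloc, which is the path that gets opened, and the text after "?" acts as a fallback value:

# Standalone sketch mimicking the branch added above (not authentik code);
# the filename is an example only.
from urllib.parse import urlparse


def resolve(value: str) -> str:
    url = urlparse(value)
    if url.scheme == "file":
        try:
            with open(url.netloc, "r") as _file:
                return _file.read()
        except OSError:
            return url.query  # fallback after the "?"
    return value


print(resolve("file://postgres_password.txt?not-set"))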
@@ -3,8 +3,13 @@ postgresql:
  host: localhost
  name: authentik
  user: authentik
+  port: 5432
  password: 'env://POSTGRES_PASSWORD'

+web:
+  listen: 0.0.0.0:9000
+  listen_tls: 0.0.0.0:9443
+
redis:
  host: localhost
  password: ''
@@ -34,7 +39,10 @@ email:
  from: authentik@localhost

outposts:
-  docker_image_base: "beryju/authentik" # this is prepended to -proxy:version
+  # Placeholders:
+  # %(type)s: Outpost type; proxy, ldap, etc
+  # %(version)s: Current version; 2021.4.1
+  docker_image_base: "beryju/authentik-%(type)s:%(version)s"

authentik:
  avatars: gravatar # gravatar or none
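The new comment documents the two placeholders; as a quick illustration (the values below are examples), the string expands with ordinary %-formatting:

# Illustration of the placeholder expansion; values are examples only.
image_base = "beryju/authentik-%(type)s:%(version)s"
print(image_base % {"type": "proxy", "version": "2021.4.1"})
# beryju/authentik-proxy:2021.4.1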
@@ -5,21 +5,39 @@ from aioredis.errors import ConnectionClosedError, ReplyError
 from billiard.exceptions import WorkerLostError
 from botocore.client import ClientError
 from celery.exceptions import CeleryError
+from channels.middleware import BaseMiddleware
 from channels_redis.core import ChannelFull
-from django.core.exceptions import DisallowedHost, ValidationError
+from django.core.exceptions import SuspiciousOperation, ValidationError
 from django.db import InternalError, OperationalError, ProgrammingError
+from django.http.response import Http404
 from django_redis.exceptions import ConnectionInterrupted
 from docker.errors import DockerException
 from ldap3.core.exceptions import LDAPException
 from redis.exceptions import ConnectionError as RedisConnectionError
 from redis.exceptions import RedisError, ResponseError
 from rest_framework.exceptions import APIException
+from sentry_sdk import Hub
+from sentry_sdk.tracing import Transaction
 from structlog.stdlib import get_logger
 from websockets.exceptions import WebSocketException
 
+from authentik.lib.utils.reflection import class_to_path
+
 LOGGER = get_logger()
 
 
+class SentryWSMiddleware(BaseMiddleware):
+    """Sentry Websocket middleweare to set the transaction name based on
+    consumer class path"""
+
+    async def __call__(self, scope, receive, send):
+        transaction: Optional[Transaction] = Hub.current.scope.transaction
+        class_path = class_to_path(self.inner.consumer_class)
+        if transaction:
+            transaction.name = class_path
+        return await self.inner(scope, receive, send)
+
+
 class SentryIgnoredException(Exception):
     """Base Class for all errors that are suppressed, and not sent to sentry."""
@@ -36,7 +54,7 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
         OperationalError,
         InternalError,
         ProgrammingError,
-        DisallowedHost,
+        SuspiciousOperation,
         ValidationError,
         # Redis errors
         RedisConnectionError,
@@ -61,6 +79,8 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
         LDAPException,
         # Docker errors
         DockerException,
+        # End-user errors
+        Http404,
     )
     if "exc_info" in hint:
         _, exc_value, _ = hint["exc_info"]
authentik/lib/tests/test_utils_reflection.py (new file, 16 lines)
@@ -0,0 +1,16 @@
+"""Test Reflection utils"""
+
+from datetime import datetime
+
+from django.test import TestCase
+
+from authentik.lib.utils.reflection import path_to_class
+
+
+class TestReflectionUtils(TestCase):
+    """Test Reflection-utils"""
+
+    def test_path_to_class(self):
+        """Test path_to_class"""
+        self.assertIsNone(path_to_class(None))
+        self.assertEqual(path_to_class("datetime.datetime"), datetime)
@@ -3,6 +3,9 @@ from typing import Any, Optional
 
 from django.http import HttpRequest
 
+OUTPOST_REMOTE_IP_HEADER = "HTTP_X_AUTHENTIK_REMOTE_IP"
+USER_ATTRIBUTE_CAN_OVERRIDE_IP = "goauthentik.io/user/override-ips"
+
 
 def _get_client_ip_from_meta(meta: dict[str, Any]) -> Optional[str]:
     """Attempt to get the client's IP by checking common HTTP Headers.
@@ -18,9 +21,27 @@ def _get_client_ip_from_meta(meta: dict[str, Any]) -> Optional[str]:
     return None
 
 
+def _get_outpost_override_ip(request: HttpRequest) -> Optional[str]:
+    """Get the actual remote IP when set by an outpost. Only
+    allowed when the request is authenticated, by a user with USER_ATTRIBUTE_CAN_OVERRIDE_IP set
+    to outpost"""
+    if not hasattr(request, "user"):
+        return None
+    if not request.user.is_authenticated:
+        return None
+    if OUTPOST_REMOTE_IP_HEADER not in request.META:
+        return None
+    if request.user.attributes.get(USER_ATTRIBUTE_CAN_OVERRIDE_IP, False):
+        return None
+    return request.META[OUTPOST_REMOTE_IP_HEADER]
+
+
 def get_client_ip(request: Optional[HttpRequest]) -> Optional[str]:
     """Attempt to get the client's IP by checking common HTTP Headers.
     Returns none if no IP Could be found"""
     if request:
+        override = _get_outpost_override_ip(request)
+        if override:
+            return override
         return _get_client_ip_from_meta(request.META)
     return None
@@ -1,33 +1,47 @@
 """Outpost API Views"""
+from dacite.core import from_dict
+from dacite.exceptions import DaciteError
 from drf_yasg.utils import swagger_auto_schema
 from rest_framework.decorators import action
 from rest_framework.fields import BooleanField, CharField, DateTimeField
 from rest_framework.request import Request
 from rest_framework.response import Response
-from rest_framework.serializers import JSONField, ModelSerializer
+from rest_framework.serializers import JSONField, ModelSerializer, ValidationError
 from rest_framework.viewsets import ModelViewSet
 
 from authentik.core.api.providers import ProviderSerializer
 from authentik.core.api.utils import PassiveSerializer, is_dict
-from authentik.outposts.models import Outpost, default_outpost_config
+from authentik.outposts.models import Outpost, OutpostConfig, default_outpost_config
 
 
 class OutpostSerializer(ModelSerializer):
     """Outpost Serializer"""
 
-    _config = JSONField(validators=[is_dict])
+    config = JSONField(validators=[is_dict], source="_config")
+    # TODO: Remove _config again, this is only here for legacy with older outposts
+    _config = JSONField(validators=[is_dict], read_only=True)
     providers_obj = ProviderSerializer(source="providers", many=True, read_only=True)
 
+    def validate_config(self, config) -> dict:
+        """Check that the config has all required fields"""
+        try:
+            from_dict(OutpostConfig, config)
+        except DaciteError as exc:
+            raise ValidationError(f"Failed to validate config: {str(exc)}") from exc
+        return config
+
     class Meta:
 
         model = Outpost
         fields = [
             "pk",
             "name",
+            "type",
             "providers",
             "providers_obj",
             "service_connection",
             "token_identifier",
+            "config",
             "_config",
         ]
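For context, dacite's from_dict raises a DaciteError when a dict cannot be coerced into the target dataclass, which is what validate_config above relies on. A minimal standalone sketch of the same pattern, using a made-up dataclass rather than authentik's OutpostConfig:

# Sketch of the validation pattern; ExampleConfig is hypothetical.
from dataclasses import dataclass

from dacite.core import from_dict
from dacite.exceptions import DaciteError


@dataclass
class ExampleConfig:
    authentik_host: str  # required field; a config missing it should fail


def validate(config: dict) -> dict:
    try:
        from_dict(ExampleConfig, config)
    except DaciteError as exc:
        raise ValueError(f"Failed to validate config: {exc}") from exc
    return config


validate({"authentik_host": "https://authentik.local"})  # passes
# validate({}) raises ValueError, mirroring the serializer's ValidationError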
@@ -1,17 +1,8 @@
 """authentik outposts app config"""
 from importlib import import_module
-from os import R_OK, access
-from os.path import expanduser
-from pathlib import Path
-from socket import gethostname
-from urllib.parse import urlparse
 
-import yaml
 from django.apps import AppConfig
 from django.db import ProgrammingError
-from docker.constants import DEFAULT_UNIX_SOCKET
-from kubernetes.config.incluster_config import SERVICE_TOKEN_FILENAME
-from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION
 from structlog.stdlib import get_logger
 
 LOGGER = get_logger()
@@ -27,49 +18,8 @@ class AuthentikOutpostConfig(AppConfig):
     def ready(self):
         import_module("authentik.outposts.signals")
         try:
-            AuthentikOutpostConfig.init_local_connection()
+            from authentik.outposts.tasks import outpost_local_connection
+
+            outpost_local_connection.delay()
         except ProgrammingError:
             pass
-
-    @staticmethod
-    def init_local_connection():
-        """Check if local kubernetes or docker connections should be created"""
-        from authentik.outposts.models import (
-            DockerServiceConnection,
-            KubernetesServiceConnection,
-        )
-
-        # Explicitly check against token filename, as thats
-        # only present when the integration is enabled
-        if Path(SERVICE_TOKEN_FILENAME).exists():
-            LOGGER.debug("Detected in-cluster Kubernetes Config")
-            if not KubernetesServiceConnection.objects.filter(local=True).exists():
-                LOGGER.debug("Created Service Connection for in-cluster")
-                KubernetesServiceConnection.objects.create(
-                    name="Local Kubernetes Cluster", local=True, kubeconfig={}
-                )
-        # For development, check for the existence of a kubeconfig file
-        kubeconfig_path = expanduser(KUBE_CONFIG_DEFAULT_LOCATION)
-        if Path(kubeconfig_path).exists():
-            LOGGER.debug("Detected kubeconfig")
-            kubeconfig_local_name = f"k8s-{gethostname()}"
-            if not KubernetesServiceConnection.objects.filter(
-                name=kubeconfig_local_name
-            ).exists():
-                LOGGER.debug("Creating kubeconfig Service Connection")
-                with open(kubeconfig_path, "r") as _kubeconfig:
-                    KubernetesServiceConnection.objects.create(
-                        name=kubeconfig_local_name,
-                        kubeconfig=yaml.safe_load(_kubeconfig),
-                    )
-        unix_socket_path = urlparse(DEFAULT_UNIX_SOCKET).path
-        socket = Path(unix_socket_path)
-        if socket.exists() and access(socket, R_OK):
-            LOGGER.debug("Detected local docker socket")
-            if len(DockerServiceConnection.objects.filter(local=True)) == 0:
-                LOGGER.debug("Created Service Connection for docker")
-                DockerServiceConnection.objects.create(
-                    name="Local Docker connection",
-                    local=True,
-                    url=unix_socket_path,
-                )
@@ -42,6 +42,8 @@ class OutpostConsumer(AuthJsonConsumer):
 
     outpost: Optional[Outpost] = None
 
+    last_uid: Optional[str] = None
+
     def connect(self):
         super().connect()
         uuid = self.scope["url_route"]["kwargs"]["pk"]
@@ -52,9 +54,7 @@ class OutpostConsumer(AuthJsonConsumer):
             raise DenyConnection()
         self.accept()
         self.outpost = outpost.first()
-        OutpostState(
-            uid=self.channel_name, last_seen=datetime.now(), _outpost=self.outpost
-        ).save(timeout=OUTPOST_HELLO_INTERVAL * 1.5)
+        self.last_uid = self.channel_name
         LOGGER.debug(
             "added outpost instace to cache",
             outpost=self.outpost,
@@ -63,18 +63,20 @@ class OutpostConsumer(AuthJsonConsumer):
 
     # pylint: disable=unused-argument
     def disconnect(self, close_code):
-        if self.outpost:
-            OutpostState.for_channel(self.outpost, self.channel_name).delete()
+        if self.outpost and self.last_uid:
+            OutpostState.for_channel(self.outpost, self.last_uid).delete()
         LOGGER.debug(
             "removed outpost instance from cache",
             outpost=self.outpost,
-            channel_name=self.channel_name,
+            instance_uuid=self.last_uid,
         )
 
     def receive_json(self, content: Data):
         msg = from_dict(WebsocketMessage, content)
+        uid = msg.args.get("uuid", self.channel_name)
+        self.last_uid = uid
         state = OutpostState(
-            uid=self.channel_name,
+            uid=uid,
             last_seen=datetime.now(),
             _outpost=self.outpost,
         )
@@ -1,9 +1,12 @@
 """Base Controller"""
 from dataclasses import dataclass
+from typing import Optional
 
 from structlog.stdlib import get_logger
 from structlog.testing import capture_logs
 
+from authentik import __version__
+from authentik.lib.config import CONFIG
 from authentik.lib.sentry import SentryIgnoredException
 from authentik.outposts.models import Outpost, OutpostServiceConnection
 
@@ -21,6 +24,7 @@ class DeploymentPort:
     port: int
     name: str
     protocol: str
+    inner_port: Optional[int] = None
 
 
 class BaseController:
@@ -52,6 +56,17 @@ class BaseController:
         """Handler to delete everything we've created"""
         raise NotImplementedError
 
+    def down_with_logs(self) -> list[str]:
+        """Call .down() but capture all log output and return it."""
+        with capture_logs() as logs:
+            self.down()
+        return [x["event"] for x in logs]
+
     def get_static_deployment(self) -> str:
         """Return a static deployment configuration"""
         raise NotImplementedError
+
+    def get_container_image(self) -> str:
+        """Get container image to use for this outpost"""
+        image_name_template: str = CONFIG.y("outposts.docker_image_base")
+        return image_name_template % {"type": self.outpost.type, "version": __version__}
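A small sketch of how the two additions above are meant to interact: the controller derives its image name from the configurable template, and port mappings fall back to port when inner_port is unset. All concrete values here are hypothetical, not taken from the diff:

# Hypothetical values; mirrors the template expansion and the
# `inner_port or port` fallback used by the controllers.
from dataclasses import dataclass
from typing import Optional


@dataclass
class Port:
    port: int
    name: str
    protocol: str
    inner_port: Optional[int] = None


template = "beryju/authentik-%(type)s:%(version)s"
print(template % {"type": "proxy", "version": "2021.4.1"})

ports = [Port(9000, "http", "tcp"), Port(443, "https", "tcp", inner_port=9443)]
mapping = {f"{p.port}/{p.protocol}": p.inner_port or p.port for p in ports}
print(mapping)  # {'9000/tcp': 9000, '443/tcp': 9443}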
@@ -8,7 +8,6 @@ from docker.models.containers import Container
 from yaml import safe_dump
 
 from authentik import __version__
-from authentik.lib.config import CONFIG
 from authentik.outposts.controllers.base import BaseController, ControllerException
 from authentik.outposts.models import (
     DockerServiceConnection,
@@ -60,15 +59,14 @@ class DockerController(BaseController):
             return self.client.containers.get(container_name), False
         except NotFound:
             self.logger.info("Container does not exist, creating")
-            image_prefix = CONFIG.y("outposts.docker_image_base")
-            image_name = f"{image_prefix}-{self.outpost.type}:{__version__}"
+            image_name = self.get_container_image()
             self.client.images.pull(image_name)
             container_args = {
                 "image": image_name,
                 "name": f"authentik-proxy-{self.outpost.uuid.hex}",
                 "detach": True,
                 "ports": {
-                    f"{port.port}/{port.protocol.lower()}": port.port
+                    f"{port.port}/{port.protocol.lower()}": port.inner_port or port.port
                     for port in self.deployment_ports
                 },
                 "environment": self._get_env(),
@@ -134,7 +132,8 @@ class DockerController(BaseController):
     def down(self):
         try:
             container, _ = self._get_container()
-            container.kill()
+            if container.status == "running":
+                container.kill()
             container.remove()
         except DockerException as exc:
             raise ControllerException from exc
@@ -142,15 +141,15 @@ class DockerController(BaseController):
     def get_static_deployment(self) -> str:
         """Generate docker-compose yaml for proxy, version 3.5"""
         ports = [
-            f"{port.port}:{port.port}/{port.protocol.lower()}"
+            f"{port.port}:{port.inner_port or port.port}/{port.protocol.lower()}"
            for port in self.deployment_ports
         ]
-        image_prefix = CONFIG.y("outposts.docker_image_base")
+        image_name = self.get_container_image()
         compose = {
             "version": "3.5",
             "services": {
                 f"authentik_{self.outpost.type}": {
-                    "image": f"{image_prefix}-{self.outpost.type}:{__version__}",
+                    "image": image_name,
                     "ports": ports,
                     "environment": {
                         "AUTHENTIK_HOST": self.outpost.config.authentik_host,
@@ -1,6 +1,7 @@
 """Base Kubernetes Reconciler"""
 from typing import TYPE_CHECKING, Generic, TypeVar
 
+from django.utils.text import slugify
 from kubernetes.client import V1ObjectMeta
 from kubernetes.client.models.v1_deployment import V1Deployment
 from kubernetes.client.models.v1_pod import V1Pod
@@ -37,16 +38,30 @@ class KubernetesObjectReconciler(Generic[T]):
     def __init__(self, controller: "KubernetesController"):
         self.controller = controller
         self.namespace = controller.outpost.config.kubernetes_namespace
-        self.logger = get_logger()
+        self.logger = get_logger().bind(type=self.__class__.__name__)
+
+    @property
+    def noop(self) -> bool:
+        """Return true if this object should not be created/updated/deleted in this cluster"""
+        return False
 
     @property
     def name(self) -> str:
         """Get the name of the object this reconciler manages"""
-        raise NotImplementedError
+        return (
+            self.controller.outpost.config.object_naming_template
+            % {
+                "name": slugify(self.controller.outpost.name),
+                "uuid": self.controller.outpost.uuid.hex,
+            }
+        ).lower()
 
     def up(self):
         """Create object if it doesn't exist, update if needed or recreate if needed."""
         current = None
+        if self.noop:
+            self.logger.debug("Object is noop")
+            return
         reference = self.get_reference_object()
         try:
             try:
@@ -58,7 +73,6 @@ class KubernetesObjectReconciler(Generic[T]):
                 self.logger.debug("Other unhandled error", exc=exc)
                 raise exc
             else:
-                self.logger.debug("Got current, running reconcile")
                 self.reconcile(current, reference)
         except NeedsRecreate:
             self.logger.debug("Recreate requested")
@@ -67,16 +81,19 @@ class KubernetesObjectReconciler(Generic[T]):
                 self.delete(current)
             else:
                 self.logger.debug("No old found, creating")
-            self.logger.debug("Created")
+            self.logger.debug("Creating")
             self.create(reference)
         except NeedsUpdate:
             self.logger.debug("Updating")
             self.update(current, reference)
         else:
-            self.logger.debug("Nothing to do...")
+            self.logger.debug("Object is up-to-date.")
 
     def down(self):
         """Delete object if found"""
+        if self.noop:
+            self.logger.debug("Object is noop")
+            return
         try:
             current = self.retrieve()
             self.delete(current)
@@ -120,7 +137,7 @@ class KubernetesObjectReconciler(Generic[T]):
             namespace=self.namespace,
             labels={
                 "app.kubernetes.io/name": f"authentik-{self.controller.outpost.type.lower()}",
-                "app.kubernetes.io/instance": self.controller.outpost.name,
+                "app.kubernetes.io/instance": slugify(self.controller.outpost.name),
                 "app.kubernetes.io/version": __version__,
                 "app.kubernetes.io/managed-by": "goauthentik.io",
                 "goauthentik.io/outpost-uuid": self.controller.outpost.uuid.hex,
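The name property now resolves objects through the configurable object_naming_template, whose default "ak-outpost-%(name)s" is introduced in the OutpostConfig change later in this compare. A rough sketch of the resolution; the outpost name and UUID below are made up:

# Rough sketch of the name resolution; slugify is Django's.
from django.utils.text import slugify

object_naming_template = "ak-outpost-%(name)s"
name = (
    object_naming_template
    % {"name": slugify("My Outpost"), "uuid": "0123456789abcdef"}
).lower()
print(name)  # ak-outpost-my-outpost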
@@ -16,8 +16,6 @@ from kubernetes.client import (
     V1SecretKeySelector,
 )
 
-from authentik import __version__
-from authentik.lib.config import CONFIG
 from authentik.outposts.controllers.base import FIELD_MANAGER
 from authentik.outposts.controllers.k8s.base import (
     KubernetesObjectReconciler,
@@ -39,10 +37,6 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
         self.api = AppsV1Api(controller.client)
         self.outpost = self.controller.outpost
 
-    @property
-    def name(self) -> str:
-        return f"authentik-outpost-{self.controller.outpost.uuid.hex}"
-
     def reconcile(self, current: V1Deployment, reference: V1Deployment):
         super().reconcile(current, reference)
         if current.spec.replicas != reference.spec.replicas:
@@ -68,14 +62,13 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
         for port in self.controller.deployment_ports:
             container_ports.append(
                 V1ContainerPort(
-                    container_port=port.port,
+                    container_port=port.inner_port or port.port,
                     name=port.name,
                     protocol=port.protocol.upper(),
                 )
             )
         meta = self.get_object_meta(name=self.name)
-        secret_name = f"authentik-outpost-{self.controller.outpost.uuid.hex}-api"
-        image_prefix = CONFIG.y("outposts.docker_image_base")
+        image_name = self.controller.get_container_image()
         return V1Deployment(
             metadata=meta,
             spec=V1DeploymentSpec(
@@ -87,14 +80,14 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
                 containers=[
                     V1Container(
                         name=str(self.outpost.type),
-                        image=f"{image_prefix}-{self.outpost.type}:{__version__}",
+                        image=image_name,
                        ports=container_ports,
                        env=[
                            V1EnvVar(
                                name="AUTHENTIK_HOST",
                                value_from=V1EnvVarSource(
                                    secret_key_ref=V1SecretKeySelector(
-                                        name=secret_name,
+                                        name=self.name,
                                        key="authentik_host",
                                    )
                                ),
@@ -103,7 +96,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
                                name="AUTHENTIK_TOKEN",
                                value_from=V1EnvVarSource(
                                    secret_key_ref=V1SecretKeySelector(
-                                        name=secret_name,
+                                        name=self.name,
                                        key="token",
                                    )
                                ),
@@ -112,7 +105,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
                                name="AUTHENTIK_INSECURE",
                                value_from=V1EnvVarSource(
                                    secret_key_ref=V1SecretKeySelector(
-                                        name=secret_name,
+                                        name=self.name,
                                        key="authentik_host_insecure",
                                    )
                                ),
@@ -26,10 +26,6 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
         super().__init__(controller)
         self.api = CoreV1Api(controller.client)
 
-    @property
-    def name(self) -> str:
-        return f"authentik-outpost-{self.controller.outpost.uuid.hex}-api"
-
     def reconcile(self, current: V1Secret, reference: V1Secret):
         super().reconcile(current, reference)
         for key in reference.data.keys():
@@ -21,10 +21,6 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
         super().__init__(controller)
         self.api = CoreV1Api(controller.client)
 
-    @property
-    def name(self) -> str:
-        return f"authentik-outpost-{self.controller.outpost.uuid.hex}"
-
     def reconcile(self, current: V1Service, reference: V1Service):
         super().reconcile(current, reference)
         if len(current.spec.ports) != len(reference.spec.ports):
@@ -43,13 +39,17 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
                     name=port.name,
                     port=port.port,
                     protocol=port.protocol.upper(),
-                    target_port=port.port,
+                    target_port=port.inner_port or port.port,
                )
            )
         selector_labels = DeploymentReconciler(self.controller).get_pod_meta()
         return V1Service(
             metadata=meta,
-            spec=V1ServiceSpec(ports=ports, selector=selector_labels, type="ClusterIP"),
+            spec=V1ServiceSpec(
+                ports=ports,
+                selector=selector_labels,
+                type=self.controller.outpost.config.kubernetes_service_type,
+            ),
         )
 
     def create(self, reference: V1Service):
authentik/outposts/controllers/k8s/utils.py (new file, 11 lines)
@@ -0,0 +1,11 @@
+"""k8s utils"""
+from pathlib import Path
+
+
+def get_namespace() -> str:
+    """Get the namespace if we're running in a pod, otherwise default to default"""
+    path = Path("/var/run/secrets/kubernetes.io/serviceaccount/namespace")
+    if path.exists():
+        with open(path, "r") as _namespace_file:
+            return _namespace_file.read()
+    return "default"
@@ -2,10 +2,9 @@
 from io import StringIO
 from typing import Type
 
-from kubernetes.client import OpenApiException
 from kubernetes.client.api_client import ApiClient
+from kubernetes.client.exceptions import ApiException
 from structlog.testing import capture_logs
-from urllib3.exceptions import HTTPError
 from yaml import dump_all
 
 from authentik.outposts.controllers.base import BaseController, ControllerException
@@ -43,34 +42,55 @@ class KubernetesController(BaseController):
                 reconciler = self.reconcilers[reconcile_key](self)
                 reconciler.up()
 
-        except (OpenApiException, HTTPError) as exc:
-            raise ControllerException from exc
+        except ApiException as exc:
+            raise ControllerException(str(exc)) from exc
 
     def up_with_logs(self) -> list[str]:
         try:
             all_logs = []
             for reconcile_key in self.reconcile_order:
+                if reconcile_key in self.outpost.config.kubernetes_disabled_components:
+                    all_logs += [f"{reconcile_key.title()}: Disabled"]
+                    continue
                 with capture_logs() as logs:
                     reconciler = self.reconcilers[reconcile_key](self)
                     reconciler.up()
                 all_logs += [f"{reconcile_key.title()}: {x['event']}" for x in logs]
             return all_logs
-        except (OpenApiException, HTTPError) as exc:
-            raise ControllerException from exc
+        except ApiException as exc:
+            raise ControllerException(str(exc)) from exc
 
     def down(self):
         try:
             for reconcile_key in self.reconcile_order:
                 reconciler = self.reconcilers[reconcile_key](self)
+                self.logger.debug("Tearing down object", name=reconcile_key)
                 reconciler.down()
 
-        except OpenApiException as exc:
-            raise ControllerException from exc
+        except ApiException as exc:
+            raise ControllerException(str(exc)) from exc
+
+    def down_with_logs(self) -> list[str]:
+        try:
+            all_logs = []
+            for reconcile_key in self.reconcile_order:
+                if reconcile_key in self.outpost.config.kubernetes_disabled_components:
+                    all_logs += [f"{reconcile_key.title()}: Disabled"]
+                    continue
+                with capture_logs() as logs:
+                    reconciler = self.reconcilers[reconcile_key](self)
+                    reconciler.down()
+                all_logs += [f"{reconcile_key.title()}: {x['event']}" for x in logs]
+            return all_logs
+        except ApiException as exc:
+            raise ControllerException(str(exc)) from exc
 
     def get_static_deployment(self) -> str:
         documents = []
         for reconcile_key in self.reconcile_order:
             reconciler = self.reconcilers[reconcile_key](self)
+            if reconciler.noop:
+                continue
             documents.append(reconciler.get_reference_object().to_dict())
 
         with StringIO() as _str:
authentik/outposts/migrations/0016_alter_outpost_type.py (new file, 20 lines)
@@ -0,0 +1,20 @@
+# Generated by Django 3.2 on 2021-04-26 09:27
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_outposts", "0015_auto_20201224_1206"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="outpost",
+            name="type",
+            field=models.TextField(
+                choices=[("proxy", "Proxy"), ("ldap", "Ldap")], default="proxy"
+            ),
+        ),
+    ]
@@ -5,6 +5,7 @@ from typing import Iterable, Optional, Union
 from uuid import uuid4
 
 from dacite import from_dict
+from django.contrib.auth.models import Permission
 from django.core.cache import cache
 from django.db import models, transaction
 from django.db.models.base import Model
@@ -31,6 +32,8 @@ from authentik.crypto.models import CertificateKeyPair
 from authentik.lib.config import CONFIG
 from authentik.lib.models import InheritanceForeignKey
 from authentik.lib.sentry import SentryIgnoredException
+from authentik.lib.utils.http import USER_ATTRIBUTE_CAN_OVERRIDE_IP
+from authentik.outposts.controllers.k8s.utils import get_namespace
 from authentik.outposts.docker_tls import DockerInlineTLS
 
 OUR_VERSION = parse(__version__)
@@ -39,7 +42,7 @@ LOGGER = get_logger()
 
 
 class ServiceConnectionInvalid(SentryIgnoredException):
-    """"Exception raised when a Service Connection has invalid parameters"""
+    """Exception raised when a Service Connection has invalid parameters"""
 
 
 @dataclass
@@ -55,16 +58,19 @@ class OutpostConfig:
         "error_reporting.environment", "customer"
     )
 
+    object_naming_template: str = field(default="ak-outpost-%(name)s")
     kubernetes_replicas: int = field(default=1)
-    kubernetes_namespace: str = field(default="default")
+    kubernetes_namespace: str = field(default_factory=get_namespace)
     kubernetes_ingress_annotations: dict[str, str] = field(default_factory=dict)
-    kubernetes_ingress_secret_name: str = field(default="authentik-outpost")
+    kubernetes_ingress_secret_name: str = field(default="authentik-outpost-tls")
+    kubernetes_service_type: str = field(default="ClusterIP")
+    kubernetes_disabled_components: list[str] = field(default_factory=list)
 
 
 class OutpostModel(Model):
     """Base model for providers that need more objects than just themselves"""
 
-    def get_required_objects(self) -> Iterable[models.Model]:
+    def get_required_objects(self) -> Iterable[Union[models.Model, str]]:
         """Return a list of all required objects"""
         return [self]
 
@@ -77,6 +83,7 @@ class OutpostType(models.TextChoices):
     """Outpost types, currently only the reverse proxy is available"""
 
     PROXY = "proxy"
+    LDAP = "ldap"
 
 
 def default_outpost_config(host: Optional[str] = None):
@@ -201,7 +208,7 @@ class DockerServiceConnection(OutpostServiceConnection):
             )
             client.containers.list()
         except DockerException as exc:
-            LOGGER.error(exc)
+            LOGGER.warning(exc)
             raise ServiceConnectionInvalid from exc
         return client
 
@@ -326,6 +333,7 @@ class Outpost(models.Model):
         if not users.exists():
             user: User = User.objects.create(username=self.user_identifier)
             user.attributes[USER_ATTRIBUTE_SA] = True
+            user.attributes[USER_ATTRIBUTE_CAN_OVERRIDE_IP] = True
             user.set_unusable_password()
             user.save()
         else:
@@ -334,9 +342,29 @@ class Outpost(models.Model):
         # the ones the user needs
         with transaction.atomic():
             UserObjectPermission.objects.filter(user=user).delete()
-            for model in self.get_required_objects():
-                code_name = f"{model._meta.app_label}.view_{model._meta.model_name}"
-                assign_perm(code_name, user, model)
+            user.user_permissions.clear()
+            for model_or_perm in self.get_required_objects():
+                if isinstance(model_or_perm, models.Model):
+                    model_or_perm: models.Model
+                    code_name = (
+                        f"{model_or_perm._meta.app_label}."
+                        f"view_{model_or_perm._meta.model_name}"
+                    )
+                    assign_perm(code_name, user, model_or_perm)
+                else:
+                    app_label, perm = model_or_perm.split(".")
+                    permission = Permission.objects.filter(
+                        codename=perm,
+                        content_type__app_label=app_label,
+                    )
+                    if not permission.exists():
+                        LOGGER.warning("permission doesn't exist", perm=model_or_perm)
+                        continue
+                    user.user_permissions.add(permission.first())
+        LOGGER.debug(
+            "Updated service account's permissions",
+            perms=UserObjectPermission.objects.filter(user=user),
+        )
         return user
 
     @property
@@ -359,9 +387,9 @@ class Outpost(models.Model):
             managed=f"goauthentik.io/outpost/{self.token_identifier}",
         )
 
-    def get_required_objects(self) -> Iterable[models.Model]:
+    def get_required_objects(self) -> Iterable[Union[models.Model, str]]:
         """Get an iterator of all objects the user needs read access to"""
-        objects = [self]
+        objects: list[Union[models.Model, str]] = [self]
         for provider in (
             Provider.objects.filter(outpost=self).select_related().select_subclasses()
         ):
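get_required_objects can now yield either model instances or "app_label.codename" permission strings, and the branch above splits the string form to look up a global Permission. A toy illustration of that dispatch, with no Django ORM involved and purely invented names:

# Toy dispatch mirroring the isinstance() branch above; nothing here touches a database.
class FakeModel:
    pass


def describe(model_or_perm):
    if isinstance(model_or_perm, FakeModel):
        return "object-level view permission via assign_perm()"
    app_label, codename = model_or_perm.split(".")
    return f"global permission {codename!r} from app {app_label!r}"


print(describe(FakeModel()))
print(describe("authentik_events.add_event"))  # hypothetical permission string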
@@ -9,7 +9,7 @@ CELERY_BEAT_SCHEDULE = {
     },
     "outposts_service_connection_check": {
         "task": "authentik.outposts.tasks.outpost_service_connection_monitor",
-        "schedule": crontab(minute=0, hour="*"),
+        "schedule": crontab(minute="*/60"),
         "options": {"queue": "authentik_scheduled"},
     },
     "outpost_token_ensurer": {
@@ -17,4 +17,9 @@ CELERY_BEAT_SCHEDULE = {
         "schedule": crontab(minute="*/5"),
         "options": {"queue": "authentik_scheduled"},
     },
+    "outpost_local_connection": {
+        "task": "authentik.outposts.tasks.outpost_local_connection",
+        "schedule": crontab(minute="*/60"),
+        "options": {"queue": "authentik_scheduled"},
+    },
 }
@@ -1,7 +1,7 @@
 """authentik outpost signals"""
-from django.conf import settings
+from django.core.cache import cache
 from django.db.models import Model
-from django.db.models.signals import post_save, pre_delete
+from django.db.models.signals import post_save, pre_delete, pre_save
 from django.dispatch import receiver
 from structlog.stdlib import get_logger
 
@@ -9,7 +9,11 @@ from authentik.core.models import Provider
 from authentik.crypto.models import CertificateKeyPair
 from authentik.lib.utils.reflection import class_to_path
 from authentik.outposts.models import Outpost, OutpostServiceConnection
-from authentik.outposts.tasks import outpost_post_save, outpost_pre_delete
+from authentik.outposts.tasks import (
+    CACHE_KEY_OUTPOST_DOWN,
+    outpost_controller,
+    outpost_post_save,
+)
 
 LOGGER = get_logger()
 UPDATE_TRIGGERING_MODELS = (
@@ -20,6 +24,28 @@ UPDATE_TRIGGERING_MODELS = (
 )
 
 
+@receiver(pre_save, sender=Outpost)
+# pylint: disable=unused-argument
+def pre_save_outpost(sender, instance: Outpost, **_):
+    """Pre-save checks for an outpost, if the name or config.kubernetes_namespace changes,
+    we call down and then wait for the up after save"""
+    old_instances = Outpost.objects.filter(pk=instance.pk)
+    if not old_instances.exists():
+        return
+    old_instance = old_instances.first()
+    dirty = False
+    # Name changes the deployment name, need to recreate
+    dirty += old_instance.name != instance.name
+    # namespace requires re-create
+    dirty += (
+        old_instance.config.kubernetes_namespace != instance.config.kubernetes_namespace
+    )
+    if bool(dirty):
+        LOGGER.info("Outpost needs re-deployment due to changes", instance=instance)
+        cache.set(CACHE_KEY_OUTPOST_DOWN % instance.pk.hex, old_instance)
+        outpost_controller.delay(instance.pk.hex, action="down", from_cache=True)
+
+
 @receiver(post_save)
 # pylint: disable=unused-argument
 def post_save_update(sender, instance: Model, **_):
@@ -41,15 +67,5 @@ def post_save_update(sender, instance: Model, **_):
 def pre_delete_cleanup(sender, instance: Outpost, **_):
     """Ensure that Outpost's user is deleted (which will delete the token through cascade)"""
     instance.user.delete()
-    # To ensure that deployment is cleaned up *consistently* we call the controller, and wait
-    # for it to finish. We don't want to call it in this thread, as we don't have the Outpost
-    # Service connection here
-    try:
-        outpost_pre_delete.delay(instance.pk.hex).get()
-    except RuntimeError:
-        # In e2e/integration tests, this might run inside a thread/process and
-        # trigger the celery `Never call result.get() within a task` detection
-        if settings.TEST:
-            pass
-        else:
-            raise
+    cache.set(CACHE_KEY_OUTPOST_DOWN % instance.pk.hex, instance)
+    outpost_controller.delay(instance.pk.hex, action="down", from_cache=True)
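pre_save_outpost accumulates its change checks into dirty with +=, which works because bool is an int subclass; any non-zero sum marks the outpost for a teardown of the old deployment. A tiny standalone illustration with made-up old and new values:

# bool += bool arithmetic used by the dirty flag above (values are invented).
old_name, new_name = "outpost-1", "outpost-2"
old_ns, new_ns = "default", "authentik"

dirty = False
dirty += old_name != new_name  # True -> dirty == 1
dirty += old_ns != new_ns      # True -> dirty == 2
print(bool(dirty))  # True: the old deployment gets torn down before the save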
@ -1,16 +1,25 @@
|
|||||||
"""outpost tasks"""
|
"""outpost tasks"""
|
||||||
from typing import Any
|
from os import R_OK, access
|
||||||
|
from os.path import expanduser
|
||||||
|
from pathlib import Path
|
||||||
|
from socket import gethostname
|
||||||
|
from typing import Any, Optional
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
|
import yaml
|
||||||
from asgiref.sync import async_to_sync
|
from asgiref.sync import async_to_sync
|
||||||
from channels.layers import get_channel_layer
|
from channels.layers import get_channel_layer
|
||||||
from django.core.cache import cache
|
from django.core.cache import cache
|
||||||
from django.db.models.base import Model
|
from django.db.models.base import Model
|
||||||
from django.utils.text import slugify
|
from django.utils.text import slugify
|
||||||
|
from docker.constants import DEFAULT_UNIX_SOCKET
|
||||||
|
from kubernetes.config.incluster_config import SERVICE_TOKEN_FILENAME
|
||||||
|
from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION
|
||||||
from structlog.stdlib import get_logger
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
|
from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
|
||||||
from authentik.lib.utils.reflection import path_to_class
|
from authentik.lib.utils.reflection import path_to_class
|
||||||
from authentik.outposts.controllers.base import ControllerException
|
from authentik.outposts.controllers.base import BaseController, ControllerException
|
||||||
from authentik.outposts.models import (
|
from authentik.outposts.models import (
|
||||||
DockerServiceConnection,
|
DockerServiceConnection,
|
||||||
KubernetesServiceConnection,
|
KubernetesServiceConnection,
|
||||||
@ -20,18 +29,32 @@ from authentik.outposts.models import (
|
|||||||
OutpostState,
|
OutpostState,
|
||||||
OutpostType,
|
OutpostType,
|
||||||
)
|
)
|
||||||
|
from authentik.providers.ldap.controllers.docker import LDAPDockerController
|
||||||
|
from authentik.providers.ldap.controllers.kubernetes import LDAPKubernetesController
|
||||||
from authentik.providers.proxy.controllers.docker import ProxyDockerController
|
from authentik.providers.proxy.controllers.docker import ProxyDockerController
|
||||||
from authentik.providers.proxy.controllers.kubernetes import ProxyKubernetesController
|
from authentik.providers.proxy.controllers.kubernetes import ProxyKubernetesController
|
||||||
from authentik.root.celery import CELERY_APP
|
from authentik.root.celery import CELERY_APP
|
||||||
|
|
||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
|
CACHE_KEY_OUTPOST_DOWN = "outpost_teardown_%s"
|
||||||
|
|
||||||
|
|
||||||
@CELERY_APP.task()
|
def controller_for_outpost(outpost: Outpost) -> Optional[BaseController]:
|
||||||
def outpost_controller_all():
|
"""Get a controller for the outpost, when a service connection is defined"""
|
||||||
"""Launch Controller for all Outposts which support it"""
|
if not outpost.service_connection:
|
||||||
for outpost in Outpost.objects.exclude(service_connection=None):
|
return None
|
||||||
outpost_controller.delay(outpost.pk.hex)
|
service_connection = outpost.service_connection
|
||||||
|
if outpost.type == OutpostType.PROXY:
|
||||||
|
if isinstance(service_connection, DockerServiceConnection):
|
||||||
|
return ProxyDockerController(outpost, service_connection)
|
||||||
|
if isinstance(service_connection, KubernetesServiceConnection):
|
||||||
|
return ProxyKubernetesController(outpost, service_connection)
|
||||||
|
if outpost.type == OutpostType.LDAP:
|
||||||
|
if isinstance(service_connection, DockerServiceConnection):
|
||||||
|
return LDAPDockerController(outpost, service_connection)
|
||||||
|
if isinstance(service_connection, KubernetesServiceConnection):
|
||||||
|
return LDAPKubernetesController(outpost, service_connection)
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
@CELERY_APP.task()
|
@CELERY_APP.task()
|
||||||
@ -60,23 +83,29 @@ def outpost_service_connection_monitor(self: MonitoredTask):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task()
|
||||||
|
def outpost_controller_all():
|
||||||
|
"""Launch Controller for all Outposts which support it"""
|
||||||
|
for outpost in Outpost.objects.exclude(service_connection=None):
|
||||||
|
outpost_controller.delay(outpost.pk.hex, "up", from_cache=False)
|
||||||
|
|
||||||
|
|
||||||
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||||
def outpost_controller(self: MonitoredTask, outpost_pk: str):
|
def outpost_controller(
|
||||||
"""Create/update/monitor the deployment of an Outpost"""
|
self: MonitoredTask, outpost_pk: str, action: str = "up", from_cache: bool = False
|
||||||
|
):
|
||||||
|
"""Create/update/monitor/delete the deployment of an Outpost"""
|
||||||
logs = []
|
logs = []
|
||||||
outpost: Outpost = Outpost.objects.get(pk=outpost_pk)
|
if from_cache:
|
||||||
|
outpost: Outpost = cache.get(CACHE_KEY_OUTPOST_DOWN % outpost_pk)
|
||||||
|
else:
|
||||||
|
outpost: Outpost = Outpost.objects.get(pk=outpost_pk)
|
||||||
self.set_uid(slugify(outpost.name))
|
self.set_uid(slugify(outpost.name))
|
||||||
try:
|
try:
|
||||||
if not outpost.service_connection:
|
controller = controller_for_outpost(outpost)
|
||||||
|
if not controller:
|
||||||
return
|
return
|
||||||
if outpost.type == OutpostType.PROXY:
|
logs = getattr(controller, f"{action}_with_logs")()
|
||||||
service_connection = outpost.service_connection
|
|
||||||
if isinstance(service_connection, DockerServiceConnection):
|
|
||||||
logs = ProxyDockerController(outpost, service_connection).up_with_logs()
|
|
||||||
if isinstance(service_connection, KubernetesServiceConnection):
|
|
||||||
logs = ProxyKubernetesController(
|
|
||||||
outpost, service_connection
|
|
||||||
).up_with_logs()
|
|
||||||
LOGGER.debug("---------------Outpost Controller logs starting----------------")
|
LOGGER.debug("---------------Outpost Controller logs starting----------------")
|
||||||
for log in logs:
|
for log in logs:
|
||||||
     LOGGER.debug(log)
@@ -87,18 +116,6 @@ def outpost_controller(self: MonitoredTask, outpost_pk: str):
     self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, logs))


-@CELERY_APP.task()
-def outpost_pre_delete(outpost_pk: str):
-    """Delete outpost objects before deleting the DB Object"""
-    outpost = Outpost.objects.get(pk=outpost_pk)
-    if outpost.type == OutpostType.PROXY:
-        service_connection = outpost.service_connection
-        if isinstance(service_connection, DockerServiceConnection):
-            ProxyDockerController(outpost, service_connection).down()
-        if isinstance(service_connection, KubernetesServiceConnection):
-            ProxyKubernetesController(outpost, service_connection).down()
-
-
 @CELERY_APP.task(bind=True, base=MonitoredTask)
 def outpost_token_ensurer(self: MonitoredTask):
     """Periodically ensure that all Outposts have valid Service Accounts
@@ -185,3 +202,42 @@ def _outpost_single_update(outpost: Outpost, layer=None):
     for state in OutpostState.for_outpost(outpost):
         LOGGER.debug("sending update", channel=state.uid, outpost=outpost)
         async_to_sync(layer.send)(state.uid, {"type": "event.update"})
+
+
+@CELERY_APP.task()
+def outpost_local_connection():
+    """Checks the local environment and create Service connections."""
+    # Explicitly check against token filename, as thats
+    # only present when the integration is enabled
+    if Path(SERVICE_TOKEN_FILENAME).exists():
+        LOGGER.debug("Detected in-cluster Kubernetes Config")
+        if not KubernetesServiceConnection.objects.filter(local=True).exists():
+            LOGGER.debug("Created Service Connection for in-cluster")
+            KubernetesServiceConnection.objects.create(
+                name="Local Kubernetes Cluster", local=True, kubeconfig={}
+            )
+    # For development, check for the existence of a kubeconfig file
+    kubeconfig_path = expanduser(KUBE_CONFIG_DEFAULT_LOCATION)
+    if Path(kubeconfig_path).exists():
+        LOGGER.debug("Detected kubeconfig")
+        kubeconfig_local_name = f"k8s-{gethostname()}"
+        if not KubernetesServiceConnection.objects.filter(
+            name=kubeconfig_local_name
+        ).exists():
+            LOGGER.debug("Creating kubeconfig Service Connection")
+            with open(kubeconfig_path, "r") as _kubeconfig:
+                KubernetesServiceConnection.objects.create(
+                    name=kubeconfig_local_name,
+                    kubeconfig=yaml.safe_load(_kubeconfig),
+                )
+    unix_socket_path = urlparse(DEFAULT_UNIX_SOCKET).path
+    socket = Path(unix_socket_path)
+    if socket.exists() and access(socket, R_OK):
+        LOGGER.debug("Detected local docker socket")
+        if len(DockerServiceConnection.objects.filter(local=True)) == 0:
+            LOGGER.debug("Created Service Connection for docker")
+            DockerServiceConnection.objects.create(
+                name="Local Docker connection",
+                local=True,
+                url=unix_socket_path,
+            )
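The new outpost_local_connection task above is a plain Celery task; how it gets scheduled is not shown in this hunk. A minimal usage sketch, assuming CELERY_APP lives at authentik.root.celery and using a purely illustrative interval:

from authentik.outposts.tasks import outpost_local_connection
from authentik.root.celery import CELERY_APP  # assumed import path

# run once, asynchronously
outpost_local_connection.delay()

# or register a periodic run with Celery beat (interval is illustrative)
CELERY_APP.conf.beat_schedule["outposts_local_connection"] = {
    "task": "authentik.outposts.tasks.outpost_local_connection",
    "schedule": 60 * 60,  # every hour
}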
@@ -3,6 +3,10 @@ from django.urls import reverse
 from rest_framework.test import APITestCase

 from authentik.core.models import PropertyMapping, User
+from authentik.flows.models import Flow
+from authentik.outposts.api.outposts import OutpostSerializer
+from authentik.outposts.models import default_outpost_config
+from authentik.providers.proxy.models import ProxyProvider


 class TestOutpostServiceConnectionsAPI(APITestCase):
@@ -22,3 +26,22 @@ class TestOutpostServiceConnectionsAPI(APITestCase):
             reverse("authentik_api:outpostserviceconnection-types"),
         )
         self.assertEqual(response.status_code, 200)
+
+    def test_outpost_config(self):
+        """Test Outpost's config field"""
+        provider = ProxyProvider.objects.create(
+            name="test", authorization_flow=Flow.objects.first()
+        )
+        invalid = OutpostSerializer(
+            data={"name": "foo", "providers": [provider.pk], "config": {}}
+        )
+        self.assertFalse(invalid.is_valid())
+        self.assertIn("config", invalid.errors)
+        valid = OutpostSerializer(
+            data={
+                "name": "foo",
+                "providers": [provider.pk],
+                "config": default_outpost_config("foo"),
+            }
+        )
+        self.assertTrue(valid.is_valid())
@@ -0,0 +1,84 @@
# Generated by Django 3.2 on 2021-05-02 17:06

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_policies_event_matcher", "0012_auto_20210323_1339"),
    ]

    operations = [
        migrations.AlterField(
            model_name="eventmatcherpolicy",
            name="app",
            field=models.TextField(
                blank=True,
                choices=[
                    ("authentik.admin", "authentik Admin"),
                    ("authentik.api", "authentik API"),
                    ("authentik.events", "authentik Events"),
                    ("authentik.crypto", "authentik Crypto"),
                    ("authentik.flows", "authentik Flows"),
                    ("authentik.outposts", "authentik Outpost"),
                    ("authentik.lib", "authentik lib"),
                    ("authentik.policies", "authentik Policies"),
                    ("authentik.policies.dummy", "authentik Policies.Dummy"),
                    (
                        "authentik.policies.event_matcher",
                        "authentik Policies.Event Matcher",
                    ),
                    ("authentik.policies.expiry", "authentik Policies.Expiry"),
                    ("authentik.policies.expression", "authentik Policies.Expression"),
                    ("authentik.policies.hibp", "authentik Policies.HaveIBeenPwned"),
                    ("authentik.policies.password", "authentik Policies.Password"),
                    ("authentik.policies.reputation", "authentik Policies.Reputation"),
                    ("authentik.providers.proxy", "authentik Providers.Proxy"),
                    ("authentik.providers.oauth2", "authentik Providers.OAuth2"),
                    ("authentik.providers.saml", "authentik Providers.SAML"),
                    ("authentik.recovery", "authentik Recovery"),
                    ("authentik.sources.ldap", "authentik Sources.LDAP"),
                    ("authentik.sources.oauth", "authentik Sources.OAuth"),
                    ("authentik.sources.plex", "authentik Sources.Plex"),
                    ("authentik.sources.saml", "authentik Sources.SAML"),
                    (
                        "authentik.stages.authenticator_static",
                        "authentik Stages.Authenticator.Static",
                    ),
                    (
                        "authentik.stages.authenticator_totp",
                        "authentik Stages.Authenticator.TOTP",
                    ),
                    (
                        "authentik.stages.authenticator_validate",
                        "authentik Stages.Authenticator.Validate",
                    ),
                    (
                        "authentik.stages.authenticator_webauthn",
                        "authentik Stages.Authenticator.WebAuthn",
                    ),
                    ("authentik.stages.captcha", "authentik Stages.Captcha"),
                    ("authentik.stages.consent", "authentik Stages.Consent"),
                    ("authentik.stages.deny", "authentik Stages.Deny"),
                    ("authentik.stages.dummy", "authentik Stages.Dummy"),
                    ("authentik.stages.email", "authentik Stages.Email"),
                    (
                        "authentik.stages.identification",
                        "authentik Stages.Identification",
                    ),
                    ("authentik.stages.invitation", "authentik Stages.User Invitation"),
                    ("authentik.stages.password", "authentik Stages.Password"),
                    ("authentik.stages.prompt", "authentik Stages.Prompt"),
                    ("authentik.stages.user_delete", "authentik Stages.User Delete"),
                    ("authentik.stages.user_login", "authentik Stages.User Login"),
                    ("authentik.stages.user_logout", "authentik Stages.User Logout"),
                    ("authentik.stages.user_write", "authentik Stages.User Write"),
                    ("authentik.core", "authentik Core"),
                    ("authentik.managed", "authentik Managed"),
                ],
                default="",
                help_text="Match events created by selected application. When left empty, all applications are matched.",
            ),
        ),
    ]
@@ -0,0 +1,85 @@
# Generated by Django 3.2.1 on 2021-05-05 17:17

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_policies_event_matcher", "0013_alter_eventmatcherpolicy_app"),
    ]

    operations = [
        migrations.AlterField(
            model_name="eventmatcherpolicy",
            name="app",
            field=models.TextField(
                blank=True,
                choices=[
                    ("authentik.admin", "authentik Admin"),
                    ("authentik.api", "authentik API"),
                    ("authentik.events", "authentik Events"),
                    ("authentik.crypto", "authentik Crypto"),
                    ("authentik.flows", "authentik Flows"),
                    ("authentik.outposts", "authentik Outpost"),
                    ("authentik.lib", "authentik lib"),
                    ("authentik.policies", "authentik Policies"),
                    ("authentik.policies.dummy", "authentik Policies.Dummy"),
                    (
                        "authentik.policies.event_matcher",
                        "authentik Policies.Event Matcher",
                    ),
                    ("authentik.policies.expiry", "authentik Policies.Expiry"),
                    ("authentik.policies.expression", "authentik Policies.Expression"),
                    ("authentik.policies.hibp", "authentik Policies.HaveIBeenPwned"),
                    ("authentik.policies.password", "authentik Policies.Password"),
                    ("authentik.policies.reputation", "authentik Policies.Reputation"),
                    ("authentik.providers.proxy", "authentik Providers.Proxy"),
                    ("authentik.providers.ldap", "authentik Providers.LDAP"),
                    ("authentik.providers.oauth2", "authentik Providers.OAuth2"),
                    ("authentik.providers.saml", "authentik Providers.SAML"),
                    ("authentik.recovery", "authentik Recovery"),
                    ("authentik.sources.ldap", "authentik Sources.LDAP"),
                    ("authentik.sources.oauth", "authentik Sources.OAuth"),
                    ("authentik.sources.plex", "authentik Sources.Plex"),
                    ("authentik.sources.saml", "authentik Sources.SAML"),
                    (
                        "authentik.stages.authenticator_static",
                        "authentik Stages.Authenticator.Static",
                    ),
                    (
                        "authentik.stages.authenticator_totp",
                        "authentik Stages.Authenticator.TOTP",
                    ),
                    (
                        "authentik.stages.authenticator_validate",
                        "authentik Stages.Authenticator.Validate",
                    ),
                    (
                        "authentik.stages.authenticator_webauthn",
                        "authentik Stages.Authenticator.WebAuthn",
                    ),
                    ("authentik.stages.captcha", "authentik Stages.Captcha"),
                    ("authentik.stages.consent", "authentik Stages.Consent"),
                    ("authentik.stages.deny", "authentik Stages.Deny"),
                    ("authentik.stages.dummy", "authentik Stages.Dummy"),
                    ("authentik.stages.email", "authentik Stages.Email"),
                    (
                        "authentik.stages.identification",
                        "authentik Stages.Identification",
                    ),
                    ("authentik.stages.invitation", "authentik Stages.User Invitation"),
                    ("authentik.stages.password", "authentik Stages.Password"),
                    ("authentik.stages.prompt", "authentik Stages.Prompt"),
                    ("authentik.stages.user_delete", "authentik Stages.User Delete"),
                    ("authentik.stages.user_login", "authentik Stages.User Login"),
                    ("authentik.stages.user_logout", "authentik Stages.User Logout"),
                    ("authentik.stages.user_write", "authentik Stages.User Write"),
                    ("authentik.core", "authentik Core"),
                    ("authentik.managed", "authentik Managed"),
                ],
                default="",
                help_text="Match events created by selected application. When left empty, all applications are matched.",
            ),
        ),
    ]
@@ -4,7 +4,7 @@
 {% load i18n %}

 {% block title %}
-{% trans 'Permission denied - authentik' %}
+{% trans 'Permission denied' %} - {{ config.authentik.branding.title }}
 {% endblock %}

 {% block card_title %}
authentik/providers/ldap/__init__.py (new file, 0 lines)

authentik/providers/ldap/api.py (new file, 54 lines)
@@ -0,0 +1,54 @@
"""LDAPProvider API Views"""
from rest_framework.fields import CharField
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet

from authentik.core.api.providers import ProviderSerializer
from authentik.providers.ldap.models import LDAPProvider


class LDAPProviderSerializer(ProviderSerializer):
    """LDAPProvider Serializer"""

    class Meta:

        model = LDAPProvider
        fields = ProviderSerializer.Meta.fields + [
            "base_dn",
            "search_group",
        ]


class LDAPProviderViewSet(ModelViewSet):
    """LDAPProvider Viewset"""

    queryset = LDAPProvider.objects.all()
    serializer_class = LDAPProviderSerializer
    ordering = ["name"]


class LDAPOutpostConfigSerializer(ModelSerializer):
    """LDAPProvider Serializer"""

    application_slug = CharField(source="application.slug")
    bind_flow_slug = CharField(source="authorization_flow.slug")

    class Meta:

        model = LDAPProvider
        fields = [
            "pk",
            "name",
            "base_dn",
            "bind_flow_slug",
            "application_slug",
            "search_group",
        ]


class LDAPOutpostConfigViewSet(ReadOnlyModelViewSet):
    """LDAPProvider Viewset"""

    queryset = LDAPProvider.objects.filter(application__isnull=False)
    serializer_class = LDAPOutpostConfigSerializer
    ordering = ["name"]
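How these viewsets are exposed over the API is outside this hunk; the following is a hypothetical wiring sketch with a standard DRF router, where the URL prefixes are placeholders rather than authentik's actual routing:

from rest_framework.routers import DefaultRouter

from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet

router = DefaultRouter()
router.register("providers/ldap", LDAPProviderViewSet)      # CRUD for LDAP providers
router.register("outposts/ldap", LDAPOutpostConfigViewSet)  # read-only outpost config
urlpatterns = router.urls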
authentik/providers/ldap/apps.py (new file, 10 lines)
@@ -0,0 +1,10 @@
"""authentik ldap provider app config"""
from django.apps import AppConfig


class AuthentikProviderLDAPConfig(AppConfig):
    """authentik ldap provider app config"""

    name = "authentik.providers.ldap"
    label = "authentik_providers_ldap"
    verbose_name = "authentik Providers.LDAP"
authentik/providers/ldap/controllers/__init__.py (new file, 0 lines)

authentik/providers/ldap/controllers/docker.py (new file, 14 lines)
@@ -0,0 +1,14 @@
"""LDAP Provider Docker Contoller"""
from authentik.outposts.controllers.base import DeploymentPort
from authentik.outposts.controllers.docker import DockerController
from authentik.outposts.models import DockerServiceConnection, Outpost


class LDAPDockerController(DockerController):
    """LDAP Provider Docker Contoller"""

    def __init__(self, outpost: Outpost, connection: DockerServiceConnection):
        super().__init__(outpost, connection)
        self.deployment_ports = [
            DeploymentPort(389, "ldap", "tcp", 3389),
        ]
authentik/providers/ldap/controllers/kubernetes.py (new file, 14 lines)
@@ -0,0 +1,14 @@
"""LDAP Provider Kubernetes Contoller"""
from authentik.outposts.controllers.base import DeploymentPort
from authentik.outposts.controllers.kubernetes import KubernetesController
from authentik.outposts.models import KubernetesServiceConnection, Outpost


class LDAPKubernetesController(KubernetesController):
    """LDAP Provider Kubernetes Contoller"""

    def __init__(self, outpost: Outpost, connection: KubernetesServiceConnection):
        super().__init__(outpost, connection)
        self.deployment_ports = [
            DeploymentPort(389, "ldap", "tcp", 3389),
        ]
authentik/providers/ldap/migrations/0001_initial.py (new file, 44 lines)
@@ -0,0 +1,44 @@
# Generated by Django 3.2 on 2021-04-26 12:45

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("authentik_core", "0019_source_managed"),
    ]

    operations = [
        migrations.CreateModel(
            name="LDAPProvider",
            fields=[
                (
                    "provider_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.provider",
                    ),
                ),
                (
                    "base_dn",
                    models.TextField(
                        default="DC=ldap,DC=goauthentik,DC=io",
                        help_text="DN under which objects are accessible.",
                    ),
                ),
            ],
            options={
                "verbose_name": "LDAP Provider",
                "verbose_name_plural": "LDAP Providers",
            },
            bases=("authentik_core.provider", models.Model),
        ),
    ]
@@ -0,0 +1,26 @@
# Generated by Django 3.2 on 2021-04-26 19:57

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0019_source_managed"),
        ("authentik_providers_ldap", "0001_initial"),
    ]

    operations = [
        migrations.AddField(
            model_name="ldapprovider",
            name="search_group",
            field=models.ForeignKey(
                default=None,
                help_text="Users in this group can do search queries. If not set, every user can execute search queries.",
                null=True,
                on_delete=django.db.models.deletion.SET_DEFAULT,
                to="authentik_core.group",
            ),
        ),
    ]
authentik/providers/ldap/migrations/__init__.py (new file, 0 lines)

authentik/providers/ldap/models.py (new file, 55 lines)
@@ -0,0 +1,55 @@
"""LDAP Provider"""
from typing import Iterable, Optional, Type, Union

from django.db import models
from django.utils.translation import gettext_lazy as _
from rest_framework.serializers import Serializer

from authentik.core.models import Group, Provider
from authentik.outposts.models import OutpostModel


class LDAPProvider(OutpostModel, Provider):
    """Allow applications to authenticate against authentik's users using LDAP."""

    base_dn = models.TextField(
        default="DC=ldap,DC=goauthentik,DC=io",
        help_text=_("DN under which objects are accessible."),
    )

    search_group = models.ForeignKey(
        Group,
        null=True,
        default=None,
        on_delete=models.SET_DEFAULT,
        help_text=_(
            "Users in this group can do search queries. "
            "If not set, every user can execute search queries."
        ),
    )

    @property
    def launch_url(self) -> Optional[str]:
        """LDAP never has a launch URL"""
        return None

    @property
    def component(self) -> str:
        return "ak-provider-ldap-form"

    @property
    def serializer(self) -> Type[Serializer]:
        from authentik.providers.ldap.api import LDAPProviderSerializer

        return LDAPProviderSerializer

    def __str__(self):
        return f"LDAP Provider {self.name}"

    def get_required_objects(self) -> Iterable[Union[models.Model, str]]:
        return [self, "authentik_core.view_user", "authentik_core.view_group"]

    class Meta:

        verbose_name = _("LDAP Provider")
        verbose_name_plural = _("LDAP Providers")
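A short sketch of creating the new provider model through the ORM, using only fields defined above; the flow lookup mirrors the tests elsewhere in this diff, and the group name is a placeholder:

from authentik.core.models import Group
from authentik.flows.models import Flow
from authentik.providers.ldap.models import LDAPProvider

provider = LDAPProvider.objects.create(
    name="LDAP",
    authorization_flow=Flow.objects.first(),
    base_dn="DC=ldap,DC=goauthentik,DC=io",
    # optional: restrict search queries to members of one group
    search_group=Group.objects.filter(name="ldap-search").first(),
)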
@@ -38,6 +38,7 @@ class OAuth2ProviderSerializer(ProviderSerializer):
             "client_type",
             "client_id",
             "client_secret",
+            "access_code_validity",
             "token_validity",
             "include_claims_in_id_token",
             "jwt_alg",
@@ -1,6 +1,9 @@
 """OAuth2Provider API Views"""
+from django_filters.rest_framework import DjangoFilterBackend
+from guardian.utils import get_anonymous_user
 from rest_framework import mixins
 from rest_framework.fields import CharField, ListField
+from rest_framework.filters import OrderingFilter, SearchFilter
 from rest_framework.serializers import ModelSerializer
 from rest_framework.viewsets import GenericViewSet

@@ -36,13 +39,17 @@ class AuthorizationCodeViewSet(
     serializer_class = ExpiringBaseGrantModelSerializer
     filterset_fields = ["user", "provider"]
     ordering = ["provider", "expires"]
+    filter_backends = [
+        DjangoFilterBackend,
+        OrderingFilter,
+        SearchFilter,
+    ]

     def get_queryset(self):
-        if not self.request:
-            return super().get_queryset()
-        if self.request.user.is_superuser:
-            return super().get_queryset()
-        return super().get_queryset().filter(user=self.request.user)
+        user = self.request.user if self.request else get_anonymous_user()
+        if user.is_superuser:
+            return super().get_queryset()
+        return super().get_queryset().filter(user=user.pk)


 class RefreshTokenViewSet(
@@ -57,10 +64,14 @@ class RefreshTokenViewSet(
     serializer_class = ExpiringBaseGrantModelSerializer
     filterset_fields = ["user", "provider"]
     ordering = ["provider", "expires"]
+    filter_backends = [
+        DjangoFilterBackend,
+        OrderingFilter,
+        SearchFilter,
+    ]

     def get_queryset(self):
-        if not self.request:
-            return super().get_queryset()
-        if self.request.user.is_superuser:
-            return super().get_queryset()
-        return super().get_queryset().filter(user=self.request.user)
+        user = self.request.user if self.request else get_anonymous_user()
+        if user.is_superuser:
+            return super().get_queryset()
+        return super().get_queryset().filter(user=user.pk)
@@ -1,11 +1,11 @@
-"""authentik auth oauth provider app config"""
+"""authentik oauth provider app config"""
 from importlib import import_module

 from django.apps import AppConfig


 class AuthentikProviderOAuth2Config(AppConfig):
-    """authentik auth oauth provider app config"""
+    """authentik oauth provider app config"""

     name = "authentik.providers.oauth2"
     label = "authentik_providers_oauth2"
@@ -0,0 +1,24 @@
# Generated by Django 3.2 on 2021-04-28 18:17

from django.db import migrations, models

import authentik.lib.utils.time


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_providers_oauth2", "0011_managed"),
    ]

    operations = [
        migrations.AddField(
            model_name="oauth2provider",
            name="access_code_validity",
            field=models.TextField(
                default="minutes=1",
                help_text="Access codes not valid on or after current time + this value (Format: hours=1;minutes=2;seconds=3).",
                validators=[authentik.lib.utils.time.timedelta_string_validator],
            ),
        ),
    ]
@@ -6,18 +6,18 @@ import time
 from dataclasses import asdict, dataclass, field
 from datetime import datetime
 from hashlib import sha256
-from typing import Any, Optional, Type
+from typing import Any, Optional, Type, Union
 from urllib.parse import urlparse
 from uuid import uuid4

+from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
 from dacite import from_dict
 from django.conf import settings
 from django.db import models
 from django.http import HttpRequest
 from django.utils import dateformat, timezone
 from django.utils.translation import gettext_lazy as _
-from jwkest.jwk import Key, RSAKey, SYMKey, import_rsa_key
-from jwkest.jws import JWS
+from jwt import encode
 from rest_framework.serializers import Serializer

 from authentik.core.models import ExpiringModel, PropertyMapping, Provider, User
@@ -175,6 +175,16 @@ class OAuth2Provider(Provider):
         ),
     )

+    access_code_validity = models.TextField(
+        default="minutes=1",
+        validators=[timedelta_string_validator],
+        help_text=_(
+            (
+                "Access codes not valid on or after current time + this value "
+                "(Format: hours=1;minutes=2;seconds=3)."
+            )
+        ),
+    )
     token_validity = models.TextField(
         default="minutes=10",
         validators=[timedelta_string_validator],
@@ -229,7 +239,7 @@ class OAuth2Provider(Provider):
         token.access_token = token.create_access_token(user, request)
         return token

-    def get_jwt_keys(self) -> list[Key]:
+    def get_jwt_keys(self) -> Union[RSAPrivateKey, str]:
         """
         Takes a provider and returns the set of keys associated with it.
         Returns a list of keys.
@@ -246,17 +256,10 @@ class OAuth2Provider(Provider):
                 self.jwt_alg = JWTAlgorithms.HS256
                 self.save()
             else:
-                # Because the JWT Library uses python cryptodome,
-                # we can't directly pass the RSAPublicKey
-                # object, but have to load it ourselves
-                key = import_rsa_key(self.rsa_key.key_data)
-                keys = [RSAKey(key=key, kid=self.rsa_key.kid)]
-                if not keys:
-                    raise Exception("You must add at least one RSA Key.")
-                return keys
+                return self.rsa_key.private_key

         if self.jwt_alg == JWTAlgorithms.HS256:
-            return [SYMKey(key=self.client_secret, alg=self.jwt_alg)]
+            return self.client_secret

         raise Exception("Unsupported key algorithm.")

@@ -297,11 +300,11 @@ class OAuth2Provider(Provider):

     def encode(self, payload: dict[str, Any]) -> str:
         """Represent the ID Token as a JSON Web Token (JWT)."""
-        keys = self.get_jwt_keys()
+        key = self.get_jwt_keys()
         # If the provider does not have an RSA Key assigned, it was switched to Symmetric
         self.refresh_from_db()
-        jws = JWS(payload, alg=self.jwt_alg)
-        return jws.sign_compact(keys)
+        # pyright: reportGeneralTypeIssues=false
+        return encode(payload, key, algorithm=self.jwt_alg)

     class Meta:
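The hunks above swap jwkest for PyJWT: encode() now passes the key returned by get_jwt_keys() straight to jwt.encode. A minimal round-trip sketch, assuming an HS256 provider where the signing key is the client_secret (all values are placeholders):

from jwt import decode, encode

client_secret = "s3cr3t"                           # stand-in for provider.client_secret
payload = {"sub": "user-123", "iss": "authentik"}  # illustrative claims only

token = encode(payload, client_secret, algorithm="HS256")
claims = decode(token, client_secret, algorithms=["HS256"])
assert claims["sub"] == "user-123"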
@@ -14,7 +14,7 @@
 {% endblock %}

 {% block title %}
-{% trans 'End session' %}
+{% trans 'End session' %} - {{ config.authentik.branding.title }}
 {% endblock %}

 {% block card_title %}
@@ -1,5 +1,5 @@
 """Test authorize view"""
-from django.test import RequestFactory, TestCase
+from django.test import RequestFactory
 from django.urls import reverse
 from django.utils.encoding import force_str

@@ -11,17 +11,21 @@ from authentik.providers.oauth2.errors import (
     ClientIdError,
     RedirectUriError,
 )
-from authentik.providers.oauth2.generators import generate_client_id
+from authentik.providers.oauth2.generators import (
+    generate_client_id,
+    generate_client_secret,
+)
 from authentik.providers.oauth2.models import (
     AuthorizationCode,
     GrantTypes,
     OAuth2Provider,
     RefreshToken,
 )
+from authentik.providers.oauth2.tests.utils import OAuthTestCase
 from authentik.providers.oauth2.views.authorize import OAuthAuthorizationParams


-class TestViewsAuthorize(TestCase):
+class TestAuthorize(OAuthTestCase):
     """Test authorize view"""

     def setUp(self) -> None:
@@ -166,7 +170,7 @@ class TestViewsAuthorize(TestCase):
             name="test",
             client_id="test",
             authorization_flow=flow,
-            redirect_uris="http://localhost",
+            redirect_uris="foo://localhost",
         )
         Application.objects.create(name="app", slug="app", provider=provider)
         state = generate_client_id()
@@ -179,7 +183,7 @@ class TestViewsAuthorize(TestCase):
                 "response_type": "code",
                 "client_id": "test",
                 "state": state,
-                "redirect_uri": "http://localhost",
+                "redirect_uri": "foo://localhost",
             },
         )
         response = self.client.get(
@@ -190,7 +194,7 @@ class TestViewsAuthorize(TestCase):
             force_str(response.content),
             {
                 "type": ChallengeTypes.REDIRECT.value,
-                "to": f"http://localhost?code={code.code}&state={state}",
+                "to": f"foo://localhost?code={code.code}&state={state}",
             },
         )

@@ -200,6 +204,7 @@ class TestViewsAuthorize(TestCase):
         provider = OAuth2Provider.objects.create(
             name="test",
             client_id="test",
+            client_secret=generate_client_secret(),
             authorization_flow=flow,
             redirect_uris="http://localhost",
         )
@@ -233,3 +238,4 @@ class TestViewsAuthorize(TestCase):
                 ),
             },
         )
+        self.validate_jwt(token, provider)
@@ -1,11 +1,11 @@
 """Test token view"""
 from base64 import b64encode

-from django.test import RequestFactory, TestCase
+from django.test import RequestFactory
 from django.urls import reverse
 from django.utils.encoding import force_str

-from authentik.core.models import User
+from authentik.core.models import Application, User
 from authentik.flows.models import Flow
 from authentik.providers.oauth2.constants import (
     GRANT_TYPE_AUTHORIZATION_CODE,
@@ -20,15 +20,17 @@ from authentik.providers.oauth2.models import (
     OAuth2Provider,
     RefreshToken,
 )
+from authentik.providers.oauth2.tests.utils import OAuthTestCase
 from authentik.providers.oauth2.views.token import TokenParams


-class TestViewsToken(TestCase):
+class TestToken(OAuthTestCase):
     """Test token view"""

     def setUp(self) -> None:
         super().setUp()
         self.factory = RequestFactory()
+        self.app = Application.objects.create(name="test", slug="test")

     def test_request_auth_code(self):
         """test request param"""
@@ -97,12 +99,15 @@ class TestViewsToken(TestCase):
             authorization_flow=Flow.objects.first(),
             redirect_uris="http://local.invalid",
         )
+        # Needs to be assigned to an application for iss to be set
+        self.app.provider = provider
+        self.app.save()
         header = b64encode(
             f"{provider.client_id}:{provider.client_secret}".encode()
         ).decode()
         user = User.objects.get(username="akadmin")
         code = AuthorizationCode.objects.create(
-            code="foobar", provider=provider, user=user
+            code="foobar", provider=provider, user=user, is_open_id=True
         )
         response = self.client.post(
             reverse("authentik_providers_oauth2:token"),
@@ -126,8 +131,61 @@ class TestViewsToken(TestCase):
                 ),
             },
         )
+        self.validate_jwt(new_token, provider)

     def test_refresh_token_view(self):
+        """test request param"""
+        provider = OAuth2Provider.objects.create(
+            name="test",
+            client_id=generate_client_id(),
+            client_secret=generate_client_secret(),
+            authorization_flow=Flow.objects.first(),
+            redirect_uris="http://local.invalid",
+        )
+        # Needs to be assigned to an application for iss to be set
+        self.app.provider = provider
+        self.app.save()
+        header = b64encode(
+            f"{provider.client_id}:{provider.client_secret}".encode()
+        ).decode()
+        user = User.objects.get(username="akadmin")
+        token: RefreshToken = RefreshToken.objects.create(
+            provider=provider,
+            user=user,
+            refresh_token=generate_client_id(),
+        )
+        response = self.client.post(
+            reverse("authentik_providers_oauth2:token"),
+            data={
+                "grant_type": GRANT_TYPE_REFRESH_TOKEN,
+                "refresh_token": token.refresh_token,
+                "redirect_uri": "http://local.invalid",
+            },
+            HTTP_AUTHORIZATION=f"Basic {header}",
+            HTTP_ORIGIN="http://local.invalid",
+        )
+        new_token: RefreshToken = (
+            RefreshToken.objects.filter(user=user).exclude(pk=token.pk).first()
+        )
+        self.assertEqual(response["Access-Control-Allow-Credentials"], "true")
+        self.assertEqual(
+            response["Access-Control-Allow-Origin"], "http://local.invalid"
+        )
+        self.assertJSONEqual(
+            force_str(response.content),
+            {
+                "access_token": new_token.access_token,
+                "refresh_token": new_token.refresh_token,
+                "token_type": "bearer",
+                "expires_in": 600,
+                "id_token": provider.encode(
+                    new_token.id_token.to_dict(),
+                ),
+            },
+        )
+        self.validate_jwt(new_token, provider)
+
+    def test_refresh_token_view_invalid_origin(self):
         """test request param"""
         provider = OAuth2Provider.objects.create(
             name="test",
@@ -153,10 +211,13 @@ class TestViewsToken(TestCase):
                 "redirect_uri": "http://local.invalid",
             },
             HTTP_AUTHORIZATION=f"Basic {header}",
+            HTTP_ORIGIN="http://another.invalid",
         )
         new_token: RefreshToken = (
             RefreshToken.objects.filter(user=user).exclude(pk=token.pk).first()
         )
+        self.assertNotIn("Access-Control-Allow-Credentials", response)
+        self.assertNotIn("Access-Control-Allow-Origin", response)
         self.assertJSONEqual(
             force_str(response.content),
             {
authentik/providers/oauth2/tests/utils.py (new file, 31 lines)
@@ -0,0 +1,31 @@
"""OAuth test helpers"""
from django.test import TestCase
from jwt import decode

from authentik.providers.oauth2.models import OAuth2Provider, RefreshToken


class OAuthTestCase(TestCase):
    """OAuth test helpers"""

    required_jwt_keys = [
        "exp",
        "iat",
        "auth_time",
        "acr",
        "sub",
        "iss",
    ]

    def validate_jwt(self, token: RefreshToken, provider: OAuth2Provider):
        """Validate that all required fields are set"""
        jwt = decode(
            token.access_token,
            provider.client_secret,
            algorithms=[provider.jwt_alg],
            audience=provider.client_id,
        )
        id_token = token.id_token.to_dict()
        for key in self.required_jwt_keys:
            self.assertIsNotNone(jwt[key], f"Key {key} is missing in access_token")
            self.assertIsNotNone(id_token[key], f"Key {key} is missing in id_token")
@@ -2,10 +2,11 @@
 import re
 from base64 import b64decode
 from binascii import Error
-from typing import Optional
+from typing import Any, Optional
 from urllib.parse import urlparse

 from django.http import HttpRequest, HttpResponse, JsonResponse
+from django.http.response import HttpResponseRedirect
 from django.utils.cache import patch_vary_headers
 from structlog.stdlib import get_logger

@@ -26,8 +27,8 @@ class TokenResponse(JsonResponse):
         self["Pragma"] = "no-cache"


-def cors_allow_any(request: HttpRequest, response: HttpResponse, *allowed_origins: str):
-    """Add headers to permit CORS requests from any origin, with or without credentials,
+def cors_allow(request: HttpRequest, response: HttpResponse, *allowed_origins: str):
+    """Add headers to permit CORS requests from allowed_origins, with or without credentials,
     with any headers."""
     origin = request.META.get("HTTP_ORIGIN")
     if not origin:
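Only the renamed signature and docstring of cors_allow are visible in this hunk. The sketch below illustrates the general shape of an allow-list CORS check consistent with the behaviour asserted in the token tests above; it is not a copy of authentik's implementation:

from django.http import HttpRequest, HttpResponse


def cors_allow_sketch(
    request: HttpRequest, response: HttpResponse, *allowed_origins: str
) -> HttpResponse:
    origin = request.META.get("HTTP_ORIGIN")
    if not origin:
        return response
    # only echo the origin back when it is in the configured allow-list
    if origin in allowed_origins:
        response["Access-Control-Allow-Origin"] = origin
        response["Access-Control-Allow-Credentials"] = "true"
    return response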
@@ -161,3 +162,18 @@ def protected_resource_view(scopes: list[str]):
         return view_wrapper

     return wrapper
+
+
+class HttpResponseRedirectScheme(HttpResponseRedirect):
+    """HTTP Response to redirect, can be to a non-http scheme"""
+
+    def __init__(
+        self,
+        redirect_to: str,
+        *args: Any,
+        allowed_schemes: Optional[list[str]] = None,
+        **kwargs: Any,
+    ) -> None:
+        self.allowed_schemes = allowed_schemes or ["http", "https", "ftp"]
+        # pyright: reportGeneralTypeIssues=false
+        super().__init__(redirect_to, *args, **kwargs)
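Usage sketch for the new response class: redirecting to a custom (non-http) scheme, as the authorize view does for redirect URIs like foo://localhost. The target URI below is illustrative:

from authentik.providers.oauth2.utils import HttpResponseRedirectScheme

response = HttpResponseRedirectScheme(
    "foo://localhost?code=abc123&state=xyz",  # hypothetical redirect target
    allowed_schemes=["foo"],
)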
@@ -2,12 +2,12 @@
 from dataclasses import dataclass, field
 from datetime import timedelta
 from typing import Optional
-from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit
+from urllib.parse import parse_qs, urlencode, urlparse, urlsplit, urlunsplit
 from uuid import uuid4

 from django.http import HttpRequest, HttpResponse
 from django.http.response import Http404, HttpResponseBadRequest, HttpResponseRedirect
-from django.shortcuts import get_object_or_404, redirect
+from django.shortcuts import get_object_or_404
 from django.utils import timezone
 from django.utils.translation import gettext as _
 from structlog.stdlib import get_logger
@@ -46,6 +46,7 @@ from authentik.providers.oauth2.models import (
     OAuth2Provider,
     ResponseTypes,
 )
+from authentik.providers.oauth2.utils import HttpResponseRedirectScheme
 from authentik.providers.oauth2.views.userinfo import UserInfoView
 from authentik.stages.consent.models import ConsentMode, ConsentStage
 from authentik.stages.consent.stage import (
@@ -218,7 +219,7 @@
         code.code_challenge_method = self.code_challenge_method

         code.expires_at = timezone.now() + timedelta_from_string(
-            self.provider.token_validity
+            self.provider.access_code_validity
         )
         code.scope = self.scope
         code.nonce = self.nonce
@@ -233,6 +234,11 @@ class OAuthFulfillmentStage(StageView):
     params: OAuthAuthorizationParams
     provider: OAuth2Provider

+    def redirect(self, uri: str) -> HttpResponse:
+        """Redirect using HttpResponseRedirectScheme, compatible with non-http schemes"""
+        parsed = urlparse(uri)
+        return HttpResponseRedirectScheme(uri, allowed_schemes=[parsed.scheme])
+
     # pylint: disable=unused-argument
     def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
         """final Stage of an OAuth2 Flow"""
@@ -261,7 +267,7 @@ class OAuthFulfillmentStage(StageView):
                 flow=self.executor.plan.flow_pk,
                 scopes=", ".join(self.params.scope),
             ).from_http(self.request)
-            return redirect(self.create_response_uri())
+            return self.redirect(self.create_response_uri())
         except (ClientIdError, RedirectUriError) as error:
             error.to_event(application=application).from_http(request)
             self.executor.stage_invalid()
@@ -270,7 +276,7 @@ class OAuthFulfillmentStage(StageView):
         except AuthorizeError as error:
             error.to_event(application=application).from_http(request)
             self.executor.stage_invalid()
-            return redirect(error.create_uri())
+            return self.redirect(error.create_uri())

     def create_response_uri(self) -> str:
         """Create a final Response URI the user is redirected to."""
@@ -285,7 +291,7 @@ class OAuthFulfillmentStage(StageView):
                 GrantTypes.HYBRID,
             ]:
                 code = self.params.create_code(self.request)
-                code.save()
+                code.save(force_insert=True)

                 if self.params.grant_type == GrantTypes.AUTHORIZATION_CODE:
                     query_params["code"] = code.code
@@ -304,7 +310,7 @@ class OAuthFulfillmentStage(StageView):
                 return urlunsplit(uri)
             raise OAuth2Error()
         except OAuth2Error as error:
-            LOGGER.exception("Error when trying to create response uri", error=error)
+            LOGGER.warning("Error when trying to create response uri", error=error)
             raise AuthorizeError(
                 self.params.redirect_uri,
                 "server_error",
@@ -104,7 +104,6 @@ class TokenIntrospectionView(View):
     token: RefreshToken
     params: TokenIntrospectionParams
     provider: OAuth2Provider
-    id_token: IDToken

     def post(self, request: HttpRequest) -> HttpResponse:
         """Introspection handler"""
@@ -1,14 +1,23 @@
 """authentik OAuth2 JWKS Views"""
+from base64 import urlsafe_b64encode
+
+from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
 from django.http import HttpRequest, HttpResponse, JsonResponse
 from django.shortcuts import get_object_or_404
 from django.views import View
-from jwkest import long_to_base64
-from jwkest.jwk import import_rsa_key

 from authentik.core.models import Application
 from authentik.providers.oauth2.models import JWTAlgorithms, OAuth2Provider


+def b64_enc(number: int) -> str:
+    """Convert number to base64-encoded octet-value"""
+    length = ((number).bit_length() + 7) // 8
+    number_bytes = number.to_bytes(length, "big")
+    final = urlsafe_b64encode(number_bytes).rstrip(b"=")
+    return final.decode("ascii")
+
+
 class JWKSView(View):
     """Show RSA Key data for Provider"""

@@ -22,15 +31,16 @@ class JWKSView(View):
         response_data = {}

         if provider.jwt_alg == JWTAlgorithms.RS256:
-            public_key = import_rsa_key(provider.rsa_key.key_data).publickey()
+            public_key: RSAPublicKey = provider.rsa_key.private_key.public_key()
+            public_numbers = public_key.public_numbers()
             response_data["keys"] = [
                 {
                     "kty": "RSA",
                     "alg": "RS256",
                     "use": "sig",
                     "kid": provider.rsa_key.kid,
-                    "n": long_to_base64(public_key.n),
-                    "e": long_to_base64(public_key.e),
+                    "n": b64_enc(public_numbers.n),
+                    "e": b64_enc(public_numbers.e),
                 }
             ]
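For context on the JWKS change: the new b64_enc helper base64url-encodes the big-endian bytes of the RSA public numbers. A self-contained sketch using a throwaway key generated with the cryptography library (not a provider's real key):

from base64 import urlsafe_b64encode

from cryptography.hazmat.primitives.asymmetric import rsa


def b64_enc(number: int) -> str:
    """Convert number to base64-encoded octet-value"""
    length = (number.bit_length() + 7) // 8
    return urlsafe_b64encode(number.to_bytes(length, "big")).rstrip(b"=").decode("ascii")


private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
numbers = private_key.public_key().public_numbers()
jwk = {"kty": "RSA", "alg": "RS256", "use": "sig", "n": b64_enc(numbers.n), "e": b64_enc(numbers.e)}
assert jwk["e"] == "AQAB"  # the exponent 65537 always encodes to AQAB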
@@ -19,7 +19,7 @@ from authentik.providers.oauth2.models import (
     ResponseTypes,
     ScopeMapping,
 )
-from authentik.providers.oauth2.utils import cors_allow_any
+from authentik.providers.oauth2.utils import cors_allow

 LOGGER = get_logger()

@@ -112,5 +112,5 @@ class ProviderInfoView(View):
             OAuth2Provider, pk=application.provider_id
         )
         response = super().dispatch(request, *args, **kwargs)
-        cors_allow_any(request, response, *self.provider.redirect_uris.split("\n"))
+        cors_allow(request, response, *self.provider.redirect_uris.split("\n"))
         return response
@@ -16,10 +16,15 @@ from authentik.providers.oauth2.constants import (
 from authentik.providers.oauth2.errors import TokenError, UserAuthError
 from authentik.providers.oauth2.models import (
     AuthorizationCode,
+    ClientTypes,
     OAuth2Provider,
     RefreshToken,
 )
-from authentik.providers.oauth2.utils import TokenResponse, extract_client_auth
+from authentik.providers.oauth2.utils import (
+    TokenResponse,
+    cors_allow,
+    extract_client_auth,
+)

 LOGGER = get_logger()

@@ -71,7 +76,7 @@ class TokenParams:
             LOGGER.warning("OAuth2Provider does not exist", client_id=self.client_id)
             raise TokenError("invalid_client")

-        if self.provider.client_type == "confidential":
+        if self.provider.client_type == ClientTypes.CONFIDENTIAL:
             if self.provider.client_secret != self.client_secret:
                 LOGGER.warning(
                     "Invalid client secret: client does not have secret",
@@ -154,7 +159,18 @@ class TokenParams:
 class TokenView(View):
     """Generate tokens for clients"""

-    params: TokenParams
+    params: Optional[TokenParams] = None
+
+    def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
+        response = super().dispatch(request, *args, **kwargs)
+        allowed_origins = []
+        if self.params:
+            allowed_origins = self.params.provider.redirect_uris.split("\n")
+        cors_allow(self.request, response, *allowed_origins)
+        return response
+
+    def options(self, request: HttpRequest) -> HttpResponse:
+        return TokenResponse({})

     def post(self, request: HttpRequest) -> HttpResponse:
         """Generate tokens for clients"""
@@ -14,7 +14,7 @@ from authentik.providers.oauth2.constants import (
     SCOPE_GITHUB_USER_READ,
 )
 from authentik.providers.oauth2.models import RefreshToken, ScopeMapping
-from authentik.providers.oauth2.utils import TokenResponse, cors_allow_any
+from authentik.providers.oauth2.utils import TokenResponse, cors_allow

 LOGGER = get_logger()

@@ -88,7 +88,7 @@ class UserInfoView(View):
         allowed_origins = []
         if self.token:
             allowed_origins = self.token.provider.redirect_uris.split("\n")
-        cors_allow_any(self.request, response, *allowed_origins)
+        cors_allow(self.request, response, *allowed_origins)
         return response

     def options(self, request: HttpRequest) -> HttpResponse:
@@ -1,5 +1,8 @@
 """ProxyProvider API Views"""
+from typing import Any
+
 from drf_yasg.utils import swagger_serializer_method
+from rest_framework.exceptions import ValidationError
 from rest_framework.fields import CharField, ListField, SerializerMethodField
 from rest_framework.serializers import ModelSerializer
 from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet
@@ -30,6 +33,19 @@ class OpenIDConnectConfigurationSerializer(PassiveSerializer):
 class ProxyProviderSerializer(ProviderSerializer):
     """ProxyProvider Serializer"""

+    redirect_uris = CharField(read_only=True)
+
+    def validate(self, attrs) -> dict[Any, str]:
+        """Check that internal_host is set when forward_auth_mode is disabled"""
+        if (
+            not attrs.get("forward_auth_mode", False)
+            and attrs.get("internal_host", "") == ""
+        ):
+            raise ValidationError(
+                "Internal host cannot be empty when forward auth is disabled."
+            )
+        return attrs
+
     def create(self, validated_data):
         instance: ProxyProvider = super().create(validated_data)
         instance.set_oauth_defaults()
@@ -52,6 +68,8 @@ class ProxyProviderSerializer(ProviderSerializer):
             "basic_auth_enabled",
             "basic_auth_password_attribute",
             "basic_auth_user_attribute",
+            "forward_auth_mode",
+            "redirect_uris",
         ]


@@ -86,6 +104,7 @@ class ProxyOutpostConfigSerializer(ModelSerializer):
             "basic_auth_enabled",
             "basic_auth_password_attribute",
             "basic_auth_user_attribute",
+            "forward_auth_mode",
         ]

     @swagger_serializer_method(serializer_or_field=OpenIDConnectConfigurationSerializer)
Some files were not shown because too many files have changed in this diff.