# Compare commits

Comparing `version/20` ... `sources/ld` — 208 commits.
Commit SHA1s (author, date, and message were not part of this capture):

2a6479062f, 52463b8f96, 330f639a7e, 85ea4651e4, b15002a992, 82cbc16c45, 4833a87009, 4e42c1df2a,
1dd39b2612, f7927114e5, 4bb53fc3e8, 8e39ad2cda, e55e27d060, c93c6ee6f9, 90aa5409cd, 017771ddf7,
0e5952650b, e807f9f12c, 3e81824388, 44ac944706, ee151b9e17, 0f87d97594, 5fcf4cb592, 47f6ed48dd,
c92f416146, 310099650f, 0d6c4c41fd, 036a1cbde8, 2398157b0b, ce1a071d16, ea264ffc13, 80e86c52e7,
07ca318535, 01ea6a402f, 385f949238, af33f8c014, bcf7545cad, bf149a1102, 3dfb10ae23, 1853ce1591,
b88b469f94, 1a50e9f8d0, db2c0667a9, 671b7156ed, 355c5f0f74, cfb392196b, 7c3194e9b5, a32755b6c8,
9ab3f26082, 37bd01998a, 0e0661b395, 5f3bacb7a9, 475ef8b057, 16db9f220a, 0370040473, 9a35e893ec,
87e37af273, 7585f2aa9a, cf9a094019, a16c0e5e8f, 3772379e1c, 99a42c6fd8, 000244e387, 42c3cfa65d,
70630aab3a, e0328d8373, 177d1614ee, 14396cb70f, db66b00494, 98648bce46, 5aa11eb102, 0d68c467bd,
884425e630, 2da6b5078c, 7649a57495, 583c5e3ba7, 01eea902ec, 3d91773191, de15bdcdba, dcef5438f1,
21d8089074, af8c4b3cd0, 1ae4ed55ae, e070dda67f, a4cf5c7e90, 196aa5e213, a0d2aca61c, 55dd7013b4,
f1ce694c21, 895c6a349c, 687bc3a4b4, 97d57adb3b, a9398c92ce, 1aebfd2370, 8c71a78696, 67de1fcd68,
d35c7df789, 3f8be6e9d4, 28702b3a25, 58aa7ec623, 0caa17623f, e17667de79, ac312cccbc, b10599fa45,
1dddb3dfaf, 49cb7adc43, 9ccbe28209, 4dee89db00, 61326bbada, fd5d49541f, c80630fb6f, ec9d9f629d,
c79e90964a, 515ce94a85, b4eb5be580, 0f93e283f8, f811266ba5, 4c823b7428, e494756aa5, 4713010034,
b0242cca2b, f5222ef321, 5b6fb4a05a, 6eb33f4f6c, 7dc2bf119b, b3966a5e7c, 0580f32fe6, 74ee97b472,
7b7c80364f, c55f26ca70, a7a4b18082, 61bdbf243a, a1deaf7b87, fc27e4e3d0, ab837558c4, fe0ecb9013,
bf15e7b169, f75c42ea7e, 2fdafca4eb, e507a38d43, aed01e9d5b, 5ac30c4901, f8b690dbec, e45b57071a,
ed3d0c9021, 53e60641ba, ab4af40b06, 797792dec8, 6f37ab2c17, 04befe38bc, 4f23dc0485, 3d0f5ea21c,
59b7532ef6, 1b6fd30b4c, 8507e1929c, 06850a2f57, 619927a7d4, 279150541d, 09880e3412, 420b51ca1d,
ad052564dd, 1edc32dad0, 8ef33e0285, eeb124e869, b5c52daa8f, 507255524a, 8d71dc3ba8, 5f02b31e64,
cf2f9d4c79, febbbca728, b8f9fdf10a, dda69f2bcc, 5ea67398ae, d79ed5a152, fb35e38323, 2c8f8b9e13,
912f8da915, 8eaef887aa, d9bdf79f0e, 44e106878b, 0a9880547c, bbdf8c054b, 8c3f578187, e373bae189,
7cbce1bb3d, 15ac26edb8, c0676b3720, d437927ee5, 6a9ca493ed, 1a2ab34586, 12779ffb5f, 5f8e33667f,
46ae61e68b, a610d11768, c5f0b89a02, 6aeef42e5b, 6612f729ec, 3f12c7c013, 7e51d9d52f, 5ded88127a,
7030176183, 12f3f8c29e, 2b9dc4ccd8, 3970c38752, db61d6200a, 7f9e8f469d, fd561ac802, c04e83c86c
**.dockerignore**

```diff
@@ -1,10 +1,11 @@
-env
 htmlcov
 *.env.yml
 **/node_modules
 dist/**
 build/**
 build_docs/**
-Dockerfile
+*Dockerfile
-authentik/enterprise
 blueprints/local
+.git
+!gen-ts-api/node_modules
+!gen-ts-api/dist/**
```
**.github/actions/setup/action.yml** (2 changed lines)

```diff
@@ -23,7 +23,7 @@ runs:
     - name: Setup node
       uses: actions/setup-node@v3
       with:
-        node-version: "20.5"
+        node-version: "20"
         cache: "npm"
         cache-dependency-path: web/package-lock.json
     - name: Setup dependencies
```
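The only change here is loosening the Node pin: `"20.5"` selects the newest 20.5.x patch, while `"20"` floats to the newest Node 20.x release at job start. A minimal standalone sketch of the same step (not taken verbatim from this diff):

```yaml
# Hedged sketch. Quoting matters: an unquoted 20.5 would be parsed as a
# YAML float and could resolve to an unintended version.
steps:
  - uses: actions/setup-node@v3
    with:
      node-version: "20" # newest 20.x; "20.5" would stay on 20.5.x
      cache: "npm"
      cache-dependency-path: web/package-lock.json
```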
**.github/workflows/ci-main.yml** (42 changed lines)

```diff
@@ -33,7 +33,7 @@ jobs:
           - ruff
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: run job
@@ -41,7 +41,7 @@ jobs:
   test-migrations:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: run migrations
@@ -50,7 +50,7 @@ jobs:
     runs-on: ubuntu-latest
     continue-on-error: true
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Setup authentik env
@@ -91,7 +91,7 @@ jobs:
          - 12-alpine
          - 15-alpine
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Setup authentik env
        uses: ./.github/actions/setup
        with:
@@ -108,7 +108,7 @@ jobs:
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: Create k8s Kind Cluster
@@ -144,7 +144,7 @@ jobs:
          - name: flows
            glob: tests/e2e/test_flows*
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: Setup e2e env (chrome, etc)
@@ -186,28 +186,31 @@ jobs:
    runs-on: ubuntu-latest
    timeout-minutes: 120
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2.2.0
+        uses: docker/setup-qemu-action@v3.0.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      - name: Login to Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: generate ts client
+        run: make gen-client-ts
      - name: Build Docker Image
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
        with:
+          context: .
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
@@ -220,6 +223,8 @@ jobs:
            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
            VERSION=${{ steps.ev.outputs.version }}
            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
      - name: Comment on PR
        if: github.event_name == 'pull_request'
        continue-on-error: true
@@ -231,28 +236,31 @@ jobs:
    runs-on: ubuntu-latest
    timeout-minutes: 120
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2.2.0
+        uses: docker/setup-qemu-action@v3.0.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      - name: Login to Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: generate ts client
+        run: make gen-client-ts
      - name: Build Docker Image
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
        with:
+          context: .
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
@@ -266,3 +274,5 @@ jobs:
            VERSION=${{ steps.ev.outputs.version }}
            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
          platforms: linux/arm64
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
```
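Besides the across-the-board action bumps (checkout v3→v4, QEMU, Buildx, login, build-push), the build jobs gain an explicit `context: .`, a pre-generated TypeScript client, and GitHub Actions cache wiring for BuildKit layers. The cache options in isolation (a sketch, not the full job above):

```yaml
# Hedged sketch of the layer-cache wiring added above: type=gha stores
# image layers in the GitHub Actions cache service; mode=max also exports
# layers from intermediate multi-stage build stages, not just the result.
- name: Build Docker Image
  uses: docker/build-push-action@v5
  with:
    context: .
    cache-from: type=gha
    cache-to: type=gha,mode=max
```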
**.github/workflows/ci-outpost.yml** (24 changed lines)

```diff
@@ -14,7 +14,7 @@ jobs:
   lint-golint:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - uses: actions/setup-go@v4
         with:
           go-version-file: "go.mod"
@@ -31,14 +31,16 @@ jobs:
         with:
           version: v1.52.2
           args: --timeout 5000s --verbose
-          skip-pkg-cache: true
+          skip-cache: true
   test-unittest:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - uses: actions/setup-go@v4
         with:
           go-version-file: "go.mod"
+      - name: Setup authentik env
+        uses: ./.github/actions/setup
       - name: Generate API
         run: make gen-client-go
       - name: Go unittests
@@ -64,20 +66,20 @@ jobs:
           - radius
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2.2.0
+        uses: docker/setup-qemu-action@v3.0.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      - name: Login to Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
        with:
          registry: ghcr.io
@@ -86,7 +88,7 @@ jobs:
      - name: Generate API
        run: make gen-client-go
      - name: Build Docker Image
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
        with:
          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
          tags: |
@@ -99,6 +101,8 @@ jobs:
            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
          platforms: linux/amd64,linux/arm64
          context: .
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
  build-binary:
    timeout-minutes: 120
    needs:
@@ -114,7 +118,7 @@ jobs:
        goos: [linux]
        goarch: [amd64, arm64]
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - uses: actions/setup-go@v4
@@ -122,7 +126,7 @@ jobs:
          go-version-file: "go.mod"
      - uses: actions/setup-node@v3
        with:
-          node-version: "20.5"
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - name: Generate API
```
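Two recurring patterns in this file: `go-version-file: "go.mod"` makes setup-go derive the toolchain version from the module file instead of a duplicated version string, and the `goos`/`goarch` matrix fans one job definition out into one run per combination. A standalone sketch (not taken from this diff):

```yaml
# Hedged sketch: one job definition, two runs (linux/amd64 and
# linux/arm64); each run reads Go's version from go.mod.
strategy:
  matrix:
    goos: [linux]
    goarch: [amd64, arm64]
steps:
  - uses: actions/checkout@v4
  - uses: actions/setup-go@v4
    with:
      go-version-file: "go.mod"
  - run: GOOS=${{ matrix.goos }} GOARCH=${{ matrix.goarch }} go build ./...
```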
**.github/workflows/ci-web.yml** (10 changed lines)

```diff
@@ -17,7 +17,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v3
         with:
-          node-version: "20.5"
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: web/package-lock.json
       - working-directory: web/
@@ -33,7 +33,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v3
         with:
-          node-version: "20.5"
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: web/package-lock.json
       - working-directory: web/
@@ -49,7 +49,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v3
         with:
-          node-version: "20.5"
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: web/package-lock.json
       - working-directory: web/
@@ -65,7 +65,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v3
         with:
-          node-version: "20.5"
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: web/package-lock.json
       - working-directory: web/
@@ -97,7 +97,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v3
         with:
-          node-version: "20.5"
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: web/package-lock.json
       - working-directory: web/
```
**.github/workflows/ci-website.yml** (6 changed lines)

```diff
@@ -17,7 +17,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v3
         with:
-          node-version: "20.5"
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: website/package-lock.json
       - working-directory: website/
@@ -31,7 +31,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v3
         with:
-          node-version: "20.5"
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: website/package-lock.json
       - working-directory: website/
@@ -52,7 +52,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-node@v3
         with:
-          node-version: "20.5"
+          node-version: "20"
           cache: "npm"
           cache-dependency-path: website/package-lock.json
       - working-directory: website/
```
**.github/workflows/codeql-analysis.yml** (2 changed lines)

```diff
@@ -23,7 +23,7 @@ jobs:
         language: ["go", "javascript", "python"]
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: Initialize CodeQL
```
**.github/workflows/gha-cache-cleanup.yml** (2 changed lines)

```diff
@@ -11,7 +11,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Cleanup
         run: |
```
**.github/workflows/ghcr-retention.yml** (2 changed lines)

```diff
@@ -11,7 +11,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - id: generate_token
-        uses: tibdex/github-app-token@v1
+        uses: tibdex/github-app-token@v2
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
```
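`tibdex/github-app-token` mints a short-lived installation token from a GitHub App; the v1→v2 bump is a maintenance update of the action itself, not a change to this flow. The usual pattern, combining steps that appear separately across the workflows in this diff (sketch):

```yaml
# Hedged sketch: the generated token is then handed to later steps that
# need more than the default GITHUB_TOKEN permissions.
- id: generate_token
  uses: tibdex/github-app-token@v2
  with:
    app_id: ${{ secrets.GH_APP_ID }}
    private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v4
  with:
    token: ${{ steps.generate_token.outputs.token }}
```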
**.github/workflows/image-compress.yml** (4 changed lines)

```diff
@@ -29,11 +29,11 @@ jobs:
       github.event.pull_request.head.repo.full_name == github.repository)
     steps:
       - id: generate_token
-        uses: tibdex/github-app-token@v1
+        uses: tibdex/github-app-token@v2
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           token: ${{ steps.generate_token.outputs.token }}
       - name: Compress images
```
**.github/workflows/publish-source-docs.yml** (2 changed lines)

```diff
@@ -15,7 +15,7 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 120
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: generate docs
```
**.github/workflows/release-next-branch.yml** (2 changed lines)

```diff
@@ -13,7 +13,7 @@ jobs:
     runs-on: ubuntu-latest
     environment: internal-production
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           ref: main
       - run: |
```
**.github/workflows/release-publish.yml** (35 changed lines)

```diff
@@ -8,28 +8,31 @@ jobs:
   build-server:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2.2.0
+        uses: docker/setup-qemu-action@v3.0.0
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
       - name: prepare variables
         uses: ./.github/actions/docker-push-variables
         id: ev
       - name: Docker Login Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: make empty ts client
+        run: mkdir -p ./gen-ts-client
      - name: Build Docker Image
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
        with:
+          context: .
          push: ${{ github.event_name == 'release' }}
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
@@ -55,30 +58,30 @@ jobs:
          - ldap
          - radius
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2.2.0
+        uses: docker/setup-qemu-action@v3.0.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
      - name: Docker Login Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build Docker Image
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
        with:
          push: ${{ github.event_name == 'release' }}
          tags: |
@@ -106,13 +109,13 @@ jobs:
        goos: [linux, darwin]
        goarch: [amd64, arm64]
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
      - uses: actions/setup-node@v3
        with:
-          node-version: "20.5"
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - name: Build web
@@ -141,7 +144,7 @@ jobs:
      - build-outpost-binary
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Run test suite in final docker images
        run: |
          echo "PG_PASS=$(openssl rand -base64 32)" >> .env
@@ -157,7 +160,7 @@ jobs:
      - build-outpost-binary
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
```
4
.github/workflows/release-tag.yml
vendored
4
.github/workflows/release-tag.yml
vendored
@ -10,7 +10,7 @@ jobs:
|
|||||||
name: Create Release from Tag
|
name: Create Release from Tag
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
- name: Pre-release test
|
- name: Pre-release test
|
||||||
run: |
|
run: |
|
||||||
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
|
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
|
||||||
@ -23,7 +23,7 @@ jobs:
|
|||||||
docker-compose start postgresql redis
|
docker-compose start postgresql redis
|
||||||
docker-compose run -u root server test-all
|
docker-compose run -u root server test-all
|
||||||
- id: generate_token
|
- id: generate_token
|
||||||
uses: tibdex/github-app-token@v1
|
uses: tibdex/github-app-token@v2
|
||||||
with:
|
with:
|
||||||
app_id: ${{ secrets.GH_APP_ID }}
|
app_id: ${{ secrets.GH_APP_ID }}
|
||||||
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||||
**.github/workflows/repo-stale.yml** (2 changed lines)

```diff
@@ -14,7 +14,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - id: generate_token
-        uses: tibdex/github-app-token@v1
+        uses: tibdex/github-app-token@v2
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
```
**.github/workflows/translation-compile.yml** (4 changed lines)

```diff
@@ -16,11 +16,11 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - id: generate_token
-        uses: tibdex/github-app-token@v1
+        uses: tibdex/github-app-token@v2
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           token: ${{ steps.generate_token.outputs.token }}
       - name: Setup authentik env
```
**.github/workflows/translation-rename.yml** (2 changed lines)

```diff
@@ -12,7 +12,7 @@ jobs:
     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
     steps:
       - id: generate_token
-        uses: tibdex/github-app-token@v1
+        uses: tibdex/github-app-token@v2
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
```
**.github/workflows/web-api-publish.yml** (6 changed lines)

```diff
@@ -10,16 +10,16 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - id: generate_token
-        uses: tibdex/github-app-token@v1
+        uses: tibdex/github-app-token@v2
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           token: ${{ steps.generate_token.outputs.token }}
       - uses: actions/setup-node@v3
         with:
-          node-version: "20.5"
+          node-version: "20"
           registry-url: "https://registry.npmjs.org"
       - name: Generate API Client
         run: make gen-client-ts
```
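`registry-url` is what lets a later `npm publish` authenticate: setup-node writes an `.npmrc` that reads the token from the `NODE_AUTH_TOKEN` environment variable. A sketch (the secret name here is assumed, not from this diff):

```yaml
- uses: actions/setup-node@v3
  with:
    node-version: "20"
    registry-url: "https://registry.npmjs.org"
- run: npm publish
  env:
    NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} # hypothetical secret name
```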
**CODEOWNERS** (25 changed lines)

```diff
@@ -1,2 +1,23 @@
-* @goauthentik/core
-website/docs/security/** @goauthentik/security
+# Fallback
+* @goauthentik/backend @goauthentik/frontend
+# Backend
+authentik/ @goauthentik/backend
+blueprints/ @goauthentik/backend
+cmd/ @goauthentik/backend
+internal/ @goauthentik/backend
+lifecycle/ @goauthentik/backend
+schemas/ @goauthentik/backend
+scripts/ @goauthentik/backend
+tests/ @goauthentik/backend
+# Infrastructure
+.github/ @goauthentik/infrastructure
+Dockerfile @goauthentik/infrastructure
+*Dockerfile @goauthentik/infrastructure
+.dockerignore @goauthentik/infrastructure
+docker-compose.yml @goauthentik/infrastructure
+# Web
+web/ @goauthentik/frontend
+# Docs & Website
+website/ @goauthentik/docs
+# Security
+website/docs/security/ @goauthentik/security
```
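In CODEOWNERS, order matters: the last matching pattern wins, so the `*` fallback has to come first and more specific rules below it override it. A two-line sketch of the effect:

```
# For a file under website/docs/security/, only @goauthentik/security is
# requested for review — the later, more specific match overrides `*`.
* @goauthentik/backend @goauthentik/frontend
website/docs/security/ @goauthentik/security
```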
**Dockerfile** (126 changed lines)

```diff
@@ -1,53 +1,65 @@
 # Stage 1: Build website
-FROM --platform=${BUILDPLATFORM} docker.io/node:20.5 as website-builder
+FROM --platform=${BUILDPLATFORM} docker.io/node:20 as website-builder

+ENV NODE_ENV=production
+
+WORKDIR /work/website
+
+RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \
+    --mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \
+    --mount=type=cache,target=/root/.npm \
+    npm ci --include=dev
+
 COPY ./website /work/website/
 COPY ./blueprints /work/blueprints/
 COPY ./SECURITY.md /work/

-ENV NODE_ENV=production
-WORKDIR /work/website
-RUN npm ci --include=dev && npm run build-docs-only
+RUN npm run build-docs-only

 # Stage 2: Build webui
-FROM --platform=${BUILDPLATFORM} docker.io/node:20.5 as web-builder
+FROM --platform=${BUILDPLATFORM} docker.io/node:20 as web-builder

+ENV NODE_ENV=production
+
+WORKDIR /work/web
+
+RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
+    --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
+    --mount=type=cache,target=/root/.npm \
+    npm ci --include=dev
+
 COPY ./web /work/web/
 COPY ./website /work/website/
+COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api

-ENV NODE_ENV=production
-WORKDIR /work/web
-RUN npm ci --include=dev && npm run build
+RUN npm run build

-# Stage 3: Poetry to requirements.txt export
-FROM docker.io/python:3.11.5-slim-bookworm AS poetry-locker
+# Stage 3: Build go proxy
+FROM docker.io/golang:1.21.1-bookworm AS go-builder

-WORKDIR /work
-COPY ./pyproject.toml /work
-COPY ./poetry.lock /work
+WORKDIR /go/src/goauthentik.io

-RUN pip install --no-cache-dir poetry && \
-    poetry export -f requirements.txt --output requirements.txt && \
-    poetry export -f requirements.txt --dev --output requirements-dev.txt
+RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
+    --mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \
+    --mount=type=cache,target=/go/pkg/mod \
+    go mod download

-# Stage 4: Build go proxy
-FROM docker.io/golang:1.21.0-bookworm AS go-builder
+COPY ./cmd /go/src/goauthentik.io/cmd
+COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib
+COPY ./web/static.go /go/src/goauthentik.io/web/static.go
+COPY --from=web-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
+COPY --from=web-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt
+COPY ./internal /go/src/goauthentik.io/internal
+COPY ./go.mod /go/src/goauthentik.io/go.mod
+COPY ./go.sum /go/src/goauthentik.io/go.sum

-WORKDIR /work
+ENV CGO_ENABLED=0

-COPY --from=web-builder /work/web/robots.txt /work/web/robots.txt
-COPY --from=web-builder /work/web/security.txt /work/web/security.txt
+RUN --mount=type=cache,target=/go/pkg/mod \
+    --mount=type=cache,target=/root/.cache/go-build \
+    go build -o /go/authentik ./cmd/server

-COPY ./cmd /work/cmd
-COPY ./authentik/lib /work/authentik/lib
-COPY ./web/static.go /work/web/static.go
-COPY ./internal /work/internal
-COPY ./go.mod /work/go.mod
-COPY ./go.sum /work/go.sum
-
-RUN go build -o /work/bin/authentik ./cmd/server/
-
-# Stage 5: MaxMind GeoIP
+# Stage 4: MaxMind GeoIP
 FROM ghcr.io/maxmind/geoipupdate:v6.0 as geoip

 ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
@@ -61,6 +73,29 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     mkdir -p /usr/share/GeoIP && \
     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

+# Stage 5: Python dependencies
+FROM docker.io/python:3.11.5-bookworm AS python-deps
+
+WORKDIR /ak-root/poetry
+
+ENV VENV_PATH="/ak-root/venv" \
+    POETRY_VIRTUALENVS_CREATE=false \
+    PATH="/ak-root/venv/bin:$PATH"
+
+RUN --mount=type=cache,target=/var/cache/apt \
+    apt-get update && \
+    # Required for installing pip packages
+    apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev
+
+RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
+    --mount=type=bind,target=./poetry.lock,src=./poetry.lock \
+    --mount=type=cache,target=/root/.cache/pip \
+    --mount=type=cache,target=/root/.cache/pypoetry \
+    python -m venv /ak-root/venv/ && \
+    pip3 install --upgrade pip && \
+    pip3 install poetry && \
+    poetry install --only=main --no-ansi --no-interaction
+
 # Stage 6: Run
 FROM docker.io/python:3.11.5-slim-bookworm AS final-image
@@ -76,46 +111,45 @@ LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH}

 WORKDIR /

-COPY --from=poetry-locker /work/requirements.txt /
-COPY --from=poetry-locker /work/requirements-dev.txt /
-COPY --from=geoip /usr/share/GeoIP /geoip
+# We cannot cache this layer otherwise we'll end up with a bigger image

 RUN apt-get update && \
-    # Required for installing pip packages
-    apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev python3-dev && \
     # Required for runtime
     apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 && \
     # Required for bootstrap & healtcheck
     apt-get install -y --no-install-recommends runit && \
-    pip install --no-cache-dir -r /requirements.txt && \
-    apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev libpq-dev python3-dev && \
-    apt-get autoremove --purge -y && \
     apt-get clean && \
     rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
     adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
     mkdir -p /certs /media /blueprints && \
     mkdir -p /authentik/.ssh && \
-    chown authentik:authentik /certs /media /authentik/.ssh
+    mkdir -p /ak-root && \
+    chown authentik:authentik /certs /media /authentik/.ssh /ak-root

 COPY ./authentik/ /authentik
 COPY ./pyproject.toml /
+COPY ./poetry.lock /
 COPY ./schemas /schemas
 COPY ./locale /locale
 COPY ./tests /tests
 COPY ./manage.py /
 COPY ./blueprints /blueprints
 COPY ./lifecycle/ /lifecycle
-COPY --from=go-builder /work/bin/authentik /bin/authentik
+COPY --from=go-builder /go/authentik /bin/authentik
+COPY --from=python-deps /ak-root/venv /ak-root/venv
 COPY --from=web-builder /work/web/dist/ /web/dist/
 COPY --from=web-builder /work/web/authentik/ /web/authentik/
 COPY --from=website-builder /work/website/help/ /website/help/
+COPY --from=geoip /usr/share/GeoIP /geoip

 USER 1000

-ENV TMPDIR /dev/shm/
-ENV PYTHONUNBUFFERED 1
-ENV PATH "/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/lifecycle"
+ENV TMPDIR=/dev/shm/ \
+    PYTHONDONTWRITEBYTECODE=1 \
+    PYTHONUNBUFFERED=1 \
+    PATH="/ak-root/venv/bin:$PATH" \
+    VENV_PATH="/ak-root/venv" \
+    POETRY_VIRTUALENVS_CREATE=false

 HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "/lifecycle/ak", "healthcheck" ]

-ENTRYPOINT [ "/usr/local/bin/dumb-init", "--", "/lifecycle/ak" ]
+ENTRYPOINT [ "dumb-init", "--", "/lifecycle/ak" ]
```
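The restructured stages lean on two BuildKit mount types: `type=bind` exposes a file from the build context without copying it into a layer, and `type=cache` keeps a directory (npm's cache, Go's module cache) alive across builds, so dependency installation only reruns when a lockfile changes. The dependency step in isolation (hedged sketch, not the exact stage above):

```dockerfile
FROM docker.io/node:20 AS deps
WORKDIR /work
# Lockfiles are bind-mounted, so editing source files never invalidates
# this layer; the npm cache persists between builds in the cache mount.
RUN --mount=type=bind,target=/work/package.json,src=./package.json \
    --mount=type=bind,target=/work/package-lock.json,src=./package-lock.json \
    --mount=type=cache,target=/root/.npm \
    npm ci --include=dev
```

Note that this syntax requires BuildKit (`DOCKER_BUILDKIT=1`), which the new `docker` target in the Makefile below sets explicitly.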
**Makefile** (94 changed lines)

```diff
@@ -1,9 +1,16 @@
-.SHELLFLAGS += -x -e
+.PHONY: gen dev-reset all clean test web website
+
+.SHELLFLAGS += ${SHELLFLAGS} -e
 PWD = $(shell pwd)
 UID = $(shell id -u)
 GID = $(shell id -g)
 NPM_VERSION = $(shell python -m scripts.npm_version)
 PY_SOURCES = authentik tests scripts lifecycle
+DOCKER_IMAGE ?= "authentik:test"
+
+pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
+pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
+pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
+
 CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
 	-I .github/codespell-words.txt \
@@ -19,57 +26,78 @@ CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
 	website/integrations \
 	website/src

-all: lint-fix lint test gen web
+all: lint-fix lint test gen web ## Lint, build, and test everything
+
+help: ## Show this help
+	@echo "\nSpecify a command. The choices are:\n"
+	@grep -E '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | \
+		awk 'BEGIN {FS = ":.*?## "}; {printf "  \033[0;36m%-24s\033[m %s\n", $$1, $$2}' | \
+		sort
+	@echo ""

 test-go:
 	go test -timeout 0 -v -race -cover ./...

-test-docker:
+test-docker: ## Run all tests in a docker-compose
 	echo "PG_PASS=$(openssl rand -base64 32)" >> .env
 	echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
 	docker-compose pull -q
 	docker-compose up --no-start
 	docker-compose start postgresql redis
-	docker-compose run -u root server test
+	docker-compose run -u root server test-all
 	rm -f .env

-test:
+test: ## Run the server tests and produce a coverage report (locally)
 	coverage run manage.py test --keepdb authentik
 	coverage html
 	coverage report

-lint-fix:
+lint-fix: ## Lint and automatically fix errors in the python source code. Reports spelling errors.
 	isort authentik $(PY_SOURCES)
 	black authentik $(PY_SOURCES)
 	ruff authentik $(PY_SOURCES)
 	codespell -w $(CODESPELL_ARGS)

-lint:
+lint: ## Lint the python and golang sources
 	pylint $(PY_SOURCES)
 	bandit -r $(PY_SOURCES) -x node_modules
 	golangci-lint run -v

-migrate:
+migrate: ## Run the Authentik Django server's migrations
 	python -m lifecycle.migrate

-i18n-extract: i18n-extract-core web-i18n-extract
+i18n-extract: i18n-extract-core web-i18n-extract ## Extract strings that require translation into files to send to a translation service

 i18n-extract-core:
 	ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en

+install: web-install website-install ## Install all requires dependencies for `web`, `website` and `core`
+	poetry install
+
+dev-drop-db:
+	echo dropdb -U ${pg_user} -h ${pg_host} ${pg_name}
+	# Also remove the test-db if it exists
+	dropdb -U ${pg_user} -h ${pg_host} test_${pg_name} || true
+	echo redis-cli -n 0 flushall
+
+dev-create-db:
+	createdb -U ${pg_user} -h ${pg_host} ${pg_name}
+
+dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.
+
 #########################
 ## API Schema
 #########################

-gen-build:
+gen-build: ## Extract the schema from the database
 	AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
 	AUTHENTIK_DEBUG=true ak spectacular --file schema.yml

-gen-changelog:
+gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
 	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
 	npx prettier --write changelog.md

-gen-diff:
+gen-diff: ## (Release) generate the changelog diff between the current schema and the last tag
 	git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > old_schema.yml
 	docker run \
 		--rm -v ${PWD}:/local \
@@ -84,7 +112,7 @@ gen-clean:
 	rm -rf web/api/src/
 	rm -rf api/

-gen-client-ts:
+gen-client-ts: ## Build and install the authentik API for Typescript into the authentik UI Application
 	docker run \
 		--rm -v ${PWD}:/local \
 		--user ${UID}:${GID} \
@@ -100,7 +128,7 @@ gen-client-ts:
 	cd gen-ts-api && npm i
 	\cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api

-gen-client-go:
+gen-client-go: ## Build and install the authentik API for Golang
 	mkdir -p ./gen-go-api ./gen-go-api/templates
 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./gen-go-api/config.yaml
 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./gen-go-api/templates/README.mustache
@@ -117,7 +145,7 @@ gen-client-go:
 	go mod edit -replace goauthentik.io/api/v3=./gen-go-api
 	rm -rf ./gen-go-api/config.yaml ./gen-go-api/templates/

-gen-dev-config:
+gen-dev-config: ## Generate a local development config file
 	python -m scripts.generate_config

 gen: gen-build gen-clean gen-client-ts
@@ -126,21 +154,21 @@ gen: gen-build gen-clean gen-client-ts
 ## Web
 #########################

-web-build: web-install
+web-build: web-install ## Build the Authentik UI
 	cd web && npm run build

-web: web-lint-fix web-lint web-check-compile
+web: web-lint-fix web-lint web-check-compile web-i18n-extract ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it

-web-install:
+web-install: ## Install the necessary libraries to build the Authentik UI
 	cd web && npm ci

-web-watch:
+web-watch: ## Build and watch the Authentik UI for changes, updating automatically
 	rm -rf web/dist/
 	mkdir web/dist/
 	touch web/dist/.gitkeep
 	cd web && npm run watch

-web-storybook-watch:
+web-storybook-watch: ## Build and run the storybook documentation server
 	cd web && npm run storybook

 web-lint-fix:
@@ -160,7 +188,7 @@ web-i18n-extract:
 ## Website
 #########################

-website: website-lint-fix website-build
+website: website-lint-fix website-build ## Automatically fix formatting issues in the Authentik website/docs source code, lint the code, and compile it

 website-install:
 	cd website && npm ci
@@ -171,11 +199,22 @@ website-lint-fix:
 website-build:
 	cd website && npm run build

-website-watch:
+website-watch: ## Build and watch the documentation website, updating automatically
 	cd website && npm run watch

+#########################
+## Docker
+#########################
+
+docker: ## Build a docker image of the current source tree
+	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
+
+#########################
+## CI
+#########################
 # These targets are use by GitHub actions to allow usage of matrix
 # which makes the YAML File a lot smaller

 ci--meta-debug:
 	python -V
 	node --version
@@ -203,14 +242,3 @@ ci-pyright: ci--meta-debug

 ci-pending-migrations: ci--meta-debug
 	ak makemigrations --check
-
-install: web-install website-install
-	poetry install
-
-dev-reset:
-	dropdb -U postgres -h localhost authentik
-	# Also remove the test-db if it exists
-	dropdb -U postgres -h localhost test_authentik || true
-	createdb -U postgres -h localhost authentik
-	redis-cli -n 0 flushall
-	make migrate
```
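The new `## comment` suffixes feed the `help` target: `grep` keeps every line that looks like `target: ... ## description`, and `awk` splits on `:.*?## ` to print the two halves. A stripped-down version of the same pattern (a sketch; recipe lines must be indented with tabs, and `$$` escapes `$` for the shell):

```make
help: ## Show this help
	@grep -E '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | \
		awk 'BEGIN {FS = ":.*?## "}; {printf "  %-24s %s\n", $$1, $$2}' | sort
```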
**README.md** (12 changed lines)

```diff
@@ -41,15 +41,3 @@ See [SECURITY.md](SECURITY.md)
 ## Adoption and Contributions

 Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).
-
-## Sponsors
-
-This project is proudly sponsored by:
-
-<p>
-    <a href="https://www.digitalocean.com/?utm_medium=opensource&utm_source=goauthentik.io">
-        <img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" width="201px">
-    </a>
-</p>
-
-DigitalOcean provides development and testing resources for authentik.
```
|
@@ -49,7 +49,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
         if content == "":
             return content
         context = self.instance.context if self.instance else {}
-        valid, logs = Importer(content, context).validate()
+        valid, logs = Importer.from_string(content, context).validate()
        if not valid:
             text_logs = "\n".join([x["event"] for x in logs])
             raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs}))
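The pattern above repeats throughout this changeset: `Importer` no longer accepts raw YAML in its constructor, and call sites move to the new `Importer.from_string` factory (defined later in this diff). A minimal sketch of the new entry point, using a hypothetical blueprint:

```python
# Minimal sketch of the factory-based API; the blueprint YAML below is a
# hypothetical example, not part of this changeset.
from authentik.blueprints.v1.importer import Importer

BLUEPRINT_YAML = """
version: 1
entries:
  - model: authentik_core.group
    identifiers:
      name: example-group
    attrs:
      name: example-group
"""

# Old call sites did Importer(BLUEPRINT_YAML, context); now the string is
# parsed into a Blueprint first, and the constructor takes the parsed object.
importer = Importer.from_string(BLUEPRINT_YAML, {"bar": "baz"})
valid, logs = importer.validate()
if valid:
    importer.apply()
```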
@@ -18,7 +18,7 @@ class Command(BaseCommand):
         """Apply all blueprints in order, abort when one fails to import"""
         for blueprint_path in options.get("blueprints", []):
             content = BlueprintInstance(path=blueprint_path).retrieve()
-            importer = Importer(content)
+            importer = Importer.from_string(content)
             valid, _ = importer.validate()
             if not valid:
                 self.stderr.write("blueprint invalid")
@@ -9,6 +9,7 @@ from rest_framework.fields import Field, JSONField, UUIDField
 from rest_framework.serializers import Serializer
 from structlog.stdlib import get_logger
 
+from authentik.blueprints.v1.common import BlueprintEntryDesiredState
 from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed
 from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
 from authentik.lib.models import SerializerModel
@@ -110,7 +111,7 @@ class Command(BaseCommand):
                 "id": {"type": "string"},
                 "state": {
                     "type": "string",
-                    "enum": ["absent", "present", "created"],
+                    "enum": [s.value for s in BlueprintEntryDesiredState],
                     "default": "present",
                 },
                 "conditions": {"type": "array", "items": {"type": "boolean"}},
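Deriving the schema's `enum` from `BlueprintEntryDesiredState` keeps the generated JSON schema in sync with the Python enum: when `must_created` is added later in this diff, the schema picks it up with no further edits. A self-contained sketch of the idea:

```python
# Sketch: generating a JSON-schema enum from the Python enum, so both stay
# in sync. MUST_CREATED is the state added later in this changeset.
from enum import Enum


class BlueprintEntryDesiredState(Enum):
    ABSENT = "absent"
    PRESENT = "present"
    CREATED = "created"
    MUST_CREATED = "must_created"


state_schema = {
    "type": "string",
    "enum": [s.value for s in BlueprintEntryDesiredState],
    "default": "present",
}
print(state_schema["enum"])  # ['absent', 'present', 'created', 'must_created']
```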
@@ -20,7 +20,7 @@ def apply_blueprint(*files: str):
     def wrapper(*args, **kwargs):
         for file in files:
             content = BlueprintInstance(path=file).retrieve()
-            Importer(content).apply()
+            Importer.from_string(content).apply()
         return func(*args, **kwargs)
 
     return wrapper
@@ -25,7 +25,7 @@ def blueprint_tester(file_name: Path) -> Callable:
     def tester(self: TestPackaged):
         base = Path("blueprints/")
         rel_path = Path(file_name).relative_to(base)
-        importer = Importer(BlueprintInstance(path=str(rel_path)).retrieve())
+        importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve())
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
 
@@ -21,14 +21,14 @@ class TestBlueprintsV1(TransactionTestCase):
 
     def test_blueprint_invalid_format(self):
         """Test blueprint with invalid format"""
-        importer = Importer('{"version": 3}')
+        importer = Importer.from_string('{"version": 3}')
         self.assertFalse(importer.validate()[0])
-        importer = Importer(
+        importer = Importer.from_string(
             '{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
             '"model": "authentik_core.User"}]}'
         )
         self.assertFalse(importer.validate()[0])
-        importer = Importer(
+        importer = Importer.from_string(
             '{"version": 1, "entries": [{"attrs": {"name": "test"}, '
             '"identifiers": {}, '
             '"model": "authentik_core.Group"}]}'
@@ -54,7 +54,7 @@ class TestBlueprintsV1(TransactionTestCase):
             },
         )
 
-        importer = Importer(
+        importer = Importer.from_string(
             '{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
             '{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
             '["other_value"]}}, "model": "authentik_core.Group"}]}'
@@ -103,7 +103,7 @@ class TestBlueprintsV1(TransactionTestCase):
         self.assertEqual(len(export.entries), 3)
         export_yaml = exporter.export_to_string()
 
-        importer = Importer(export_yaml)
+        importer = Importer.from_string(export_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
 
@@ -113,14 +113,14 @@ class TestBlueprintsV1(TransactionTestCase):
         """Test export and import it twice"""
         count_initial = Prompt.objects.filter(field_key="username").count()
 
-        importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
+        importer = Importer.from_string(load_fixture("fixtures/static_prompt_export.yaml"))
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
 
         count_before = Prompt.objects.filter(field_key="username").count()
         self.assertEqual(count_initial + 1, count_before)
 
-        importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
+        importer = Importer.from_string(load_fixture("fixtures/static_prompt_export.yaml"))
         self.assertTrue(importer.apply())
 
         self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)
@@ -130,7 +130,7 @@ class TestBlueprintsV1(TransactionTestCase):
         ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
         Group.objects.filter(name="test").delete()
         environ["foo"] = generate_id()
-        importer = Importer(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
+        importer = Importer.from_string(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()
@@ -248,7 +248,7 @@ class TestBlueprintsV1(TransactionTestCase):
         exporter = FlowExporter(flow)
         export_yaml = exporter.export_to_string()
 
-        importer = Importer(export_yaml)
+        importer = Importer.from_string(export_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         self.assertTrue(UserLoginStage.objects.filter(name=stage_name).exists())
@@ -297,7 +297,7 @@ class TestBlueprintsV1(TransactionTestCase):
         exporter = FlowExporter(flow)
         export_yaml = exporter.export_to_string()
 
-        importer = Importer(export_yaml)
+        importer = Importer.from_string(export_yaml)
 
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
@@ -18,7 +18,7 @@ class TestBlueprintsV1ConditionalFields(TransactionTestCase):
         self.uid = generate_id()
         import_yaml = load_fixture("fixtures/conditional_fields.yaml", uid=self.uid, user=user.pk)
 
-        importer = Importer(import_yaml)
+        importer = Importer.from_string(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
 
@@ -18,7 +18,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
             "fixtures/conditions_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
         )
 
-        importer = Importer(import_yaml)
+        importer = Importer.from_string(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure objects exist
@@ -35,7 +35,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
             "fixtures/conditions_not_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
         )
 
-        importer = Importer(import_yaml)
+        importer = Importer.from_string(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure objects do not exist
@@ -15,7 +15,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         flow_slug = generate_id()
         import_yaml = load_fixture("fixtures/state_present.yaml", id=flow_slug)
 
-        importer = Importer(import_yaml)
+        importer = Importer.from_string(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure object exists
@@ -30,7 +30,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         self.assertEqual(flow.title, "bar")
 
         # Ensure importer updates it
-        importer = Importer(import_yaml)
+        importer = Importer.from_string(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         flow: Flow = Flow.objects.filter(slug=flow_slug).first()
@@ -41,7 +41,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         flow_slug = generate_id()
         import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)
 
-        importer = Importer(import_yaml)
+        importer = Importer.from_string(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure object exists
@@ -56,7 +56,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         self.assertEqual(flow.title, "bar")
 
         # Ensure importer doesn't update it
-        importer = Importer(import_yaml)
+        importer = Importer.from_string(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         flow: Flow = Flow.objects.filter(slug=flow_slug).first()
@@ -67,7 +67,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         flow_slug = generate_id()
         import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)
 
-        importer = Importer(import_yaml)
+        importer = Importer.from_string(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure object exists
@@ -75,7 +75,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         self.assertEqual(flow.slug, flow_slug)
 
         import_yaml = load_fixture("fixtures/state_absent.yaml", id=flow_slug)
-        importer = Importer(import_yaml)
+        importer = Importer.from_string(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         flow: Flow = Flow.objects.filter(slug=flow_slug).first()
@@ -12,6 +12,7 @@ from uuid import UUID
 from deepmerge import always_merger
 from django.apps import apps
 from django.db.models import Model, Q
+from rest_framework.exceptions import ValidationError
 from rest_framework.fields import Field
 from rest_framework.serializers import Serializer
 from yaml import SafeDumper, SafeLoader, ScalarNode, SequenceNode
@@ -52,6 +53,7 @@ class BlueprintEntryDesiredState(Enum):
     ABSENT = "absent"
     PRESENT = "present"
     CREATED = "created"
+    MUST_CREATED = "must_created"
 
 
 @dataclass
@@ -206,8 +208,8 @@ class KeyOf(YAMLTag):
             ):
                 return _entry._state.instance.pbm_uuid
             return _entry._state.instance.pk
-        raise EntryInvalidError(
-            f"KeyOf: failed to find entry with `id` of `{self.id_from}` and a model instance"
+        raise EntryInvalidError.from_entry(
+            f"KeyOf: failed to find entry with `id` of `{self.id_from}` and a model instance", entry
         )
 
 
@@ -278,7 +280,7 @@ class Format(YAMLTag):
         try:
             return self.format_string % tuple(args)
         except TypeError as exc:
-            raise EntryInvalidError(exc)
+            raise EntryInvalidError.from_entry(exc, entry)
 
 
 class Find(YAMLTag):
@@ -355,13 +357,15 @@ class Condition(YAMLTag):
             args.append(arg)
 
         if not args:
-            raise EntryInvalidError("At least one value is required after mode selection.")
+            raise EntryInvalidError.from_entry(
+                "At least one value is required after mode selection.", entry
+            )
 
         try:
             comparator = self._COMPARATORS[self.mode.upper()]
             return comparator(tuple(bool(x) for x in args))
         except (TypeError, KeyError) as exc:
-            raise EntryInvalidError(exc)
+            raise EntryInvalidError.from_entry(exc, entry)
 
 
 class If(YAMLTag):
@@ -393,7 +397,7 @@ class If(YAMLTag):
                 blueprint,
             )
         except TypeError as exc:
-            raise EntryInvalidError(exc)
+            raise EntryInvalidError.from_entry(exc, entry)
 
 
 class Enumerate(YAMLTag, YAMLTagContext):
@@ -425,9 +429,10 @@ class Enumerate(YAMLTag, YAMLTagContext):
 
     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
         if isinstance(self.iterable, EnumeratedItem) and self.iterable.depth == 0:
-            raise EntryInvalidError(
+            raise EntryInvalidError.from_entry(
                 f"{self.__class__.__name__} tag's iterable references this tag's context. "
-                "This is a noop. Check you are setting depth bigger than 0."
+                "This is a noop. Check you are setting depth bigger than 0.",
+                entry,
             )
 
         if isinstance(self.iterable, YAMLTag):
@@ -436,9 +441,10 @@ class Enumerate(YAMLTag, YAMLTagContext):
             iterable = self.iterable
 
         if not isinstance(iterable, Iterable):
-            raise EntryInvalidError(
+            raise EntryInvalidError.from_entry(
                 f"{self.__class__.__name__}'s iterable must be an iterable "
-                "such as a sequence or a mapping"
+                "such as a sequence or a mapping",
+                entry,
             )
 
         if isinstance(iterable, Mapping):
@@ -449,7 +455,7 @@ class Enumerate(YAMLTag, YAMLTagContext):
         try:
             output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()]
         except KeyError as exc:
-            raise EntryInvalidError(exc)
+            raise EntryInvalidError.from_entry(exc, entry)
 
         result = output_class()
 
@@ -461,8 +467,8 @@ class Enumerate(YAMLTag, YAMLTagContext):
                 resolved_body = entry.tag_resolver(self.item_body, blueprint)
                 result = add_fn(result, resolved_body)
                 if not isinstance(result, output_class):
-                    raise EntryInvalidError(
-                        f"Invalid {self.__class__.__name__} item found: {resolved_body}"
+                    raise EntryInvalidError.from_entry(
+                        f"Invalid {self.__class__.__name__} item found: {resolved_body}", entry
                     )
         finally:
             self.__current_context = tuple()
@@ -489,12 +495,13 @@ class EnumeratedItem(YAMLTag):
             )
         except ValueError as exc:
             if self.depth == 0:
-                raise EntryInvalidError(
+                raise EntryInvalidError.from_entry(
                     f"{self.__class__.__name__} tags are only usable "
-                    f"inside an {Enumerate.__name__} tag"
+                    f"inside an {Enumerate.__name__} tag",
+                    entry,
                 )
 
-            raise EntryInvalidError(f"{self.__class__.__name__} tag: {exc}")
+            raise EntryInvalidError.from_entry(f"{self.__class__.__name__} tag: {exc}", entry)
 
         return context_tag.get_context(entry, blueprint)
 
@@ -508,7 +515,7 @@ class Index(EnumeratedItem):
         try:
             return context[0]
         except IndexError:  # pragma: no cover
-            raise EntryInvalidError(f"Empty/invalid context: {context}")
+            raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry)
 
 
 class Value(EnumeratedItem):
@@ -520,7 +527,7 @@ class Value(EnumeratedItem):
         try:
             return context[1]
         except IndexError:  # pragma: no cover
-            raise EntryInvalidError(f"Empty/invalid context: {context}")
+            raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry)
 
 
 class BlueprintDumper(SafeDumper):
@@ -574,8 +581,26 @@ class BlueprintLoader(SafeLoader):
 class EntryInvalidError(SentryIgnoredException):
     """Error raised when an entry is invalid"""
 
-    serializer_errors: Optional[dict]
+    entry_model: Optional[str]
+    entry_id: Optional[str]
+    validation_error: Optional[ValidationError]
 
-    def __init__(self, *args: object, serializer_errors: Optional[dict] = None) -> None:
+    def __init__(self, *args: object, validation_error: Optional[ValidationError] = None) -> None:
         super().__init__(*args)
-        self.serializer_errors = serializer_errors
+        self.entry_model = None
+        self.entry_id = None
+        self.validation_error = validation_error
+
+    @staticmethod
+    def from_entry(
+        msg_or_exc: str | Exception, entry: BlueprintEntry, *args, **kwargs
+    ) -> "EntryInvalidError":
+        """Create EntryInvalidError with the context of an entry"""
+        error = EntryInvalidError(msg_or_exc, *args, **kwargs)
+        if isinstance(msg_or_exc, ValidationError):
+            error.validation_error = msg_or_exc
+        # Make sure the model and id are strings, depending where the error happens
+        # they might still be YAMLTag instances
+        error.entry_model = str(entry.model)
+        error.entry_id = str(entry.id)
+        return error
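`from_entry` stamps the failing entry's model and id onto the exception (coerced to `str`, since they may still be unresolved `YAMLTag` instances), which lets callers map a validation failure back to a specific blueprint entry. A simplified, self-contained sketch of the behaviour:

```python
# Simplified sketch: BlueprintEntry is reduced to the two fields the error
# needs, and SentryIgnoredException is replaced by plain Exception.
from dataclasses import dataclass
from typing import Optional


@dataclass
class BlueprintEntry:
    model: str
    id: Optional[str] = None


class EntryInvalidError(Exception):
    def __init__(self, *args, validation_error=None):
        super().__init__(*args)
        self.entry_model: Optional[str] = None
        self.entry_id: Optional[str] = None
        self.validation_error = validation_error

    @staticmethod
    def from_entry(msg_or_exc, entry: BlueprintEntry) -> "EntryInvalidError":
        error = EntryInvalidError(msg_or_exc)
        # str(), because model/id may still be YAMLTag instances at this point
        error.entry_model = str(entry.model)
        error.entry_id = str(entry.id)
        return error


try:
    raise EntryInvalidError.from_entry(
        "invalid attrs", BlueprintEntry("authentik_core.application", "app")
    )
except EntryInvalidError as exc:
    print(exc.entry_model, exc.entry_id)  # authentik_core.application app
```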
@@ -8,9 +8,9 @@ from dacite.core import from_dict
 from dacite.exceptions import DaciteError
 from deepmerge import always_merger
 from django.core.exceptions import FieldError
-from django.db import transaction
 from django.db.models import Model
 from django.db.models.query_utils import Q
+from django.db.transaction import atomic
 from django.db.utils import IntegrityError
 from rest_framework.exceptions import ValidationError
 from rest_framework.serializers import BaseSerializer, Serializer
@@ -38,6 +38,7 @@ from authentik.core.models import (
 from authentik.events.utils import cleanse_dict
 from authentik.flows.models import FlowToken, Stage
 from authentik.lib.models import SerializerModel
+from authentik.lib.sentry import SentryIgnoredException
 from authentik.outposts.models import OutpostServiceConnection
 from authentik.policies.models import Policy, PolicyBindingModel
 
@@ -72,41 +73,53 @@ def is_model_allowed(model: type[Model]) -> bool:
     return model not in excluded_models and issubclass(model, (SerializerModel, BaseMetaModel))
 
 
+class DoRollback(SentryIgnoredException):
+    """Exception to trigger a rollback"""
+
+
 @contextmanager
 def transaction_rollback():
     """Enters an atomic transaction and always triggers a rollback at the end of the block."""
-    atomic = transaction.atomic()
-    # pylint: disable=unnecessary-dunder-call
-    atomic.__enter__()
-    yield
-    atomic.__exit__(IntegrityError, None, None)
+    try:
+        with atomic():
+            yield
+            raise DoRollback()
+    except DoRollback:
+        pass
 
 
 class Importer:
-    """Import Blueprint from YAML"""
+    """Import Blueprint from raw dict or YAML/JSON"""
 
     logger: BoundLogger
+    _import: Blueprint
 
-    def __init__(self, yaml_input: str, context: Optional[dict] = None):
+    def __init__(self, blueprint: Blueprint, context: Optional[dict] = None):
         self.__pk_map: dict[Any, Model] = {}
+        self._import = blueprint
         self.logger = get_logger()
+        ctx = {}
+        always_merger.merge(ctx, self._import.context)
+        if context:
+            always_merger.merge(ctx, context)
+        self._import.context = ctx
+
+    @staticmethod
+    def from_string(yaml_input: str, context: dict | None = None) -> "Importer":
+        """Parse YAML string and create blueprint importer from it"""
         import_dict = load(yaml_input, BlueprintLoader)
         try:
-            self.__import = from_dict(
+            _import = from_dict(
                 Blueprint, import_dict, config=Config(cast=[BlueprintEntryDesiredState])
             )
         except DaciteError as exc:
             raise EntryInvalidError from exc
-        ctx = {}
-        always_merger.merge(ctx, self.__import.context)
-        if context:
-            always_merger.merge(ctx, context)
-        self.__import.context = ctx
+        return Importer(_import, context)
 
     @property
     def blueprint(self) -> Blueprint:
         """Get imported blueprint"""
-        return self.__import
+        return self._import
 
     def __update_pks_for_attrs(self, attrs: dict[str, Any]) -> dict[str, Any]:
         """Replace any value if it is a known primary key of an other object"""
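The rewrite above replaces the old manual `atomic.__enter__()` / `atomic.__exit__(IntegrityError, None, None)` dance, which pretended an `IntegrityError` had occurred to force a rollback and never ran `__exit__` if the block itself raised. Raising a dedicated sentinel inside a regular `with atomic():` block is both simpler and exception-safe. A sketch of the pattern (requires Django; `SentryIgnoredException` simplified to `Exception`):

```python
from contextlib import contextmanager

from django.db.transaction import atomic


class DoRollback(Exception):
    """Sentinel exception whose only purpose is to trigger a rollback"""


@contextmanager
def transaction_rollback():
    """Enter an atomic transaction and always roll it back afterwards."""
    try:
        with atomic():
            yield
            # Reached only when the block finished without errors; atomic()
            # sees the exception on the way out and rolls the transaction back.
            raise DoRollback()
    except DoRollback:
        pass
```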
@@ -152,19 +165,19 @@ class Importer:
     # pylint: disable-msg=too-many-locals
     def _validate_single(self, entry: BlueprintEntry) -> Optional[BaseSerializer]:
         """Validate a single entry"""
-        if not entry.check_all_conditions_match(self.__import):
+        if not entry.check_all_conditions_match(self._import):
             self.logger.debug("One or more conditions of this entry are not fulfilled, skipping")
             return None
 
-        model_app_label, model_name = entry.get_model(self.__import).split(".")
+        model_app_label, model_name = entry.get_model(self._import).split(".")
         model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
         # Don't use isinstance since we don't want to check for inheritance
         if not is_model_allowed(model):
-            raise EntryInvalidError(f"Model {model} not allowed")
+            raise EntryInvalidError.from_entry(f"Model {model} not allowed", entry)
         if issubclass(model, BaseMetaModel):
             serializer_class: type[Serializer] = model.serializer()
             serializer = serializer_class(
-                data=entry.get_attrs(self.__import),
+                data=entry.get_attrs(self._import),
                 context={
                     SERIALIZER_CONTEXT_BLUEPRINT: entry,
                 },
@@ -172,8 +185,10 @@ class Importer:
             try:
                 serializer.is_valid(raise_exception=True)
             except ValidationError as exc:
-                raise EntryInvalidError(
-                    f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
+                raise EntryInvalidError.from_entry(
+                    f"Serializer errors {serializer.errors}",
+                    validation_error=exc,
+                    entry=entry,
                 ) from exc
             return serializer
 
@@ -182,7 +197,7 @@ class Importer:
         # the full serializer for later usage
         # Because a model might have multiple unique columns, we chain all identifiers together
         # to create an OR query.
-        updated_identifiers = self.__update_pks_for_attrs(entry.get_identifiers(self.__import))
+        updated_identifiers = self.__update_pks_for_attrs(entry.get_identifiers(self._import))
         for key, value in list(updated_identifiers.items()):
             if isinstance(value, dict) and "pk" in value:
                 del updated_identifiers[key]
@@ -190,12 +205,12 @@ class Importer:
 
         query = self.__query_from_identifier(updated_identifiers)
         if not query:
-            raise EntryInvalidError("No or invalid identifiers")
+            raise EntryInvalidError.from_entry("No or invalid identifiers", entry)
 
         try:
             existing_models = model.objects.filter(query)
         except FieldError as exc:
-            raise EntryInvalidError(f"Invalid identifier field: {exc}") from exc
+            raise EntryInvalidError.from_entry(f"Invalid identifier field: {exc}", entry) from exc
 
         serializer_kwargs = {}
         model_instance = existing_models.first()
@@ -208,6 +223,14 @@ class Importer:
             )
             serializer_kwargs["instance"] = model_instance
             serializer_kwargs["partial"] = True
+        elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED:
+            raise EntryInvalidError.from_entry(
+                (
+                    f"state is set to {BlueprintEntryDesiredState.MUST_CREATED} "
+                    "and object exists already",
+                ),
+                entry,
+            )
         else:
             self.logger.debug(
                 "initialised new serializer instance",
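With `MUST_CREATED`, an entry now fails outright when a matching object already exists, instead of being updated (`present`) or silently left alone (`created`). A hypothetical blueprint exercising the new state:

```python
# Hypothetical blueprint using the new must_created state. Applying it twice
# should fail the second validation, since the group then already exists.
from authentik.blueprints.v1.importer import Importer

BLUEPRINT_YAML = """
version: 1
entries:
  - model: authentik_core.group
    state: must_created
    identifiers:
      name: example-must-create
    attrs:
      name: example-must-create
"""

importer = Importer.from_string(BLUEPRINT_YAML)
assert importer.validate()[0]
assert importer.apply()
# Second import: _validate_single() now hits the
# "state is set to ... and object exists already" EntryInvalidError path.
assert not Importer.from_string(BLUEPRINT_YAML).validate()[0]
```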
@@ -220,9 +243,9 @@ class Importer:
             model_instance.pk = updated_identifiers["pk"]
             serializer_kwargs["instance"] = model_instance
         try:
-            full_data = self.__update_pks_for_attrs(entry.get_attrs(self.__import))
+            full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import))
         except ValueError as exc:
-            raise EntryInvalidError(exc) from exc
+            raise EntryInvalidError.from_entry(exc, entry) from exc
         always_merger.merge(full_data, updated_identifiers)
         serializer_kwargs["data"] = full_data
 
@@ -235,15 +258,17 @@ class Importer:
         try:
             serializer.is_valid(raise_exception=True)
         except ValidationError as exc:
-            raise EntryInvalidError(
-                f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
+            raise EntryInvalidError.from_entry(
+                f"Serializer errors {serializer.errors}",
+                validation_error=exc,
+                entry=entry,
             ) from exc
         return serializer
 
     def apply(self) -> bool:
         """Apply (create/update) models yaml, in database transaction"""
         try:
-            with transaction.atomic():
+            with atomic():
                 if not self._apply_models():
                     self.logger.debug("Reverting changes due to error")
                     raise IntegrityError
@@ -252,11 +277,11 @@ class Importer:
             self.logger.debug("Committing changes")
         return True
 
-    def _apply_models(self) -> bool:
+    def _apply_models(self, raise_errors=False) -> bool:
         """Apply (create/update) models yaml"""
         self.__pk_map = {}
-        for entry in self.__import.entries:
-            model_app_label, model_name = entry.get_model(self.__import).split(".")
+        for entry in self._import.entries:
+            model_app_label, model_name = entry.get_model(self._import).split(".")
             try:
                 model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
             except LookupError:
@@ -269,15 +294,21 @@ class Importer:
                 serializer = self._validate_single(entry)
             except EntryInvalidError as exc:
                 # For deleting objects we don't need the serializer to be valid
-                if entry.get_state(self.__import) == BlueprintEntryDesiredState.ABSENT:
+                if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT:
                     continue
                 self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
+                if raise_errors:
+                    raise exc
                 return False
             if not serializer:
                 continue
 
-            state = entry.get_state(self.__import)
-            if state in [BlueprintEntryDesiredState.PRESENT, BlueprintEntryDesiredState.CREATED]:
+            state = entry.get_state(self._import)
+            if state in [
+                BlueprintEntryDesiredState.PRESENT,
+                BlueprintEntryDesiredState.CREATED,
+                BlueprintEntryDesiredState.MUST_CREATED,
+            ]:
                 instance = serializer.instance
                 if (
                     instance
@@ -305,23 +336,23 @@ class Importer:
                 self.logger.debug("entry to delete with no instance, skipping")
         return True
 
-    def validate(self) -> tuple[bool, list[EventDict]]:
+    def validate(self, raise_validation_errors=False) -> tuple[bool, list[EventDict]]:
         """Validate loaded blueprint export, ensure all models are allowed
         and serializers have no errors"""
         self.logger.debug("Starting blueprint import validation")
-        orig_import = deepcopy(self.__import)
-        if self.__import.version != 1:
+        orig_import = deepcopy(self._import)
+        if self._import.version != 1:
             self.logger.warning("Invalid blueprint version")
             return False, [{"event": "Invalid blueprint version"}]
         with (
             transaction_rollback(),
             capture_logs() as logs,
         ):
-            successful = self._apply_models()
+            successful = self._apply_models(raise_errors=raise_validation_errors)
             if not successful:
                 self.logger.debug("Blueprint validation failed")
         for log in logs:
             getattr(self.logger, log.get("log_level"))(**log)
         self.logger.debug("Finished blueprint import validation")
-        self.__import = orig_import
+        self._import = orig_import
         return successful, logs
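`validate()` runs `_apply_models()` inside `transaction_rollback()`, so nothing persists; the new `raise_validation_errors` flag switches it from collect-and-report to fail-fast, which the transactional API below relies on to surface per-entry errors. A sketch of both modes (blueprint content hypothetical):

```python
# Sketch of the two validate() modes after this change.
from authentik.blueprints.v1.common import EntryInvalidError
from authentik.blueprints.v1.importer import Importer

importer = Importer.from_string('{"version": 1, "entries": []}')

# Default: collect-and-report, as used by the blueprint apply tasks
valid, logs = importer.validate()

# Strict: abort on the first invalid entry, keeping its entry context
try:
    valid, _ = importer.validate(raise_validation_errors=True)
except EntryInvalidError as exc:
    print(exc.entry_id, exc.validation_error)
```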
@@ -190,7 +190,7 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
         self.set_uid(slugify(instance.name))
         blueprint_content = instance.retrieve()
         file_hash = sha512(blueprint_content.encode()).hexdigest()
-        importer = Importer(blueprint_content, instance.context)
+        importer = Importer.from_string(blueprint_content, instance.context)
         if importer.blueprint.metadata:
             instance.metadata = asdict(importer.blueprint.metadata)
         valid, logs = importer.validate()
@@ -1,6 +1,4 @@
 """Authenticator Devices API Views"""
-from django_otp import device_classes, devices_for_user
-from django_otp.models import Device
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import OpenApiParameter, extend_schema
 from rest_framework.fields import BooleanField, CharField, IntegerField, SerializerMethodField
@@ -10,6 +8,8 @@ from rest_framework.response import Response
 from rest_framework.viewsets import ViewSet
 
 from authentik.core.api.utils import MetaNameSerializer
+from authentik.stages.authenticator import device_classes, devices_for_user
+from authentik.stages.authenticator.models import Device
 
 
 class DeviceSerializer(MetaNameSerializer):
authentik/core/api/transactional_applications.py (new file, 139 lines)
@@ -0,0 +1,139 @@
+"""transactional application and provider creation"""
+from django.apps import apps
+from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema, extend_schema_field
+from rest_framework.exceptions import ValidationError
+from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, ListField
+from rest_framework.permissions import IsAdminUser
+from rest_framework.request import Request
+from rest_framework.response import Response
+from rest_framework.views import APIView
+from yaml import ScalarNode
+
+from authentik.blueprints.v1.common import (
+    Blueprint,
+    BlueprintEntry,
+    BlueprintEntryDesiredState,
+    EntryInvalidError,
+    KeyOf,
+)
+from authentik.blueprints.v1.importer import Importer
+from authentik.core.api.applications import ApplicationSerializer
+from authentik.core.api.utils import PassiveSerializer
+from authentik.core.models import Provider
+from authentik.lib.utils.reflection import all_subclasses
+
+
+def get_provider_serializer_mapping():
+    """Get a mapping of all providers' model names and their serializers"""
+    mapping = {}
+    for model in all_subclasses(Provider):
+        if model._meta.abstract:
+            continue
+        mapping[f"{model._meta.app_label}.{model._meta.model_name}"] = model().serializer
+    return mapping
+
+
+@extend_schema_field(
+    PolymorphicProxySerializer(
+        component_name="model",
+        serializers=get_provider_serializer_mapping,
+        resource_type_field_name="provider_model",
+    )
+)
+class TransactionProviderField(DictField):
+    """Dictionary field which can hold provider creation data"""
+
+
+class TransactionApplicationSerializer(PassiveSerializer):
+    """Serializer for creating a provider and an application in one transaction"""
+
+    app = ApplicationSerializer()
+    provider_model = ChoiceField(choices=list(get_provider_serializer_mapping().keys()))
+    provider = TransactionProviderField()
+
+    _provider_model: type[Provider] = None
+
+    def validate_provider_model(self, fq_model_name: str) -> str:
+        """Validate that the model exists and is a provider"""
+        if "." not in fq_model_name:
+            raise ValidationError("Invalid provider model")
+        try:
+            app, _, model_name = fq_model_name.partition(".")
+            model = apps.get_model(app, model_name)
+            if not issubclass(model, Provider):
+                raise ValidationError("Invalid provider model")
+            self._provider_model = model
+        except LookupError:
+            raise ValidationError("Invalid provider model")
+        return fq_model_name
+
+    def validate(self, attrs: dict) -> dict:
+        blueprint = Blueprint()
+        blueprint.entries.append(
+            BlueprintEntry(
+                model=attrs["provider_model"],
+                state=BlueprintEntryDesiredState.MUST_CREATED,
+                identifiers={
+                    "name": attrs["provider"]["name"],
+                },
+                # Must match the name of the field on `self`
+                id="provider",
+                attrs=attrs["provider"],
+            )
+        )
+        app_data = attrs["app"]
+        app_data["provider"] = KeyOf(None, ScalarNode(tag="", value="provider"))
+        blueprint.entries.append(
+            BlueprintEntry(
+                model="authentik_core.application",
+                state=BlueprintEntryDesiredState.MUST_CREATED,
+                identifiers={
+                    "slug": attrs["app"]["slug"],
+                },
+                attrs=app_data,
+                # Must match the name of the field on `self`
+                id="app",
+            )
+        )
+        importer = Importer(blueprint, {})
+        try:
+            valid, _ = importer.validate(raise_validation_errors=True)
+            if not valid:
+                raise ValidationError("Invalid blueprint")
+        except EntryInvalidError as exc:
+            raise ValidationError(
+                {
+                    exc.entry_id: exc.validation_error.detail,
+                }
+            )
+        return blueprint
+
+
+class TransactionApplicationResponseSerializer(PassiveSerializer):
+    """Transactional creation response"""
+
+    applied = BooleanField()
+    logs = ListField(child=CharField())
+
+
+class TransactionalApplicationView(APIView):
+    """Create provider and application and attach them in a single transaction"""
+
+    permission_classes = [IsAdminUser]
+
+    @extend_schema(
+        request=TransactionApplicationSerializer(),
+        responses={
+            200: TransactionApplicationResponseSerializer(),
+        },
+    )
+    def put(self, request: Request) -> Response:
+        """Convert data into a blueprint, validate it and apply it"""
+        data = TransactionApplicationSerializer(data=request.data)
+        data.is_valid(raise_exception=True)
+
+        importer = Importer(data.validated_data, {})
+        applied = importer.apply()
+        response = {"applied": False, "logs": []}
+        response["applied"] = applied
+        return Response(response, status=200)
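Putting the pieces together: the endpoint turns one request into a two-entry blueprint, both entries use `MUST_CREATED`, and the `KeyOf` tag wires the freshly created provider's primary key into the application's `provider` field before the whole thing is applied atomically. A hypothetical client call (the URL path and all field values are illustrative, not confirmed by this diff):

```python
import requests

payload = {
    "app": {
        "name": "Example App",
        "slug": "example-app",
    },
    # Fully qualified "app_label.model_name" of a concrete Provider subclass
    "provider_model": "authentik_providers_oauth2.oauth2provider",
    "provider": {
        "name": "provider-for-example-app",
        # ...provider-specific fields, validated by that provider's serializer
    },
}
response = requests.put(
    "https://authentik.example.com/api/v3/core/transactional/applications/",
    json=payload,
    headers={"Authorization": "Bearer <token>"},
    timeout=30,
)
print(response.json())  # e.g. {"applied": true, "logs": []}
```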
@@ -616,8 +616,10 @@ class UserViewSet(UsedByMixin, ModelViewSet):
         if not request.user.has_perm("impersonate"):
             LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
             return Response(status=401)
-
         user_to_be = self.get_object()
+        if user_to_be.pk == self.request.user.pk:
+            LOGGER.debug("User attempted to impersonate themselves", user=request.user)
+            return Response(status=401)
 
         request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER] = request.user
         request.session[SESSION_KEY_IMPERSONATE_USER] = user_to_be
authentik/core/management/commands/dev_server.py (new file, 9 lines)
@@ -0,0 +1,9 @@
+"""custom runserver command"""
+from daphne.management.commands.runserver import Command as RunServer
+
+
+class Command(RunServer):
+    """custom runserver command, which doesn't show the misleading django startup message"""
+
+    def on_bind(self, server_port):
+        pass
@@ -16,6 +16,9 @@ LOGGER = get_logger()
 class Command(BaseCommand):
     """Run worker"""
 
+    def add_arguments(self, parser):
+        parser.add_argument("-b", "--beat", action="store_true")
+
     def handle(self, **options):
         close_old_connections()
         if CONFIG.get_bool("remote_debug"):
@@ -26,9 +29,9 @@ class Command(BaseCommand):
             no_color=False,
             quiet=True,
             optimization="fair",
-            autoscale=(3, 1),
+            autoscale=(CONFIG.get_int("worker.concurrency"), 1),
             task_events=True,
-            beat=True,
+            beat=options.get("beat", True),
             schedule_filename=f"{tempdir}/celerybeat-schedule",
             queues=["authentik", "authentik_scheduled", "authentik_events"],
         )
authentik/core/migrations/0032_groupsourceconnection.py (new file, 41 lines)
@@ -0,0 +1,41 @@
+# Generated by Django 4.2.5 on 2023-09-27 10:44
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("authentik_core", "0031_alter_user_type"),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name="GroupSourceConnection",
+            fields=[
+                (
+                    "id",
+                    models.AutoField(
+                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
+                    ),
+                ),
+                ("created", models.DateTimeField(auto_now_add=True)),
+                ("last_updated", models.DateTimeField(auto_now=True)),
+                (
+                    "group",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE, to="authentik_core.group"
+                    ),
+                ),
+                (
+                    "source",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE, to="authentik_core.source"
+                    ),
+                ),
+            ],
+            options={
+                "unique_together": {("group", "source")},
+            },
+        ),
+    ]
@@ -575,6 +575,23 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel):
         unique_together = (("user", "source"),)
 
 
+class GroupSourceConnection(SerializerModel, CreatedUpdatedModel):
+    """Connection between Group and Source."""
+
+    group = models.ForeignKey(Group, on_delete=models.CASCADE)
+    source = models.ForeignKey(Source, on_delete=models.CASCADE)
+
+    objects = InheritanceManager()
+
+    @property
+    def serializer(self) -> type[Serializer]:
+        """Get serializer for this model"""
+        raise NotImplementedError
+
+    class Meta:
+        unique_together = (("group", "source"),)
+
+
 class ExpiringModel(models.Model):
     """Base Model which can expire, and is automatically cleaned up."""
 
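Like `UserSourceConnection`, the base model's `serializer` property raises `NotImplementedError`: concrete source types are expected to subclass `GroupSourceConnection` and return their own serializer. A hypothetical subclass, purely for illustration (neither the class nor its serializer exists in this changeset):

```python
# Hypothetical source-specific subclass showing the intended extension point.
from rest_framework.serializers import Serializer

from authentik.core.models import GroupSourceConnection


class ExampleGroupSourceConnection(GroupSourceConnection):
    """Group connection for a hypothetical "example" source type"""

    @property
    def serializer(self) -> type[Serializer]:
        # Imported lazily, since Django models usually avoid importing their
        # API serializers at module load time
        from example_source.api import ExampleGroupSourceConnectionSerializer

        return ExampleGroupSourceConnectionSerializer
```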
@@ -48,7 +48,7 @@ class Action(Enum):
 class MessageStage(StageView):
     """Show a pre-configured message after the flow is done"""
 
-    def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
+    def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
         """Show a pre-configured message after the flow is done"""
         message = getattr(self.executor.current_stage, "message", "")
         level = getattr(self.executor.current_stage, "level", messages.SUCCESS)
@@ -59,10 +59,6 @@ class MessageStage(StageView):
         )
         return self.executor.stage_ok()
 
-    def post(self, request: HttpRequest) -> HttpResponse:
-        """Wrapper for post requests"""
-        return self.get(request)
-
 
 class SourceFlowManager:
     """Help sources decide what they should do after authorization. Based on source settings and
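Overriding `dispatch()` instead of `get()` removes the need for a `post()` wrapper: `dispatch()` is the single entry point Django calls before routing by HTTP method, so the stage now behaves identically for GET, POST, and anything else. A minimal standalone illustration of the pattern (using a plain Django view rather than `StageView`):

```python
from django.http import HttpRequest, HttpResponse
from django.views import View


class AnyMethodView(View):
    def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
        # Runs for every HTTP method; no per-method handlers required
        return HttpResponse(f"handled {request.method}")
```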
@@ -13,7 +13,7 @@ class PostUserEnrollmentStage(StageView):
     """Dynamically injected stage which saves the Connection after
     the user has been enrolled."""
 
-    def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
+    def dispatch(self, request: HttpRequest) -> HttpResponse:
         """Stage used after the user has been enrolled"""
         connection: UserSourceConnection = self.executor.plan.context[
             PLAN_CONTEXT_SOURCES_CONNECTION
@@ -27,7 +27,3 @@ class PostUserEnrollmentStage(StageView):
             source=connection.source,
         ).from_http(self.request)
         return self.executor.stage_ok()
-
-    def post(self, request: HttpRequest) -> HttpResponse:
-        """Wrapper for post requests"""
-        return self.get(request)
@@ -6,6 +6,7 @@ from rest_framework.test import APITestCase
 
 from authentik.core.models import User
 from authentik.core.tests.utils import create_test_admin_user
+from authentik.lib.config import CONFIG
 
 
 class TestImpersonation(APITestCase):
@@ -46,12 +47,42 @@ class TestImpersonation(APITestCase):
         """test impersonation without permissions"""
         self.client.force_login(self.other_user)
 
-        self.client.get(reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk}))
+        response = self.client.post(
+            reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk})
+        )
+        self.assertEqual(response.status_code, 403)
 
         response = self.client.get(reverse("authentik_api:user-me"))
         response_body = loads(response.content.decode())
         self.assertEqual(response_body["user"]["username"], self.other_user.username)
 
+    @CONFIG.patch("impersonation", False)
+    def test_impersonate_disabled(self):
+        """test impersonation that is disabled"""
+        self.client.force_login(self.user)
+
+        response = self.client.post(
+            reverse("authentik_api:user-impersonate", kwargs={"pk": self.other_user.pk})
+        )
+        self.assertEqual(response.status_code, 401)
+
+        response = self.client.get(reverse("authentik_api:user-me"))
+        response_body = loads(response.content.decode())
+        self.assertEqual(response_body["user"]["username"], self.user.username)
+
+    def test_impersonate_self(self):
+        """test impersonation that user can't impersonate themselves"""
+        self.client.force_login(self.user)
+
+        response = self.client.post(
+            reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk})
+        )
+        self.assertEqual(response.status_code, 401)
+
+        response = self.client.get(reverse("authentik_api:user-me"))
+        response_body = loads(response.content.decode())
+        self.assertEqual(response_body["user"]["username"], self.user.username)
+
     def test_un_impersonate_empty(self):
         """test un-impersonation without impersonating first"""
         self.client.force_login(self.other_user)
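
Note: `test_impersonate_disabled` relies on `CONFIG.patch` to toggle a config key for the duration of one test. A hedged sketch of that pattern (assuming `CONFIG.patch` acts as a decorator that restores the value afterwards, as the hunk suggests):

    from authentik.lib.config import CONFIG

    class ExampleConfigTest(APITestCase):
        @CONFIG.patch("impersonation", False)
        def test_with_impersonation_disabled(self):
            # Inside this test, the "impersonation" key reads as False;
            # the original value is restored when the test finishes.
            ...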
authentik/core/tests/test_transactional_applications_api.py (new file, 64 lines)
@@ -0,0 +1,64 @@
+"""Test Transactional API"""
+from django.urls import reverse
+from rest_framework.test import APITestCase
+
+from authentik.core.models import Application
+from authentik.core.tests.utils import create_test_admin_user, create_test_flow
+from authentik.lib.generators import generate_id
+from authentik.providers.oauth2.models import OAuth2Provider
+
+
+class TestTransactionalApplicationsAPI(APITestCase):
+    """Test Transactional API"""
+
+    def setUp(self) -> None:
+        self.user = create_test_admin_user()
+
+    def test_create_transactional(self):
+        """Test transactional Application + provider creation"""
+        self.client.force_login(self.user)
+        uid = generate_id()
+        authorization_flow = create_test_flow()
+        response = self.client.put(
+            reverse("authentik_api:core-transactional-application"),
+            data={
+                "app": {
+                    "name": uid,
+                    "slug": uid,
+                },
+                "provider_model": "authentik_providers_oauth2.oauth2provider",
+                "provider": {
+                    "name": uid,
+                    "authorization_flow": str(authorization_flow.pk),
+                },
+            },
+        )
+        self.assertJSONEqual(response.content.decode(), {"applied": True, "logs": []})
+        provider = OAuth2Provider.objects.filter(name=uid).first()
+        self.assertIsNotNone(provider)
+        app = Application.objects.filter(slug=uid).first()
+        self.assertIsNotNone(app)
+        self.assertEqual(app.provider.pk, provider.pk)
+
+    def test_create_transactional_invalid(self):
+        """Test transactional Application + provider creation"""
+        self.client.force_login(self.user)
+        uid = generate_id()
+        response = self.client.put(
+            reverse("authentik_api:core-transactional-application"),
+            data={
+                "app": {
+                    "name": uid,
+                    "slug": uid,
+                },
+                "provider_model": "authentik_providers_oauth2.oauth2provider",
+                "provider": {
+                    "name": uid,
+                    "authorization_flow": "",
+                },
+            },
+        )
+        self.assertJSONEqual(
+            response.content.decode(),
+            {"provider": {"authorization_flow": ["This field may not be null."]}},
+        )
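
Note: outside the test client, the same endpoint can be exercised over HTTP. A hedged sketch with `requests`; the `/api/v3` prefix, Bearer-token auth, and all concrete values are assumptions, not part of this changeset:

    import requests

    resp = requests.put(
        "https://authentik.example.com/api/v3/core/transactional/applications/",
        headers={"Authorization": "Bearer <api-token>"},
        json={
            "app": {"name": "demo", "slug": "demo"},
            "provider_model": "authentik_providers_oauth2.oauth2provider",
            "provider": {"name": "demo", "authorization_flow": "<flow-uuid>"},
        },
    )
    print(resp.json())  # {"applied": true, "logs": []} on success, per the test above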
@@ -25,10 +25,10 @@ def create_test_admin_user(name: Optional[str] = None, **kwargs) -> User:
     """Generate a test-admin user"""
     uid = generate_id(20) if not name else name
     group = Group.objects.create(name=uid, is_superuser=True)
+    kwargs.setdefault("email", f"{uid}@goauthentik.io")
+    kwargs.setdefault("username", uid)
     user: User = User.objects.create(
-        username=uid,
         name=uid,
-        email=f"{uid}@goauthentik.io",
         **kwargs,
     )
     user.set_password(uid)
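
Note: because the generated identifiers are now applied via `setdefault`, callers can override them without colliding with the hard-coded keyword arguments. A quick sketch (hypothetical values):

    # Defaults are still generated when nothing is passed:
    user = create_test_admin_user()

    # Explicit kwargs now take precedence instead of raising a
    # duplicate-keyword error against username=/email=:
    user = create_test_admin_user(username="fixed-admin", email="admin@test.goauthentik.io")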
@@ -15,6 +15,7 @@ from authentik.core.api.propertymappings import PropertyMappingViewSet
 from authentik.core.api.providers import ProviderViewSet
 from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
 from authentik.core.api.tokens import TokenViewSet
+from authentik.core.api.transactional_applications import TransactionalApplicationView
 from authentik.core.api.users import UserViewSet
 from authentik.core.views import apps
 from authentik.core.views.debug import AccessDeniedView
@@ -70,6 +71,11 @@ urlpatterns = [
 api_urlpatterns = [
     ("core/authenticated_sessions", AuthenticatedSessionViewSet),
     ("core/applications", ApplicationViewSet),
+    path(
+        "core/transactional/applications/",
+        TransactionalApplicationView.as_view(),
+        name="core-transactional-application",
+    ),
     ("core/groups", GroupViewSet),
     ("core/users", UserViewSet),
     ("core/tokens", TokenViewSet),
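
Note: since the transactional endpoint is registered with `path()` under a named URL, callers resolve it via `reverse()`, as the new tests above do. The concrete mount point under the API root is an assumption here:

    from django.urls import reverse

    url = reverse("authentik_api:core-transactional-application")
    # e.g. "/api/v3/core/transactional/applications/" under the default API prefix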
@@ -1,44 +1,30 @@
 """Enterprise license policies"""
 from typing import Optional
 
-from rest_framework.serializers import BaseSerializer
-
 from authentik.core.models import User, UserTypes
 from authentik.enterprise.models import LicenseKey
-from authentik.policies.models import Policy
 from authentik.policies.types import PolicyRequest, PolicyResult
 from authentik.policies.views import PolicyAccessView
 
 
-class EnterprisePolicy(Policy):
-    """Check that a user is correctly licensed for the request"""
-
-    @property
-    def component(self) -> str:
-        return ""
-
-    @property
-    def serializer(self) -> type[BaseSerializer]:
-        raise NotImplementedError
-
-    def passes(self, request: PolicyRequest) -> PolicyResult:
-        if not LicenseKey.get_total().is_valid():
-            return PolicyResult(False)
-        if request.user.type != UserTypes.INTERNAL:
-            return PolicyResult(False)
-        return PolicyResult(True)
-
-
 class EnterprisePolicyAccessView(PolicyAccessView):
     """PolicyAccessView which also checks enterprise licensing"""
 
+    def check_license(self):
+        """Check license"""
+        if not LicenseKey.get_total().is_valid():
+            return False
+        if self.request.user.type != UserTypes.INTERNAL:
+            return False
+        return True
+
     def user_has_access(self, user: Optional[User] = None) -> PolicyResult:
         user = user or self.request.user
         request = PolicyRequest(user)
         request.http_request = self.request
         result = super().user_has_access(user)
-        enterprise_result = EnterprisePolicy().passes(request)
-        if not enterprise_result.passing:
+        enterprise_result = self.check_license()
+        if not enterprise_result:
             return enterprise_result
         return result
@@ -9,7 +9,6 @@ from django.core.exceptions import SuspiciousOperation
 from django.db.models import Model
 from django.db.models.signals import m2m_changed, post_save, pre_delete
 from django.http import HttpRequest, HttpResponse
-from django_otp.plugins.otp_static.models import StaticToken
 from guardian.models import UserObjectPermission
 
 from authentik.core.models import (
@@ -30,6 +29,7 @@ from authentik.outposts.models import OutpostServiceConnection
 from authentik.policies.models import Policy, PolicyBindingModel
 from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
 from authentik.providers.scim.models import SCIMGroup, SCIMUser
+from authentik.stages.authenticator_static.models import StaticToken
 
 IGNORED_MODELS = (
     Event,
@@ -181,7 +181,7 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
         if not file:
             return Response(data=import_response.initial_data, status=400)
 
-        importer = Importer(file.read().decode())
+        importer = Importer.from_string(file.read().decode())
         valid, logs = importer.validate()
         import_response.initial_data["logs"] = [sanitize_dict(log) for log in logs]
         import_response.initial_data["success"] = valid
@@ -26,3 +26,8 @@ class EmptyFlowException(SentryIgnoredException):
 
 class FlowSkipStageException(SentryIgnoredException):
     """Exception to skip a stage"""
+
+
+class StageInvalidException(SentryIgnoredException):
+    """Exception can be thrown in a `Challenge` or `ChallengeResponse` serializer's
+    validation to trigger a `executor.stage_invalid()` response"""
@@ -23,6 +23,7 @@ from authentik.flows.challenge import (
     RedirectChallenge,
     WithUserInfoChallenge,
 )
+from authentik.flows.exceptions import StageInvalidException
 from authentik.flows.models import InvalidResponseAction
 from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, PLAN_CONTEXT_PENDING_USER
 from authentik.lib.avatars import DEFAULT_AVATAR
@@ -100,8 +101,14 @@ class ChallengeStageView(StageView):
 
     def post(self, request: Request, *args, **kwargs) -> HttpResponse:
         """Handle challenge response"""
-        challenge: ChallengeResponse = self.get_response_instance(data=request.data)
-        if not challenge.is_valid():
+        valid = False
+        try:
+            challenge: ChallengeResponse = self.get_response_instance(data=request.data)
+            valid = challenge.is_valid()
+        except StageInvalidException as exc:
+            self.logger.debug("Got StageInvalidException", exc=exc)
+            return self.executor.stage_invalid()
+        if not valid:
             if self.executor.current_binding.invalid_response_action in [
                 InvalidResponseAction.RESTART,
                 InvalidResponseAction.RESTART_WITH_CONTEXT,
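
Note: the new exception gives serializers a way to abort the stage from inside validation: `ChallengeStageView.post` now wraps deserialization and `is_valid()` in a try/except and converts `StageInvalidException` into `executor.stage_invalid()`. A hedged sketch of raising it (hypothetical response class, not from this changeset):

    from authentik.flows.exceptions import StageInvalidException

    class ExampleChallengeResponse(ChallengeResponse):
        # Hypothetical serializer: raising during validation now short-circuits
        # the whole stage into executor.stage_invalid() instead of a field error.
        def validate(self, attrs: dict) -> dict:
            if not attrs.get("token"):
                raise StageInvalidException("device no longer exists")
            return attrs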
@@ -21,8 +21,9 @@ def view_tester_factory(view_class: type[StageView]) -> Callable:
 
     def tester(self: TestViews):
         model_class = view_class(self.exec)
-        self.assertIsNotNone(model_class.post)
-        self.assertIsNotNone(model_class.get)
+        if not hasattr(model_class, "dispatch"):
+            self.assertIsNotNone(model_class.post)
+            self.assertIsNotNone(model_class.get)
 
     return tester
 
@@ -42,6 +42,7 @@ from authentik.flows.models import (
     FlowDesignation,
     FlowStageBinding,
     FlowToken,
+    InvalidResponseAction,
     Stage,
 )
 from authentik.flows.planner import (
@@ -73,40 +74,23 @@ QS_QUERY = "query"
 
 
 def challenge_types():
-    """This is a workaround for PolymorphicProxySerializer not accepting a callable for
-    `serializers`. This function returns a class which is an iterator, which returns the
+    """This function returns a mapping which contains all subclasses of challenges
     subclasses of Challenge, and Challenge itself."""
-
-    class Inner(dict):
-        """dummy class with custom callback on .items()"""
-
-        def items(self):
-            mapping = {}
-            classes = all_subclasses(Challenge)
-            classes.remove(WithUserInfoChallenge)
-            for cls in classes:
-                mapping[cls().fields["component"].default] = cls
-            return mapping.items()
-
-    return Inner()
+    mapping = {}
+    for cls in all_subclasses(Challenge):
+        if cls == WithUserInfoChallenge:
+            continue
+        mapping[cls().fields["component"].default] = cls
+    return mapping
 
 
 def challenge_response_types():
-    """This is a workaround for PolymorphicProxySerializer not accepting a callable for
-    `serializers`. This function returns a class which is an iterator, which returns the
+    """This function returns a mapping which contains all subclasses of challenges
     subclasses of Challenge, and Challenge itself."""
-
-    class Inner(dict):
-        """dummy class with custom callback on .items()"""
-
-        def items(self):
-            mapping = {}
-            classes = all_subclasses(ChallengeResponse)
-            for cls in classes:
-                mapping[cls(stage=None).fields["component"].default] = cls
-            return mapping.items()
-
-    return Inner()
+    mapping = {}
+    for cls in all_subclasses(ChallengeResponse):
+        mapping[cls(stage=None).fields["component"].default] = cls
+    return mapping
 
 
 class InvalidStageError(SentryIgnoredException):
@@ -122,7 +106,7 @@ class FlowExecutorView(APIView):
     flow: Flow
 
     plan: Optional[FlowPlan] = None
-    current_binding: FlowStageBinding
+    current_binding: Optional[FlowStageBinding] = None
     current_stage: Stage
     current_stage_view: View
 
@@ -264,7 +248,7 @@ class FlowExecutorView(APIView):
         responses={
             200: PolymorphicProxySerializer(
                 component_name="ChallengeTypes",
-                serializers=challenge_types(),
+                serializers=challenge_types,
                 resource_type_field_name="component",
             ),
         },
@@ -295,7 +279,7 @@ class FlowExecutorView(APIView):
                 span.set_data("Method", "GET")
                 span.set_data("authentik Stage", self.current_stage_view)
                 span.set_data("authentik Flow", self.flow.slug)
-                stage_response = self.current_stage_view.get(request, *args, **kwargs)
+                stage_response = self.current_stage_view.dispatch(request)
                 return to_stage_response(request, stage_response)
         except Exception as exc:  # pylint: disable=broad-except
             return self.handle_exception(exc)
@@ -304,13 +288,13 @@ class FlowExecutorView(APIView):
         responses={
             200: PolymorphicProxySerializer(
                 component_name="ChallengeTypes",
-                serializers=challenge_types(),
+                serializers=challenge_types,
                 resource_type_field_name="component",
             ),
         },
         request=PolymorphicProxySerializer(
             component_name="FlowChallengeResponse",
-            serializers=challenge_response_types(),
+            serializers=challenge_response_types,
             resource_type_field_name="component",
         ),
         parameters=[
@@ -339,7 +323,7 @@ class FlowExecutorView(APIView):
                 span.set_data("Method", "POST")
                 span.set_data("authentik Stage", self.current_stage_view)
                 span.set_data("authentik Flow", self.flow.slug)
-                stage_response = self.current_stage_view.post(request, *args, **kwargs)
+                stage_response = self.current_stage_view.dispatch(request)
                 return to_stage_response(request, stage_response)
         except Exception as exc:  # pylint: disable=broad-except
             return self.handle_exception(exc)
@@ -362,10 +346,15 @@ class FlowExecutorView(APIView):
     def restart_flow(self, keep_context=False) -> HttpResponse:
         """Restart the currently active flow, optionally keeping the current context"""
         planner = FlowPlanner(self.flow)
+        planner.use_cache = False
         default_context = None
         if keep_context:
             default_context = self.plan.context
-        plan = planner.plan(self.request, default_context)
+        try:
+            plan = planner.plan(self.request, default_context)
+        except FlowNonApplicableException as exc:
+            self._logger.warning("f(exec): Flow restart not applicable to current user", exc=exc)
+            return self.handle_invalid_flow(exc)
         self.request.session[SESSION_KEY_PLAN] = plan
         kwargs = self.kwargs
         kwargs.update({"flow_slug": self.flow.slug})
@@ -423,6 +412,19 @@ class FlowExecutorView(APIView):
         Optionally, an exception can be passed, which will be shown if the current user
         is a superuser."""
         self._logger.debug("f(exec): Stage invalid")
+        if self.current_binding and self.current_binding.invalid_response_action in [
+            InvalidResponseAction.RESTART,
+            InvalidResponseAction.RESTART_WITH_CONTEXT,
+        ]:
+            keep_context = (
+                self.current_binding.invalid_response_action
+                == InvalidResponseAction.RESTART_WITH_CONTEXT
+            )
+            self._logger.debug(
+                "f(exec): Invalid response, restarting flow",
+                keep_context=keep_context,
+            )
+            return self.restart_flow(keep_context)
        self.cancel()
         challenge_view = AccessDeniedChallengeView(self, error_message)
         challenge_view.request = self.request
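
Note: the schema decorators now pass the function itself rather than its result; this relies on drf-spectacular resolving a callable (or mapping) for `serializers`, so the subclass scan runs lazily at schema-generation time. Roughly, and with an illustrative component name that is an assumption:

    # challenge_types() yields {"ak-stage-identification": IdentificationChallenge, ...}
    # (component default -> serializer class); passing the callable defers that lookup:
    PolymorphicProxySerializer(
        component_name="ChallengeTypes",
        serializers=challenge_types,  # invoked by drf-spectacular when the schema is built
        resource_type_field_name="component",
    )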
@@ -1,4 +1,4 @@
-# update website/docs/installation/configuration.md
+# update website/docs/installation/configuration.mdx
 # This is the default configuration file
 postgresql:
   host: localhost
@@ -7,6 +7,7 @@ postgresql:
   port: 5432
   password: "env://POSTGRES_PASSWORD"
   use_pgbouncer: false
+  use_pgpool: false
 
 listen:
   listen_http: 0.0.0.0:9000
@@ -110,3 +111,6 @@ web:
   # No default here as it's set dynamically
   # workers: 2
   threads: 4
+
+worker:
+  concurrency: 2
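
Note: the new `worker.concurrency` key (default 2) is consumed by the Celery settings further below. A minimal sketch of reading it; the environment-variable form is an assumption based on authentik's usual config-to-env mapping:

    from authentik.lib.config import CONFIG

    # Reads worker.concurrency from default.yml / user config; presumably also
    # settable via AUTHENTIK_WORKER__CONCURRENCY in the environment.
    concurrency = CONFIG.get_int("worker.concurrency")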
@@ -7,7 +7,6 @@ from typing import Any, Iterable, Optional
 
 from cachetools import TLRUCache, cached
 from django.core.exceptions import FieldError
-from django_otp import devices_for_user
 from guardian.shortcuts import get_anonymous_user
 from rest_framework.serializers import ValidationError
 from sentry_sdk.hub import Hub
@@ -20,6 +19,7 @@ from authentik.lib.utils.http import get_http_session
 from authentik.policies.models import Policy, PolicyBinding
 from authentik.policies.process import PolicyProcess
 from authentik.policies.types import PolicyRequest, PolicyResult
+from authentik.stages.authenticator import devices_for_user
 
 LOGGER = get_logger()
 
@@ -1,7 +1,112 @@
 """logging helpers"""
+import logging
 from logging import Logger
 from os import getpid
+
+import structlog
+
+from authentik.lib.config import CONFIG
+
+LOG_PRE_CHAIN = [
+    # Add the log level and a timestamp to the event_dict if the log entry
+    # is not from structlog.
+    structlog.stdlib.add_log_level,
+    structlog.stdlib.add_logger_name,
+    structlog.processors.TimeStamper(),
+    structlog.processors.StackInfoRenderer(),
+]
+
+
+def get_log_level():
+    """Get log level, clamp trace to debug"""
+    level = CONFIG.get("log_level").upper()
+    # We could add a custom level to stdlib logging and structlog, but it's not easy or clean
+    # https://stackoverflow.com/questions/54505487/custom-log-level-not-working-with-structlog
+    # Additionally, the entire code uses debug as highest level
+    # so that would have to be re-written too
+    if level == "TRACE":
+        level = "DEBUG"
+    return level
+
+
+def structlog_configure():
+    """Configure structlog itself"""
+    structlog.configure_once(
+        processors=[
+            structlog.stdlib.add_log_level,
+            structlog.stdlib.add_logger_name,
+            structlog.contextvars.merge_contextvars,
+            add_process_id,
+            structlog.stdlib.PositionalArgumentsFormatter(),
+            structlog.processors.TimeStamper(fmt="iso", utc=False),
+            structlog.processors.StackInfoRenderer(),
+            structlog.processors.dict_tracebacks,
+            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
+        ],
+        logger_factory=structlog.stdlib.LoggerFactory(),
+        wrapper_class=structlog.make_filtering_bound_logger(
+            getattr(logging, get_log_level(), logging.WARNING)
+        ),
+        cache_logger_on_first_use=True,
+    )
+
+
+def get_logger_config():
+    """Configure python stdlib's logging"""
+    debug = CONFIG.get_bool("debug")
+    global_level = get_log_level()
+    base_config = {
+        "version": 1,
+        "disable_existing_loggers": False,
+        "formatters": {
+            "json": {
+                "()": structlog.stdlib.ProcessorFormatter,
+                "processor": structlog.processors.JSONRenderer(sort_keys=True),
+                "foreign_pre_chain": LOG_PRE_CHAIN + [structlog.processors.dict_tracebacks],
+            },
+            "console": {
+                "()": structlog.stdlib.ProcessorFormatter,
+                "processor": structlog.dev.ConsoleRenderer(colors=debug),
+                "foreign_pre_chain": LOG_PRE_CHAIN,
+            },
+        },
+        "handlers": {
+            "console": {
+                "level": "DEBUG",
+                "class": "logging.StreamHandler",
+                "formatter": "console" if debug else "json",
+            },
+        },
+        "loggers": {},
+    }
+
+    handler_level_map = {
+        "": global_level,
+        "authentik": global_level,
+        "django": "WARNING",
+        "django.request": "ERROR",
+        "celery": "WARNING",
+        "selenium": "WARNING",
+        "docker": "WARNING",
+        "urllib3": "WARNING",
+        "websockets": "WARNING",
+        "daphne": "WARNING",
+        "kubernetes": "INFO",
+        "asyncio": "WARNING",
+        "redis": "WARNING",
+        "silk": "INFO",
+        "fsevents": "WARNING",
+        "uvicorn": "WARNING",
+        "gunicorn": "INFO",
+    }
+    for handler_name, level in handler_level_map.items():
+        base_config["loggers"][handler_name] = {
+            "handlers": ["console"],
+            "level": level,
+            "propagate": False,
+        }
+    return base_config
+
 
 def add_process_id(logger: Logger, method_name: str, event_dict):
     """Add the current process ID"""
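
Note: in Django the returned dict is applied via the `LOGGING` setting (the settings hunk further below does exactly that), while `structlog_configure()` must be called separately. A sketch of equivalent standalone use, assuming only the public names added here:

    import logging.config

    from authentik.lib.logging import get_logger_config, structlog_configure

    structlog_configure()  # set up the structlog processor chain once
    logging.config.dictConfig(get_logger_config())  # route stdlib loggers through the same formatters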
@@ -77,6 +77,7 @@ class PolicyBindingSerializer(ModelSerializer):
             "enabled",
             "order",
             "timeout",
+            "failure_result",
         ]
 
     def validate(self, attrs: OrderedDict) -> OrderedDict:
@@ -0,0 +1,26 @@
+# Generated by Django 4.2.5 on 2023-09-13 18:07
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("authentik_policies", "0010_alter_policy_name"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="policybinding",
+            name="failure_result",
+            field=models.BooleanField(
+                default=False, help_text="Result if the Policy execution fails."
+            ),
+        ),
+        migrations.AlterField(
+            model_name="policybinding",
+            name="timeout",
+            field=models.PositiveIntegerField(
+                default=30, help_text="Timeout after which Policy execution is terminated."
+            ),
+        ),
+    ]
@@ -85,9 +85,12 @@ class PolicyBinding(SerializerModel):
         default=False,
         help_text=_("Negates the outcome of the policy. Messages are unaffected."),
     )
-    timeout = models.IntegerField(
+    timeout = models.PositiveIntegerField(
         default=30, help_text=_("Timeout after which Policy execution is terminated.")
     )
+    failure_result = models.BooleanField(
+        default=False, help_text=_("Result if the Policy execution fails.")
+    )
 
     order = models.IntegerField()
 
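
Note: a hedged sketch of what the new field changes in practice, mirroring the engine test further down (`pbm` and `policy_that_raises` are hypothetical placeholders):

    # A policy that raises now yields its binding's failure_result
    # instead of a hard False:
    binding = PolicyBinding.objects.create(
        target=pbm, policy=policy_that_raises, order=0, failure_result=True
    )
    # PolicyProcess catches the exception and produces
    # PolicyResult(binding.failure_result, "<error message>"), so this binding passes.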
@@ -98,8 +98,8 @@ class PolicyProcess(PROCESS_CLASS):
             # Create policy exception event, only when we're not debugging
             if not self.request.debug:
                 self.create_event(EventAction.POLICY_EXCEPTION, message=error_string)
-            LOGGER.debug("P_ENG(proc): error", exc=src_exc)
-            policy_result = PolicyResult(False, str(src_exc))
+            LOGGER.debug("P_ENG(proc): error, using failure result", exc=src_exc)
+            policy_result = PolicyResult(self.binding.failure_result, str(src_exc))
         policy_result.source_binding = self.binding
         should_cache = self.request.should_cache
         if should_cache:
@@ -97,6 +97,17 @@ class TestPolicyEngine(TestCase):
         self.assertEqual(result.passing, False)
         self.assertEqual(result.messages, ("division by zero",))
 
+    def test_engine_policy_error_failure(self):
+        """Test policy raising an error flag"""
+        pbm = PolicyBindingModel.objects.create()
+        PolicyBinding.objects.create(
+            target=pbm, policy=self.policy_raises, order=0, failure_result=True
+        )
+        engine = PolicyEngine(pbm, self.user)
+        result = engine.build().result
+        self.assertEqual(result.passing, True)
+        self.assertEqual(result.messages, ("division by zero",))
+
     def test_engine_policy_type(self):
         """Test invalid policy type"""
         pbm = PolicyBindingModel.objects.create()
@@ -171,6 +171,8 @@ class MetadataProcessor:
             entity_descriptor, f"{{{NS_SAML_METADATA}}}IDPSSODescriptor"
         )
         idp_sso_descriptor.attrib["protocolSupportEnumeration"] = NS_SAML_PROTOCOL
+        if self.provider.verification_kp:
+            idp_sso_descriptor.attrib["WantAuthnRequestsSigned"] = "true"
 
         signing_descriptor = self.get_signing_key_descriptor()
         if signing_descriptor is not None:
@@ -12,7 +12,7 @@ from authentik.lib.xml import lxml_from_string
 from authentik.providers.saml.models import SAMLBindings, SAMLPropertyMapping, SAMLProvider
 from authentik.providers.saml.processors.metadata import MetadataProcessor
 from authentik.providers.saml.processors.metadata_parser import ServiceProviderMetadataParser
-from authentik.sources.saml.processors.constants import NS_MAP
+from authentik.sources.saml.processors.constants import NS_MAP, NS_SAML_METADATA
 
 
 class TestServiceProviderMetadataParser(TestCase):
@@ -55,6 +55,24 @@ class TestServiceProviderMetadataParser(TestCase):
         schema = etree.XMLSchema(etree.parse("schemas/saml-schema-metadata-2.0.xsd"))  # nosec
         self.assertTrue(schema.validate(metadata))
 
+    def test_schema_want_authn_requests_signed(self):
+        """Test metadata generation with WantAuthnRequestsSigned"""
+        cert = create_test_cert()
+        provider = SAMLProvider.objects.create(
+            name=generate_id(),
+            authorization_flow=self.flow,
+            verification_kp=cert,
+        )
+        Application.objects.create(
+            name=generate_id(),
+            slug=generate_id(),
+            provider=provider,
+        )
+        request = self.factory.get("/")
+        metadata = lxml_from_string(MetadataProcessor(provider, request).build_entity_descriptor())
+        idp_sso_descriptor = metadata.findall(f"{{{NS_SAML_METADATA}}}IDPSSODescriptor")[0]
+        self.assertEqual(idp_sso_descriptor.attrib["WantAuthnRequestsSigned"], "true")
+
     def test_simple(self):
         """Test simple metadata without Signing"""
         metadata = ServiceProviderMetadataParser().parse(load_fixture("fixtures/simple.xml"))
@@ -172,7 +172,7 @@ class ChannelsLoggingMiddleware:
             LOGGER.info(
                 scope["path"],
                 scheme="ws",
-                remote=scope.get("client", [""])[0],
+                remote=headers.get(b"x-forwarded-for", b"").decode(),
                 user_agent=headers.get(b"user-agent", b"").decode(),
                 **kwargs,
             )
@@ -1,25 +1,21 @@
 """root settings for authentik"""
 
 import importlib
-import logging
 import os
 from hashlib import sha512
 from pathlib import Path
 from urllib.parse import quote_plus
 
-import structlog
 from celery.schedules import crontab
 from sentry_sdk import set_tag
 
 from authentik import ENV_GIT_HASH_KEY, __version__
 from authentik.lib.config import CONFIG
-from authentik.lib.logging import add_process_id
+from authentik.lib.logging import get_logger_config, structlog_configure
 from authentik.lib.sentry import sentry_init
 from authentik.lib.utils.reflection import get_env
 from authentik.stages.password import BACKEND_APP_PASSWORD, BACKEND_INBUILT, BACKEND_LDAP
 
-LOGGER = structlog.get_logger()
-
 BASE_DIR = Path(__file__).absolute().parent.parent.parent
 STATICFILES_DIRS = [BASE_DIR / Path("web")]
 MEDIA_ROOT = BASE_DIR / Path("media")
@@ -41,6 +37,7 @@ CSRF_HEADER_NAME = "HTTP_X_AUTHENTIK_CSRF"
 LANGUAGE_COOKIE_NAME = "authentik_language"
 SESSION_COOKIE_NAME = "authentik_session"
 SESSION_COOKIE_DOMAIN = CONFIG.get("cookie_domain", None)
+APPEND_SLASH = False
 
 AUTHENTICATION_BACKENDS = [
     "django.contrib.auth.backends.ModelBackend",
@@ -85,6 +82,7 @@ INSTALLED_APPS = [
     "authentik.sources.oauth",
     "authentik.sources.plex",
     "authentik.sources.saml",
+    "authentik.stages.authenticator",
     "authentik.stages.authenticator_duo",
     "authentik.stages.authenticator_sms",
     "authentik.stages.authenticator_static",
@@ -282,6 +280,9 @@ DATABASES = {
         }
     }
 }
+
+if CONFIG.get_bool("postgresql.use_pgpool", False):
+    DATABASES["default"]["DISABLE_SERVER_SIDE_CURSORS"] = True
 
 if CONFIG.get_bool("postgresql.use_pgbouncer", False):
     # https://docs.djangoproject.com/en/4.0/ref/databases/#transaction-pooling-server-side-cursors
     DATABASES["default"]["DISABLE_SERVER_SIDE_CURSORS"] = True
@@ -332,7 +333,7 @@ LOCALE_PATHS = ["./locale"]
 CELERY = {
     "task_soft_time_limit": 600,
     "worker_max_tasks_per_child": 50,
-    "worker_concurrency": 2,
+    "worker_concurrency": CONFIG.get_int("worker.concurrency"),
     "beat_schedule": {
         "clean_expired_models": {
             "task": "authentik.core.tasks.clean_expired_models",
@@ -368,90 +369,9 @@ MEDIA_URL = "/media/"
 
 TEST = False
 TEST_RUNNER = "authentik.root.test_runner.PytestTestRunner"
-# We can't check TEST here as its set later by the test runner
-LOG_LEVEL = CONFIG.get("log_level").upper() if "TF_BUILD" not in os.environ else "DEBUG"
-# We could add a custom level to stdlib logging and structlog, but it's not easy or clean
-# https://stackoverflow.com/questions/54505487/custom-log-level-not-working-with-structlog
-# Additionally, the entire code uses debug as highest level so that would have to be re-written too
-if LOG_LEVEL == "TRACE":
-    LOG_LEVEL = "DEBUG"
-
-structlog.configure_once(
-    processors=[
-        structlog.stdlib.add_log_level,
-        structlog.stdlib.add_logger_name,
-        structlog.contextvars.merge_contextvars,
-        add_process_id,
-        structlog.stdlib.PositionalArgumentsFormatter(),
-        structlog.processors.TimeStamper(fmt="iso", utc=False),
-        structlog.processors.StackInfoRenderer(),
-        structlog.processors.dict_tracebacks,
-        structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
-    ],
-    logger_factory=structlog.stdlib.LoggerFactory(),
-    wrapper_class=structlog.make_filtering_bound_logger(
-        getattr(logging, LOG_LEVEL, logging.WARNING)
-    ),
-    cache_logger_on_first_use=True,
-)
-
-LOG_PRE_CHAIN = [
-    # Add the log level and a timestamp to the event_dict if the log entry
-    # is not from structlog.
-    structlog.stdlib.add_log_level,
-    structlog.stdlib.add_logger_name,
-    structlog.processors.TimeStamper(),
-    structlog.processors.StackInfoRenderer(),
-]
-
-LOGGING = {
-    "version": 1,
-    "disable_existing_loggers": False,
-    "formatters": {
-        "json": {
-            "()": structlog.stdlib.ProcessorFormatter,
-            "processor": structlog.processors.JSONRenderer(sort_keys=True),
-            "foreign_pre_chain": LOG_PRE_CHAIN + [structlog.processors.dict_tracebacks],
-        },
-        "console": {
-            "()": structlog.stdlib.ProcessorFormatter,
-            "processor": structlog.dev.ConsoleRenderer(colors=DEBUG),
-            "foreign_pre_chain": LOG_PRE_CHAIN,
-        },
-    },
-    "handlers": {
-        "console": {
-            "level": "DEBUG",
-            "class": "logging.StreamHandler",
-            "formatter": "console" if DEBUG else "json",
-        },
-    },
-    "loggers": {},
-}
-
-_LOGGING_HANDLER_MAP = {
-    "": LOG_LEVEL,
-    "authentik": LOG_LEVEL,
-    "django": "WARNING",
-    "django.request": "ERROR",
-    "celery": "WARNING",
-    "selenium": "WARNING",
-    "docker": "WARNING",
-    "urllib3": "WARNING",
-    "websockets": "WARNING",
-    "daphne": "WARNING",
-    "kubernetes": "INFO",
-    "asyncio": "WARNING",
-    "redis": "WARNING",
-    "silk": "INFO",
-    "fsevents": "WARNING",
-}
-for handler_name, level in _LOGGING_HANDLER_MAP.items():
-    LOGGING["loggers"][handler_name] = {
-        "handlers": ["console"],
-        "level": level,
-        "propagate": False,
-    }
-
+structlog_configure()
+LOGGING = get_logger_config()
 
 _DISALLOWED_ITEMS = [
@@ -20,7 +20,7 @@ class PytestTestRunner:  # pragma: no cover
         self.failfast = failfast
         self.keepdb = keepdb
 
-        self.args = ["-vv", "--full-trace"]
+        self.args = []
         if self.failfast:
             self.args.append("--exitfirst")
         if self.keepdb:
authentik/sources/ldap/api/__init__.py (new file, 0 lines)

authentik/sources/ldap/api/property_mappings.py (new file, 40 lines)
@@ -0,0 +1,40 @@
+"""Property mapping API Views"""
+from django_filters.filters import AllValuesMultipleFilter
+from django_filters.filterset import FilterSet
+from drf_spectacular.types import OpenApiTypes
+from drf_spectacular.utils import extend_schema_field
+from rest_framework.viewsets import ModelViewSet
+
+from authentik.core.api.propertymappings import PropertyMappingSerializer
+from authentik.core.api.used_by import UsedByMixin
+from authentik.sources.ldap.models import LDAPPropertyMapping
+
+
+class LDAPPropertyMappingSerializer(PropertyMappingSerializer):
+    """LDAP PropertyMapping Serializer"""
+
+    class Meta:
+        model = LDAPPropertyMapping
+        fields = PropertyMappingSerializer.Meta.fields + [
+            "object_field",
+        ]
+
+
+class LDAPPropertyMappingFilter(FilterSet):
+    """Filter for LDAPPropertyMapping"""
+
+    managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))
+
+    class Meta:
+        model = LDAPPropertyMapping
+        fields = "__all__"
+
+
+class LDAPPropertyMappingViewSet(UsedByMixin, ModelViewSet):
+    """LDAP PropertyMapping Viewset"""
+
+    queryset = LDAPPropertyMapping.objects.all()
+    serializer_class = LDAPPropertyMappingSerializer
+    filterset_class = LDAPPropertyMappingFilter
+    search_fields = ["name"]
+    ordering = ["name"]
authentik/sources/ldap/api/source_connections.py (new file, 32 lines)
@@ -0,0 +1,32 @@
+"""LDAP Source Serializer"""
+from django_filters.rest_framework import DjangoFilterBackend
+from rest_framework.filters import OrderingFilter, SearchFilter
+from rest_framework.viewsets import ModelViewSet
+
+from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
+from authentik.core.api.sources import UserSourceConnectionSerializer
+from authentik.core.api.used_by import UsedByMixin
+from authentik.sources.ldap.models import LDAPUserSourceConnection
+
+
+class LDAPUserSourceConnectionSerializer(UserSourceConnectionSerializer):
+    """LDAP Source Serializer"""
+
+    class Meta:
+        model = LDAPUserSourceConnection
+        fields = ["pk", "user", "source", "unique_identifier"]
+        extra_kwargs = {
+            "access_token": {"write_only": True},
+        }
+
+
+class LDAPUserSourceConnectionViewSet(UsedByMixin, ModelViewSet):
+    """Source Viewset"""
+
+    queryset = LDAPUserSourceConnection.objects.all()
+    serializer_class = LDAPUserSourceConnectionSerializer
+    filterset_fields = ["source__slug"]
+    search_fields = ["source__slug"]
+    permission_classes = [OwnerSuperuserPermissions]
+    filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
+    ordering = ["source__slug"]
@@ -1,10 +1,7 @@
 """Source API Views"""
 from typing import Any
 
-from django_filters.filters import AllValuesMultipleFilter
-from django_filters.filterset import FilterSet
-from drf_spectacular.types import OpenApiTypes
-from drf_spectacular.utils import extend_schema, extend_schema_field, inline_serializer
+from drf_spectacular.utils import extend_schema, inline_serializer
 from rest_framework.decorators import action
 from rest_framework.exceptions import ValidationError
 from rest_framework.fields import DictField, ListField
@@ -14,12 +11,11 @@ from rest_framework.response import Response
 from rest_framework.viewsets import ModelViewSet
 
 from authentik.admin.api.tasks import TaskSerializer
-from authentik.core.api.propertymappings import PropertyMappingSerializer
 from authentik.core.api.sources import SourceSerializer
 from authentik.core.api.used_by import UsedByMixin
 from authentik.crypto.models import CertificateKeyPair
 from authentik.events.monitored_tasks import TaskInfo
-from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource
+from authentik.sources.ldap.models import LDAPSource
 from authentik.sources.ldap.tasks import SYNC_CLASSES
 
 
@@ -154,33 +150,3 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
             obj.pop("raw_dn", None)
             all_objects[class_name].append(obj)
         return Response(data=all_objects)
-
-
-class LDAPPropertyMappingSerializer(PropertyMappingSerializer):
-    """LDAP PropertyMapping Serializer"""
-
-    class Meta:
-        model = LDAPPropertyMapping
-        fields = PropertyMappingSerializer.Meta.fields + [
-            "object_field",
-        ]
-
-
-class LDAPPropertyMappingFilter(FilterSet):
-    """Filter for LDAPPropertyMapping"""
-
-    managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))
-
-    class Meta:
-        model = LDAPPropertyMapping
-        fields = "__all__"
-
-
-class LDAPPropertyMappingViewSet(UsedByMixin, ModelViewSet):
-    """LDAP PropertyMapping Viewset"""
-
-    queryset = LDAPPropertyMapping.objects.all()
-    serializer_class = LDAPPropertyMappingSerializer
-    filterset_class = LDAPPropertyMappingFilter
-    search_fields = ["name"]
-    ordering = ["name"]
@@ -0,0 +1,58 @@
+# Generated by Django 4.2.5 on 2023-09-27 10:44
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("authentik_core", "0032_groupsourceconnection"),
+        ("authentik_sources_ldap", "0003_ldapsource_client_certificate_ldapsource_sni_and_more"),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name="LDAPGroupSourceConnection",
+            fields=[
+                (
+                    "groupsourceconnection_ptr",
+                    models.OneToOneField(
+                        auto_created=True,
+                        on_delete=django.db.models.deletion.CASCADE,
+                        parent_link=True,
+                        primary_key=True,
+                        serialize=False,
+                        to="authentik_core.groupsourceconnection",
+                    ),
+                ),
+                ("unique_identifier", models.TextField(unique=True)),
+            ],
+            options={
+                "verbose_name": "LDAP Group Source Connection",
+                "verbose_name_plural": "LDAP Group Source Connections",
+            },
+            bases=("authentik_core.groupsourceconnection",),
+        ),
+        migrations.CreateModel(
+            name="LDAPUserSourceConnection",
+            fields=[
+                (
+                    "usersourceconnection_ptr",
+                    models.OneToOneField(
+                        auto_created=True,
+                        on_delete=django.db.models.deletion.CASCADE,
+                        parent_link=True,
+                        primary_key=True,
+                        serialize=False,
+                        to="authentik_core.usersourceconnection",
+                    ),
+                ),
+                ("unique_identifier", models.TextField(unique=True)),
+            ],
+            options={
+                "verbose_name": "LDAP User Source Connection",
+                "verbose_name_plural": "LDAP User Source Connections",
+            },
+            bases=("authentik_core.usersourceconnection",),
+        ),
+    ]
@ -10,7 +10,13 @@ from ldap3 import ALL, NONE, RANDOM, Connection, Server, ServerPool, Tls
|
|||||||
from ldap3.core.exceptions import LDAPInsufficientAccessRightsResult, LDAPSchemaError
|
from ldap3.core.exceptions import LDAPInsufficientAccessRightsResult, LDAPSchemaError
|
||||||
from rest_framework.serializers import Serializer
|
from rest_framework.serializers import Serializer
|
||||||
|
|
||||||
from authentik.core.models import Group, PropertyMapping, Source
|
from authentik.core.models import (
|
||||||
|
Group,
|
||||||
|
GroupSourceConnection,
|
||||||
|
PropertyMapping,
|
||||||
|
Source,
|
||||||
|
UserSourceConnection,
|
||||||
|
)
|
||||||
from authentik.crypto.models import CertificateKeyPair
|
from authentik.crypto.models import CertificateKeyPair
|
||||||
from authentik.lib.config import CONFIG
|
from authentik.lib.config import CONFIG
|
||||||
from authentik.lib.models import DomainlessURLValidator
|
from authentik.lib.models import DomainlessURLValidator
|
||||||
@ -113,7 +119,7 @@ class LDAPSource(Source):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def serializer(self) -> type[Serializer]:
|
def serializer(self) -> type[Serializer]:
|
||||||
from authentik.sources.ldap.api import LDAPSourceSerializer
|
from authentik.sources.ldap.api.sources import LDAPSourceSerializer
|
||||||
|
|
||||||
return LDAPSourceSerializer
|
return LDAPSourceSerializer
|
||||||
|
|
||||||
@@ -202,7 +208,7 @@ class LDAPPropertyMapping(PropertyMapping):

    @property
    def serializer(self) -> type[Serializer]:
-        from authentik.sources.ldap.api import LDAPPropertyMappingSerializer
+        from authentik.sources.ldap.api.property_mappings import LDAPPropertyMappingSerializer

        return LDAPPropertyMappingSerializer

@@ -212,3 +218,35 @@ class LDAPPropertyMapping(PropertyMapping):
    class Meta:
        verbose_name = _("LDAP Property Mapping")
        verbose_name_plural = _("LDAP Property Mappings")
+
+
+class LDAPUserSourceConnection(UserSourceConnection):
+    """Connection between an authentik user and an LDAP source."""
+
+    unique_identifier = models.TextField(unique=True)
+
+    @property
+    def serializer(self) -> Serializer:
+        from authentik.sources.ldap.api.source_connections import LDAPUserSourceConnectionSerializer
+
+        return LDAPUserSourceConnectionSerializer
+
+    class Meta:
+        verbose_name = _("LDAP User Source Connection")
+        verbose_name_plural = _("LDAP User Source Connections")
+
+
+class LDAPGroupSourceConnection(GroupSourceConnection):
+    """Connection between an authentik group and an LDAP source."""
+
+    unique_identifier = models.TextField(unique=True)
+
+    @property
+    def serializer(self) -> Serializer:
+        from authentik.sources.ldap.api.source_connections import LDAPUserSourceConnectionSerializer
+
+        return LDAPUserSourceConnectionSerializer
+
+    class Meta:
+        verbose_name = _("LDAP Group Source Connection")
+        verbose_name_plural = _("LDAP Group Source Connections")
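These connection models give every synced user and group a stable link back to the LDAP object it came from. A minimal sketch of querying that provenance after a sync (the source slug is made up; unique_identifier holds the value of the source's configured uniqueness attribute, e.g. objectSid on Active Directory):

from authentik.sources.ldap.models import LDAPSource, LDAPUserSourceConnection

source = LDAPSource.objects.get(slug="corp-ad")  # assumed to exist
for conn in LDAPUserSourceConnection.objects.filter(source=source):
    print(conn.user.username, "<-", conn.unique_identifier)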
@@ -133,7 +133,7 @@ class BaseLDAPSynchronizer:
    def build_user_properties(self, user_dn: str, **kwargs) -> dict[str, Any]:
        """Build attributes for User object based on property mappings."""
        props = self._build_object_properties(user_dn, self._source.property_mappings, **kwargs)
-        props["path"] = self._source.get_user_path()
+        props.setdefault("path", self._source.get_user_path())
        return props

    def build_group_properties(self, group_dn: str, **kwargs) -> dict[str, Any]:
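The switch to setdefault means a "path" produced by a property mapping now takes precedence; the source-level default is only applied when no mapping set one. In plain dict terms:

# Mapping already set a path: the default is ignored.
props = {"path": "goauthentik.io/sources/ldap/users/foo"}
props.setdefault("path", "goauthentik.io/sources/ldap/users")
assert props["path"] == "goauthentik.io/sources/ldap/users/foo"

# No mapping touched "path": the default applies.
props = {}
props.setdefault("path", "goauthentik.io/sources/ldap/users")
assert props["path"] == "goauthentik.io/sources/ldap/users"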
@@ -151,10 +151,14 @@ class BaseLDAPSynchronizer:
                continue
            mapping: LDAPPropertyMapping
            try:
-                value = mapping.evaluate(user=None, request=None, ldap=kwargs, dn=object_dn)
+                value = mapping.evaluate(
+                    user=None, request=None, ldap=kwargs, dn=object_dn, source=self._source
+                )
                if value is None:
+                    self._logger.warning("property mapping returned None", mapping=mapping)
                    continue
                if isinstance(value, (bytes)):
+                    self._logger.warning("property mapping returned bytes", mapping=mapping)
                    continue
                object_field = mapping.object_field
                if object_field.startswith("attributes."):
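Since evaluate() now also receives source, a mapping expression can branch on which source is being synced. A hypothetical expression body (the attribute names are illustrative; ldap, dn and source come from the call above):

if source.slug == "corp-ad":
    return ldap.get("sAMAccountName")
return ldap.get("uid")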
@@ -7,6 +7,7 @@ from ldap3 import ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, SUBTREE

from authentik.core.models import Group
from authentik.events.models import Event, EventAction
+from authentik.sources.ldap.models import LDAPGroupSourceConnection
from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer


@@ -63,7 +64,13 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
                    },
                    defaults,
                )
-                self._logger.debug("Created group with attributes", **defaults)
+                LDAPGroupSourceConnection.objects.update_or_create(
+                    defaults={
+                        "unique_identifier": uniq,
+                    },
+                    source=self._source,
+                    group=ak_group,
+                )
            except (IntegrityError, FieldError, TypeError, AttributeError) as exc:
                Event.new(
                    EventAction.CONFIGURATION_ERROR,
@@ -7,6 +7,7 @@ from ldap3 import ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, SUBTREE

from authentik.core.models import User
from authentik.events.models import Event, EventAction
+from authentik.sources.ldap.models import LDAPUserSourceConnection
from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer
from authentik.sources.ldap.sync.vendor.freeipa import FreeIPA
from authentik.sources.ldap.sync.vendor.ms_ad import MicrosoftActiveDirectory
@@ -58,6 +59,13 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
                ak_user, created = self.update_or_create_attributes(
                    User, {f"attributes__{LDAP_UNIQUENESS}": uniq}, defaults
                )
+                LDAPUserSourceConnection.objects.update_or_create(
+                    defaults={
+                        "unique_identifier": uniq,
+                    },
+                    source=self._source,
+                    user=ak_user,
+                )
            except (IntegrityError, FieldError, TypeError, AttributeError) as exc:
                Event.new(
                    EventAction.CONFIGURATION_ERROR,
@@ -72,6 +80,7 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
            else:
                self._logger.debug("Synced User", user=ak_user.username, created=created)
                user_count += 1
+                # TODO: Optimise vendor sync to not create a new connection
                MicrosoftActiveDirectory(self._source).sync(attributes, ak_user, created)
                FreeIPA(self._source).sync(attributes, ak_user, created)
        return user_count
@@ -47,9 +47,11 @@ class FreeIPA(BaseLDAPSynchronizer):
            return
        # For some reason, nsaccountlock is not defined properly in the schema as bool
        # hence we get it as a list of strings
-        _is_active = str(self._flatten(attributes.get("nsaccountlock", ["FALSE"])))
+        _is_locked = str(self._flatten(attributes.get("nsaccountlock", ["FALSE"])))
        # So we have to attempt to convert it to a bool
-        is_active = _is_active.lower() == "true"
+        is_locked = _is_locked.lower() == "true"
+        # And then invert it since freeipa saves locked and we save active
+        is_active = not is_locked
        if is_active != user.is_active:
            user.is_active = is_active
            user.save()
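The rename makes the semantics visible: nsaccountlock describes the locked state, so it has to be flattened, parsed and then inverted before it can be compared with Django's is_active flag. Worked through on concrete values (flattening stubbed out):

def freeipa_is_active(attributes: dict) -> bool:
    # attributes.get() yields e.g. ["TRUE"]; taking [0] stands in for self._flatten()
    _is_locked = str(attributes.get("nsaccountlock", ["FALSE"])[0])
    is_locked = _is_locked.lower() == "true"
    return not is_locked  # freeipa stores "locked", authentik stores "active"

assert freeipa_is_active({"nsaccountlock": ["TRUE"]}) is False
assert freeipa_is_active({}) is True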
@@ -4,6 +4,8 @@ from uuid import uuid4
from celery import chain, group
from django.core.cache import cache
from ldap3.core.exceptions import LDAPException
+from redis.exceptions import LockError
+from redis.lock import Lock
from structlog.stdlib import get_logger

from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
@@ -45,18 +47,28 @@ def ldap_sync_single(source_pk: str):
    source: LDAPSource = LDAPSource.objects.filter(pk=source_pk).first()
    if not source:
        return
-    task = chain(
-        # User and group sync can happen at once, they have no dependencies on each other
-        group(
-            ldap_sync_paginator(source, UserLDAPSynchronizer)
-            + ldap_sync_paginator(source, GroupLDAPSynchronizer),
-        ),
-        # Membership sync needs to run afterwards
-        group(
-            ldap_sync_paginator(source, MembershipLDAPSynchronizer),
-        ),
-    )
-    task()
+    lock = Lock(cache.client.get_client(), name=f"goauthentik.io/sources/ldap/sync-{source.slug}")
+    if lock.locked():
+        LOGGER.debug("LDAP sync locked, skipping task", source=source.slug)
+        return
+    try:
+        with lock:
+            task = chain(
+                # User and group sync can happen at once, they have no dependencies on each other
+                group(
+                    ldap_sync_paginator(source, UserLDAPSynchronizer)
+                    + ldap_sync_paginator(source, GroupLDAPSynchronizer),
+                ),
+                # Membership sync needs to run afterwards
+                group(
+                    ldap_sync_paginator(source, MembershipLDAPSynchronizer),
+                ),
+            )
+            task()
+    except LockError:
+        # This should never happen, we check if the lock is locked above so this
+        # would only happen if there was some other timeout
+        LOGGER.debug("Failed to acquire lock for LDAP sync", source=source.slug)


def ldap_sync_paginator(source: LDAPSource, sync: type[BaseLDAPSynchronizer]) -> list:
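The task uses the standard redis-py lock recipe: probe with Lock.locked() for a cheap early exit, then acquire through the context manager, treating LockError as "someone else won the race". Stripped of the celery specifics, the shape is roughly (standalone sketch; in the task above the client comes from Django's cache backend):

from redis import Redis
from redis.exceptions import LockError
from redis.lock import Lock

client = Redis()
lock = Lock(client, name="goauthentik.io/sources/ldap/sync-example")

if lock.locked():
    print("sync already running, skipping")
else:
    try:
        with lock:  # acquires on enter, releases on exit
            ...  # enqueue the chained sync tasks here
    except LockError:
        print("could not acquire the lock after all")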
@@ -55,7 +55,7 @@ def mock_ad_connection(password: str) -> Connection:
            "revision": 0,
            "objectSid": "user0",
            "objectClass": "person",
-            "distinguishedName": "cn=user0,ou=users,dc=goauthentik,dc=io",
+            "distinguishedName": "cn=user0,ou=foo,ou=users,dc=goauthentik,dc=io",
            "userAccountControl": (
                UserAccountControl.ACCOUNTDISABLE + UserAccountControl.NORMAL_ACCOUNT
            ),
@@ -2,7 +2,7 @@
from rest_framework.test import APITestCase

from authentik.lib.generators import generate_key
-from authentik.sources.ldap.api import LDAPSourceSerializer
+from authentik.sources.ldap.api.sources import LDAPSourceSerializer
from authentik.sources.ldap.models import LDAPSource

LDAP_PASSWORD = generate_key()
@@ -9,7 +9,7 @@ from authentik.core.models import Group, User
from authentik.core.tests.utils import create_test_admin_user
from authentik.events.models import Event, EventAction
from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
-from authentik.lib.generators import generate_key
+from authentik.lib.generators import generate_id, generate_key
from authentik.lib.utils.reflection import class_to_path
from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource
from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer
@@ -71,6 +71,28 @@ class LDAPSyncTests(TestCase):
        )
        self.assertTrue(events.exists())

+    def test_sync_mapping(self):
+        """Test property mappings"""
+        none = LDAPPropertyMapping.objects.create(
+            name=generate_id(), object_field="none", expression="return None"
+        )
+        byte_mapping = LDAPPropertyMapping.objects.create(
+            name=generate_id(), object_field="bytes", expression="return b''"
+        )
+        self.source.property_mappings.set(
+            LDAPPropertyMapping.objects.filter(
+                Q(managed__startswith="goauthentik.io/sources/ldap/default")
+                | Q(managed__startswith="goauthentik.io/sources/ldap/ms")
+            )
+        )
+        self.source.property_mappings.add(none, byte_mapping)
+        connection = MagicMock(return_value=mock_ad_connection(LDAP_PASSWORD))
+
+        # we basically just test that the mappings don't throw errors
+        with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
+            user_sync = UserLDAPSynchronizer(self.source)
+            user_sync.sync_full()
+
    def test_sync_users_ad(self):
        """Test user sync"""
        self.source.property_mappings.set(
@@ -79,7 +101,6 @@ class LDAPSyncTests(TestCase):
                | Q(managed__startswith="goauthentik.io/sources/ldap/ms")
            )
        )
-        self.source.save()
        connection = MagicMock(return_value=mock_ad_connection(LDAP_PASSWORD))

        # Create the user beforehand so we can set attributes and check they aren't removed
@@ -102,6 +123,7 @@ class LDAPSyncTests(TestCase):
        user = User.objects.filter(username="user0_sn").first()
        self.assertEqual(user.attributes["foo"], "bar")
        self.assertFalse(user.is_active)
+        self.assertEqual(user.path, "goauthentik.io/sources/ldap/users/foo")
        self.assertFalse(User.objects.filter(username="user1_sn").exists())

    def test_sync_users_openldap(self):
@@ -113,7 +135,6 @@ class LDAPSyncTests(TestCase):
                | Q(managed__startswith="goauthentik.io/sources/ldap/openldap")
            )
        )
-        self.source.save()
        connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
        with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
            user_sync = UserLDAPSynchronizer(self.source)
@@ -130,13 +151,13 @@ class LDAPSyncTests(TestCase):
                | Q(managed__startswith="goauthentik.io/sources/ldap/openldap")
            )
        )
-        self.source.save()
        connection = MagicMock(return_value=mock_freeipa_connection(LDAP_PASSWORD))
        with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
            user_sync = UserLDAPSynchronizer(self.source)
            user_sync.sync_full()
            self.assertTrue(User.objects.filter(username="user0_sn").exists())
            self.assertFalse(User.objects.filter(username="user1_sn").exists())
+            self.assertFalse(User.objects.get(username="user-nsaccountlock").is_active)

    def test_sync_groups_ad(self):
        """Test group sync"""
@@ -1,7 +1,10 @@
"""API URLs"""
-from authentik.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
+from authentik.sources.ldap.api.property_mappings import LDAPPropertyMappingViewSet
+from authentik.sources.ldap.api.source_connections import LDAPUserSourceConnectionViewSet
+from authentik.sources.ldap.api.sources import LDAPSourceViewSet

api_urlpatterns = [
    ("propertymappings/ldap", LDAPPropertyMappingViewSet),
+    ("sources/user_connections/ldap", LDAPUserSourceConnectionViewSet),
    ("sources/ldap", LDAPSourceViewSet),
]
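Registering the viewset under sources/user_connections/ldap exposes the connections over the REST API. Assuming authentik's usual /api/v3/ prefix and serializer fields matching the model above (both assumptions, not confirmed by this diff), listing them looks roughly like:

import requests

resp = requests.get(
    "https://authentik.example.com/api/v3/sources/user_connections/ldap/",
    headers={"Authorization": "Bearer <token>"},
    timeout=10,
)
for connection in resp.json()["results"]:
    print(connection["user"], connection["unique_identifier"])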
@@ -68,7 +68,7 @@ class OAuthSource(Source):
    # we're using Type[] instead of type[] here since type[] interferes with the property above
    @property
    def serializer(self) -> Type[Serializer]:
-        from authentik.sources.oauth.api.source import OAuthSourceSerializer
+        from authentik.sources.oauth.api.sources import OAuthSourceSerializer

        return OAuthSourceSerializer

@@ -234,7 +234,7 @@ class UserOAuthSourceConnection(UserSourceConnection):

    @property
    def serializer(self) -> Serializer:
-        from authentik.sources.oauth.api.source_connection import (
+        from authentik.sources.oauth.api.source_connections import (
            UserOAuthSourceConnectionSerializer,
        )

@@ -3,7 +3,7 @@ from django.test import TestCase
from django.urls import reverse
from requests_mock import Mocker

-from authentik.sources.oauth.api.source import OAuthSourceSerializer
+from authentik.sources.oauth.api.sources import OAuthSourceSerializer
from authentik.sources.oauth.models import OAuthSource


@@ -2,8 +2,8 @@

from django.urls import path

-from authentik.sources.oauth.api.source import OAuthSourceViewSet
-from authentik.sources.oauth.api.source_connection import UserOAuthSourceConnectionViewSet
+from authentik.sources.oauth.api.source_connections import UserOAuthSourceConnectionViewSet
+from authentik.sources.oauth.api.sources import OAuthSourceViewSet
from authentik.sources.oauth.types.registry import RequestKind
from authentik.sources.oauth.views.dispatcher import DispatcherView
authentik/stages/authenticator/__init__.py (new file, 129 lines)
@@ -0,0 +1,129 @@
"""Authenticator devices helpers"""
from django.db import transaction


def verify_token(user, device_id, token):
    """
    Attempts to verify a :term:`token` against a specific device, identified by
    :attr:`~authentik.stages.authenticator.models.Device.persistent_id`.

    This wraps the verification process in a transaction to ensure that things
    like throttling policies are properly enforced.

    :param user: The user supplying the token.
    :type user: :class:`~django.contrib.auth.models.User`

    :param str device_id: A device's persistent_id value.

    :param str token: An OTP token to verify.

    :returns: The device that accepted ``token``, if any.
    :rtype: :class:`~authentik.stages.authenticator.models.Device` or ``None``
    """
    from authentik.stages.authenticator.models import Device

    verified = None
    with transaction.atomic():
        device = Device.from_persistent_id(device_id, for_verify=True)
        if (device is not None) and (device.user_id == user.pk) and device.verify_token(token):
            verified = device

    return verified


def match_token(user, token):
    """
    Attempts to verify a :term:`token` on every device attached to the given
    user until one of them succeeds.

    .. warning::

        This originally existed for more convenient integration with the admin
        site. Its use is no longer recommended and it is not guaranteed to
        interact well with more recent features (such as throttling). Tokens
        should always be verified against specific devices.

    :param user: The user supplying the token.
    :type user: :class:`~django.contrib.auth.models.User`

    :param str token: An OTP token to verify.

    :returns: The device that accepted ``token``, if any.
    :rtype: :class:`~authentik.stages.authenticator.models.Device` or ``None``
    """
    with transaction.atomic():
        for device in devices_for_user(user, for_verify=True):
            if device.verify_token(token):
                break
        else:
            device = None

    return device


def devices_for_user(user, confirmed=True, for_verify=False):
    """
    Return an iterable of all devices registered to the given user.

    Returns an empty iterable for anonymous users.

    :param user: standard or custom user object.
    :type user: :class:`~django.contrib.auth.models.User`

    :param bool confirmed: If ``None``, all matching devices are returned.
        Otherwise, this can be any true or false value to limit the query
        to confirmed or unconfirmed devices, respectively.

    :param bool for_verify: If ``True``, we'll load the devices with
        :meth:`~django.db.models.query.QuerySet.select_for_update` to prevent
        concurrent verifications from succeeding. In which case, this must be
        called inside a transaction.

    :rtype: iterable
    """
    if user.is_anonymous:
        return

    for model in device_classes():
        device_set = model.objects.devices_for_user(user, confirmed=confirmed)
        if for_verify:
            device_set = device_set.select_for_update()

        yield from device_set


def user_has_device(user, confirmed=True):
    """
    Return ``True`` if the user has at least one device.

    Returns ``False`` for anonymous users.

    :param user: standard or custom user object.
    :type user: :class:`~django.contrib.auth.models.User`

    :param confirmed: If ``None``, all matching devices are considered.
        Otherwise, this can be any true or false value to limit the query
        to confirmed or unconfirmed devices, respectively.
    """
    try:
        next(devices_for_user(user, confirmed=confirmed))
    except StopIteration:
        has_device = False
    else:
        has_device = True

    return has_device


def device_classes():
    """
    Returns an iterable of all loaded device models.
    """
    from django.apps import apps  # isort: skip
    from authentik.stages.authenticator.models import Device

    for config in apps.get_app_configs():
        for model in config.get_models():
            if issubclass(model, Device):
                yield model
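A short usage sketch of these helpers, following the contracts in the docstrings (the caller supplies the user and token; the device id is whatever persistent_id the user picked):

from authentik.stages.authenticator import devices_for_user, user_has_device, verify_token


def challenge_and_verify(user, device_id: str, token: str):
    """Challenge confirmed interactive devices, then verify one token."""
    if not user_has_device(user):
        return None
    for device in devices_for_user(user):
        if device.is_interactive():
            device.generate_challenge()
    # verify_token wraps the check in a transaction and pins it to one device
    return verify_token(user, device_id, token)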
authentik/stages/authenticator/apps.py (new file, 10 lines)
@@ -0,0 +1,10 @@
"""Authenticator"""
from django.apps import AppConfig


class AuthentikStageAuthenticatorConfig(AppConfig):
    """Authenticator App config"""

    name = "authentik.stages.authenticator"
    label = "authentik_stages_authenticator"
    verbose_name = "authentik Stages.Authenticator"
authentik/stages/authenticator/models.py (new file, 401 lines)
@@ -0,0 +1,401 @@
"""Base authenticator models"""
from datetime import timedelta

from django.apps import apps
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.utils import timezone
from django.utils.functional import cached_property

from authentik.core.models import User
from authentik.stages.authenticator.util import random_number_token


class DeviceManager(models.Manager):
    """
    The :class:`~django.db.models.Manager` object installed as
    ``Device.objects``.
    """

    def devices_for_user(self, user, confirmed=None):
        """
        Returns a queryset for all devices of this class that belong to the
        given user.

        :param user: The user.
        :type user: :class:`~django.contrib.auth.models.User`

        :param confirmed: If ``None``, all matching devices are returned.
            Otherwise, this can be any true or false value to limit the query
            to confirmed or unconfirmed devices, respectively.
        """
        devices = self.model.objects.filter(user=user)
        if confirmed is not None:
            devices = devices.filter(confirmed=bool(confirmed))

        return devices


class Device(models.Model):
    """
    Abstract base model for a :term:`device` attached to a user. Plugins must
    subclass this to define their OTP models.

    .. _unsaved_device_warning:

    .. warning::

        OTP devices are inherently stateful. For example, verifying a token is
        logically a mutating operation on the device, which may involve
        incrementing a counter or otherwise consuming a token. A device must be
        committed to the database before it can be used in any way.

    .. attribute:: user

        *ForeignKey*: Foreign key to your user model, as configured by
        :setting:`AUTH_USER_MODEL` (:class:`~django.contrib.auth.models.User`
        by default).

    .. attribute:: name

        *CharField*: A human-readable name to help the user identify their
        devices.

    .. attribute:: confirmed

        *BooleanField*: A boolean value that tells us whether this device has
        been confirmed as valid. It defaults to ``True``, but subclasses or
        individual deployments can force it to ``False`` if they wish to create
        a device and then ask the user for confirmation. As a rule, built-in
        APIs that enumerate devices will only include those that are confirmed.

    .. attribute:: objects

        A :class:`~authentik.stages.authenticator.models.DeviceManager`.
    """

    user = models.ForeignKey(
        User,
        help_text="The user that this device belongs to.",
        on_delete=models.CASCADE,
    )

    name = models.CharField(max_length=64, help_text="The human-readable name of this device.")

    confirmed = models.BooleanField(default=True, help_text="Is this device ready for use?")

    objects = DeviceManager()

    class Meta:
        abstract = True

    def __str__(self):
        try:
            user = self.user
        except ObjectDoesNotExist:
            user = None

        return "{0} ({1})".format(self.name, user)

    @property
    def persistent_id(self):
        """
        A stable device identifier for forms and APIs.
        """
        return "{0}/{1}".format(self.model_label(), self.id)

    @classmethod
    def model_label(cls):
        """
        Returns an identifier for this Django model class.

        This is just the standard "<app_label>.<model_name>" form.
        """
        return "{0}.{1}".format(cls._meta.app_label, cls._meta.model_name)

    @classmethod
    def from_persistent_id(cls, persistent_id, for_verify=False):
        """
        Loads a device from its persistent id::

            device == Device.from_persistent_id(device.persistent_id)

        :param bool for_verify: If ``True``, we'll load the device with
            :meth:`~django.db.models.query.QuerySet.select_for_update` to
            prevent concurrent verifications from succeeding. In which case,
            this must be called inside a transaction.
        """
        device = None

        try:
            model_label, device_id = persistent_id.rsplit("/", 1)
            app_label, model_name = model_label.split(".")

            device_cls = apps.get_model(app_label, model_name)
            if issubclass(device_cls, Device):
                device_set = device_cls.objects.filter(id=int(device_id))
                if for_verify:
                    device_set = device_set.select_for_update()
                device = device_set.first()
        except (ValueError, LookupError):
            pass

        return device

    def is_interactive(self):
        """
        Returns ``True`` if this is an interactive device. The default
        implementation returns ``True`` if
        :meth:`~authentik.stages.authenticator.models.Device.generate_challenge` has been
        overridden, but subclasses are welcome to provide smarter
        implementations.

        :rtype: bool
        """
        return not hasattr(self.generate_challenge, "stub")

    def generate_challenge(self):
        """
        Generates a challenge value that the user will need to produce a token.
        This method is permitted to have side effects, such as transmitting
        information to the user through some other channel (email or SMS,
        perhaps). And, of course, some devices may need to commit the
        challenge to the database.

        :returns: A message to the user. This should be a string that fits
            comfortably in the template ``'OTP Challenge: {0}'``. This may
            return ``None`` if this device is not interactive.
        :rtype: string or ``None``

        :raises: Any :exc:`~exceptions.Exception` is permitted. Callers should
            trap ``Exception`` and report it to the user.
        """
        return None

    generate_challenge.stub = True

    def verify_is_allowed(self):
        """
        Checks whether it is permissible to call :meth:`verify_token`. If it is
        allowed, returns ``(True, None)``. Otherwise returns ``(False,
        data_dict)``, where ``data_dict`` contains extra information, defined
        by the implementation.

        This method can be used to implement throttling or locking, for
        example. Client code should check this method before calling
        :meth:`verify_token` and report problems to the user.

        To report specific problems, the data dictionary can include a
        ``'reason'`` member with a value from the constants in
        :class:`VerifyNotAllowed`. Otherwise, an ``'error_message'`` member
        should be provided with an error message.

        :meth:`verify_token` should also call this method and return False if
        verification is not allowed.

        :rtype: (bool, dict or ``None``)
        """
        return (True, None)

    def verify_token(self, token):
        """
        Verifies a token. As a rule, the token should no longer be valid if
        this returns ``True``.

        :param str token: The OTP token provided by the user.
        :rtype: bool
        """
        return False


class SideChannelDevice(Device):
    """
    Abstract base model for a side-channel :term:`device` attached to a user.

    This model implements token generation, verification and expiration, so the
    concrete devices only have to implement delivery.
    """

    token = models.CharField(max_length=16, blank=True, null=True)

    valid_until = models.DateTimeField(
        default=timezone.now,
        help_text="The timestamp of the moment of expiry of the saved token.",
    )

    class Meta:
        abstract = True

    def generate_token(self, length=6, valid_secs=300, commit=True):
        """
        Generates a token of the specified length, then sets it on the model
        and sets the expiration of the token on the model.

        Pass 'commit=False' to avoid calling self.save().

        :param int length: Number of decimal digits in the generated token.
        :param int valid_secs: Amount of seconds the token should be valid.
        :param bool commit: Whether to autosave the generated token.
        """
        self.token = random_number_token(length)
        self.valid_until = timezone.now() + timedelta(seconds=valid_secs)
        if commit:
            self.save()

    def verify_token(self, token):
        """
        Verifies a token by content and expiry.

        On success, the token is cleared and the device saved.

        :param str token: The OTP token provided by the user.
        :rtype: bool
        """
        _now = timezone.now()

        if (self.token is not None) and (token == self.token) and (_now < self.valid_until):
            self.token = None
            self.valid_until = _now
            self.save()

            return True
        return False


class VerifyNotAllowed:
    """
    Constants that may be returned in the ``reason`` member of the extra
    information dictionary returned by
    :meth:`~authentik.stages.authenticator.models.Device.verify_is_allowed`

    .. data:: N_FAILED_ATTEMPTS

        Indicates that verification is disallowed because of ``n`` successive
        failed attempts. The data dictionary should include the value of ``n``
        in member ``failure_count``
    """

    N_FAILED_ATTEMPTS = "N_FAILED_ATTEMPTS"


class ThrottlingMixin(models.Model):
    """
    Mixin class for models that want throttling behaviour.

    This implements exponential back-off for verifying tokens. Subclasses must
    implement :meth:`get_throttle_factor`, and must use the
    :meth:`verify_is_allowed`, :meth:`throttle_reset` and
    :meth:`throttle_increment` methods from within their verify_token() method.

    See the implementation of
    :class:`~authentik.stages.authenticator.plugins.otp_email.models.EmailDevice` for an example.
    """

    throttling_failure_timestamp = models.DateTimeField(
        null=True,
        blank=True,
        default=None,
        help_text=(
            "A timestamp of the last failed verification attempt. "
            "Null if last attempt succeeded."
        ),
    )

    throttling_failure_count = models.PositiveIntegerField(
        default=0, help_text="Number of successive failed attempts."
    )

    def verify_is_allowed(self):
        """
        If verification is allowed, returns ``(True, None)``.
        Otherwise, returns ``(False, data_dict)``.

        ``data_dict`` contains further information. Currently it can be::

            {
                'reason': VerifyNotAllowed.N_FAILED_ATTEMPTS,
                'failure_count': n
            }

        where ``n`` is the number of successive failures. See
        :class:`~authentik.stages.authenticator.models.VerifyNotAllowed`.
        """
        if (
            self.throttling_enabled
            and self.throttling_failure_count > 0
            and self.throttling_failure_timestamp is not None
        ):
            now = timezone.now()
            delay = (now - self.throttling_failure_timestamp).total_seconds()
            # Required delays should be 1, 2, 4, 8 ...
            delay_required = self.get_throttle_factor() * (2 ** (self.throttling_failure_count - 1))
            if delay < delay_required:
                return (
                    False,
                    {
                        "reason": VerifyNotAllowed.N_FAILED_ATTEMPTS,
                        "failure_count": self.throttling_failure_count,
                        "locked_until": self.throttling_failure_timestamp
                        + timedelta(seconds=delay_required),
                    },
                )

        return super().verify_is_allowed()

    def throttle_reset(self, commit=True):
        """
        Call this method to reset throttling (normally when a verify attempt
        succeeded).

        Pass 'commit=False' to avoid calling self.save().
        """
        self.throttling_failure_timestamp = None
        self.throttling_failure_count = 0
        if commit:
            self.save()

    def throttle_increment(self, commit=True):
        """
        Call this method to increase throttling (normally when a verify attempt
        failed).

        Pass 'commit=False' to avoid calling self.save().
        """
        self.throttling_failure_timestamp = timezone.now()
        self.throttling_failure_count += 1
        if commit:
            self.save()

    @cached_property
    def throttling_enabled(self) -> bool:
        """Check if throttling is enabled"""
        return self.get_throttle_factor() > 0

    def get_throttle_factor(self):  # pragma: no cover
        """
        This must be implemented to return the throttle factor.

        The number of seconds required between verification attempts will be
        :math:`c2^{n-1}` where `c` is this factor and `n` is the number of
        previous failures. A factor of 1 translates to delays of 1, 2, 4, 8,
        etc. seconds. A factor of 0 disables the throttling.

        Normally this is just a wrapper for a plugin-specific setting like
        :setting:`OTP_EMAIL_THROTTLE_FACTOR`.
        """
        raise NotImplementedError()

    class Meta:
        abstract = True
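To see how these pieces compose, here is a hypothetical concrete device wiring SideChannelDevice together with ThrottlingMixin, following the contract the docstrings describe (the device class and the settings name are made up):

from django.conf import settings

from authentik.stages.authenticator.models import SideChannelDevice, ThrottlingMixin


class SMSDevice(ThrottlingMixin, SideChannelDevice):
    """Hypothetical device that delivers its token out of band."""

    def get_throttle_factor(self):
        # made-up setting; a factor of 1 yields delays of 1, 2, 4, 8... seconds
        return getattr(settings, "OTP_SMS_THROTTLE_FACTOR", 1)

    def verify_token(self, token):
        allowed, _ = self.verify_is_allowed()
        if not allowed:
            return False
        verified = super().verify_token(token)  # SideChannelDevice's check
        if verified:
            self.throttle_reset()
        else:
            self.throttle_increment()
        return verified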
authentik/stages/authenticator/oath.py (new file, 199 lines)
@@ -0,0 +1,199 @@
"""OATH helpers"""
import hmac
from hashlib import sha1
from struct import pack
from time import time


# pylint: disable=invalid-name
def hotp(key: bytes, counter: int, digits=6) -> int:
    """
    Implementation of the HOTP algorithm from `RFC 4226
    <http://tools.ietf.org/html/rfc4226#section-5>`_.

    :param bytes key: The shared secret. A 20-byte string is recommended.
    :param int counter: The password counter.
    :param int digits: The number of decimal digits to generate.

    :returns: The HOTP token.
    :rtype: int

    >>> key = b'12345678901234567890'
    >>> for c in range(10):
    ...     hotp(key, c)
    755224
    287082
    359152
    969429
    338314
    254676
    287922
    162583
    399871
    520489
    """
    msg = pack(b">Q", counter)
    hs = hmac.new(key, msg, sha1).digest()
    hs = list(iter(hs))

    offset = hs[19] & 0x0F
    bin_code = (
        (hs[offset] & 0x7F) << 24 | hs[offset + 1] << 16 | hs[offset + 2] << 8 | hs[offset + 3]
    )
    return bin_code % pow(10, digits)


def totp(key: bytes, step=30, t0=0, digits=6, drift=0) -> int:
    """
    Implementation of the TOTP algorithm from `RFC 6238
    <http://tools.ietf.org/html/rfc6238#section-4>`_.

    :param bytes key: The shared secret. A 20-byte string is recommended.
    :param int step: The time step in seconds. The time-based code changes
        every ``step`` seconds.
    :param int t0: The Unix time at which to start counting time steps.
    :param int digits: The number of decimal digits to generate.
    :param int drift: The number of time steps to add or remove. Delays and
        clock differences might mean that you have to look back or forward a
        step or two in order to match a token.

    :returns: The TOTP token.
    :rtype: int

    >>> key = b'12345678901234567890'
    >>> now = int(time())
    >>> for delta in range(0, 200, 20):
    ...     totp(key, t0=(now-delta))
    755224
    755224
    287082
    359152
    359152
    969429
    338314
    338314
    254676
    287922
    """
    return TOTP(key, step, t0, digits, drift).token()


class TOTP:
    """
    An alternate TOTP interface.

    This provides access to intermediate steps of the computation. This is a
    living object: the return values of ``t`` and ``token`` will change along
    with other properties and with the passage of time.

    :param bytes key: The shared secret. A 20-byte string is recommended.
    :param int step: The time step in seconds. The time-based code changes
        every ``step`` seconds.
    :param int t0: The Unix time at which to start counting time steps.
    :param int digits: The number of decimal digits to generate.
    :param int drift: The number of time steps to add or remove. Delays and
        clock differences might mean that you have to look back or forward a
        step or two in order to match a token.

    >>> key = b'12345678901234567890'
    >>> totp = TOTP(key)
    >>> totp.time = 0
    >>> totp.t()
    0
    >>> totp.token()
    755224
    >>> totp.time = 30
    >>> totp.t()
    1
    >>> totp.token()
    287082
    >>> totp.verify(287082)
    True
    >>> totp.verify(359152)
    False
    >>> totp.verify(359152, tolerance=1)
    True
    >>> totp.drift
    1
    >>> totp.drift = 0
    >>> totp.verify(359152, tolerance=1, min_t=3)
    False
    >>> totp.drift
    0
    >>> del totp.time
    >>> totp.t0 = int(time()) - 60
    >>> totp.t()
    2
    >>> totp.token()
    359152
    """

    # pylint: disable=too-many-arguments
    def __init__(self, key: bytes, step=30, t0=0, digits=6, drift=0):
        self.key = key
        self.step = step
        self.t0 = t0
        self.digits = digits
        self.drift = drift
        self._time = None

    def token(self):
        """The computed TOTP token."""
        return hotp(self.key, self.t(), digits=self.digits)

    def t(self):
        """The computed time step."""
        return ((int(self.time) - self.t0) // self.step) + self.drift

    @property
    def time(self):
        """
        The current time.

        By default, this returns time.time() each time it is accessed. If you
        want to generate a token at a specific time, you can set this property
        to a fixed value instead. Deleting the value returns it to its 'live'
        state.
        """
        return self._time if (self._time is not None) else time()

    @time.setter
    def time(self, value):
        self._time = value

    @time.deleter
    def time(self):
        self._time = None

    def verify(self, token, tolerance=0, min_t=None):
        """
        A high-level verification helper.

        :param int token: The provided token.
        :param int tolerance: The amount of clock drift you're willing to
            accommodate, in steps. We'll look for the token at t values in
            [t - tolerance, t + tolerance].
        :param int min_t: The minimum t value we'll accept. As a rule, this
            should be one larger than the largest t value of any previously
            accepted token.
        :rtype: bool

        Iff this returns True, `self.drift` will be updated to reflect the
        drift value that was necessary to match the token.
        """
        drift_orig = self.drift
        verified = False

        for offset in range(-tolerance, tolerance + 1):
            self.drift = drift_orig + offset
            if (min_t is not None) and (self.t() < min_t):
                continue
            if self.token() == token:
                verified = True
                break
        else:
            self.drift = drift_orig

        return verified
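In practice the TOTP class is driven like this when checking a user-submitted code, keeping the drift that verify() records on success (key handling is illustrative only):

from authentik.stages.authenticator.oath import TOTP

key = b"12345678901234567890"  # demo secret from the doctests above
totp = TOTP(key, step=30, digits=6)
totp.drift = 0  # would normally be loaded from the device row

if totp.verify(123456, tolerance=1):
    # verify() updated totp.drift to the offset that matched;
    # a real device would persist it for the next check
    print("accepted with drift", totp.drift)
else:
    print("rejected")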
220
authentik/stages/authenticator/tests.py
Normal file
220
authentik/stages/authenticator/tests.py
Normal file
@ -0,0 +1,220 @@
|
|||||||
|
"""Base authenticator tests"""
|
||||||
|
from datetime import timedelta
|
||||||
|
from threading import Thread
|
||||||
|
|
||||||
|
from django.contrib.auth.models import AnonymousUser
|
||||||
|
from django.db import connection
|
||||||
|
from django.test import TestCase, TransactionTestCase
|
||||||
|
from django.test.utils import override_settings
|
||||||
|
from django.utils import timezone
|
||||||
|
from freezegun import freeze_time
|
||||||
|
|
||||||
|
from authentik.core.tests.utils import create_test_admin_user
|
||||||
|
from authentik.lib.generators import generate_id
|
||||||
|
from authentik.stages.authenticator import match_token, user_has_device, verify_token
|
||||||
|
from authentik.stages.authenticator.models import Device, VerifyNotAllowed
|
||||||
|
|
||||||
|
|
||||||
|
class TestThread(Thread):
|
||||||
|
"Django testing quirk: threads have to close their DB connections."
|
||||||
|
|
||||||
|
__test__ = False
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
super().run()
|
||||||
|
connection.close()
|
||||||
|
|
||||||
|
|
||||||
|
class ThrottlingTestMixin:
|
||||||
|
"""
|
||||||
|
Generic tests for throttled devices.
|
||||||
|
|
||||||
|
Any concrete device implementation that uses throttling should define a
|
||||||
|
TestCase subclass that includes this as a base class. This will help verify
|
||||||
|
a correct integration of ThrottlingMixin.
|
||||||
|
|
||||||
|
Subclasses are responsible for populating self.device with a device to test
|
||||||
|
as well as implementing methods to generate tokens to test with.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
device: Device
|
||||||
|
|
||||||
|
def valid_token(self):
|
||||||
|
"""Returns a valid token to pass to our device under test."""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def invalid_token(self):
|
||||||
|
"""Returns an invalid token to pass to our device under test."""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
#
|
||||||
|
# Tests
|
||||||
|
#
|
||||||
|
|
||||||
|
def test_delay_imposed_after_fail(self):
|
||||||
|
"""Test delay imposed after fail"""
|
||||||
|
verified1 = self.device.verify_token(self.invalid_token())
|
||||||
|
self.assertFalse(verified1)
|
||||||
|
verified2 = self.device.verify_token(self.valid_token())
|
||||||
|
self.assertFalse(verified2)
|
||||||
|
|
||||||
|
def test_delay_after_fail_expires(self):
|
||||||
|
"""Test delay after fail expires"""
|
||||||
|
verified1 = self.device.verify_token(self.invalid_token())
|
||||||
|
self.assertFalse(verified1)
|
||||||
|
with freeze_time() as frozen_time:
|
||||||
|
# With default settings initial delay is 1 second
|
||||||
|
frozen_time.tick(delta=timedelta(seconds=1.1))
|
||||||
|
verified2 = self.device.verify_token(self.valid_token())
|
||||||
|
self.assertTrue(verified2)
|
||||||
|
|
||||||
|
def test_throttling_failure_count(self):
|
||||||
|
"""Test throttling failure count"""
|
||||||
|
self.assertEqual(self.device.throttling_failure_count, 0)
|
||||||
|
for _ in range(0, 5):
|
||||||
|
self.device.verify_token(self.invalid_token())
|
||||||
|
# Only the first attempt will increase throttling_failure_count,
|
||||||
|
# the others will all be within 1 second of first
|
||||||
|
# and therefore not count as attempts.
|
||||||
|
self.assertEqual(self.device.throttling_failure_count, 1)
|
||||||
|
|
||||||
|
def test_verify_is_allowed(self):
|
||||||
|
"""Test verify allowed"""
|
||||||
|
# Initially should be allowed
|
||||||
|
verify_is_allowed1, data1 = self.device.verify_is_allowed()
|
||||||
|
self.assertEqual(verify_is_allowed1, True)
|
||||||
|
self.assertEqual(data1, None)
|
||||||
|
|
||||||
|
# After failure, verify is not allowed
|
||||||
|
with freeze_time():
|
||||||
|
self.device.verify_token(self.invalid_token())
|
||||||
|
verify_is_allowed2, data2 = self.device.verify_is_allowed()
|
||||||
|
self.assertEqual(verify_is_allowed2, False)
|
||||||
|
self.assertEqual(
|
||||||
|
data2,
|
||||||
|
{
|
||||||
|
"reason": VerifyNotAllowed.N_FAILED_ATTEMPTS,
|
||||||
|
"failure_count": 1,
|
||||||
|
"locked_until": timezone.now() + timezone.timedelta(seconds=1),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
# After a successful attempt, should be allowed again
|
||||||
|
with freeze_time() as frozen_time:
|
||||||
|
frozen_time.tick(delta=timedelta(seconds=1.1))
|
||||||
|
        self.device.verify_token(self.valid_token())

        verify_is_allowed3, data3 = self.device.verify_is_allowed()
        self.assertEqual(verify_is_allowed3, True)
        self.assertEqual(data3, None)


@override_settings(OTP_STATIC_THROTTLE_FACTOR=0)
class APITestCase(TestCase):
    """Test API"""

    def setUp(self):
        self.alice = create_test_admin_user("alice")
        self.bob = create_test_admin_user("bob")
        device = self.alice.staticdevice_set.create()
        self.valid = generate_id(length=16)
        device.token_set.create(token=self.valid)

    def test_user_has_device(self):
        """Test user_has_device"""
        with self.subTest(user="anonymous"):
            self.assertFalse(user_has_device(AnonymousUser()))
        with self.subTest(user="alice"):
            self.assertTrue(user_has_device(self.alice))
        with self.subTest(user="bob"):
            self.assertFalse(user_has_device(self.bob))

    def test_verify_token(self):
        """Test verify_token"""
        device = self.alice.staticdevice_set.first()

        verified = verify_token(self.alice, device.persistent_id, "bogus")
        self.assertIsNone(verified)

        verified = verify_token(self.alice, device.persistent_id, self.valid)
        self.assertIsNotNone(verified)

    def test_match_token(self):
        """Test match_token"""
        verified = match_token(self.alice, "bogus")
        self.assertIsNone(verified)

        verified = match_token(self.alice, self.valid)
        self.assertEqual(verified, self.alice.staticdevice_set.first())


@override_settings(OTP_STATIC_THROTTLE_FACTOR=0)
class ConcurrencyTestCase(TransactionTestCase):
    """Test concurrent verifications"""

    def setUp(self):
        self.alice = create_test_admin_user("alice")
        self.bob = create_test_admin_user("bob")
        self.valid = generate_id(length=16)
        for user in [self.alice, self.bob]:
            device = user.staticdevice_set.create()
            device.token_set.create(token=self.valid)

    def test_verify_token(self):
        """Test verify_token in a thread"""

        class VerifyThread(Thread):
            """Verifier thread"""

            __test__ = False

            def __init__(self, user, device_id, token):
                super().__init__()

                self.user = user
                self.device_id = device_id
                self.token = token

                self.verified = None

            def run(self):
                self.verified = verify_token(self.user, self.device_id, self.token)
                connection.close()

        device = self.alice.staticdevice_set.get()
        threads = [VerifyThread(device.user, device.persistent_id, self.valid) for _ in range(10)]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()

        self.assertEqual(sum(1 for t in threads if t.verified is not None), 1)

    def test_match_token(self):
        """Test match_token in a thread"""

        class VerifyThread(Thread):
            """Verifier thread"""

            __test__ = False

            def __init__(self, user, token):
                super().__init__()

                self.user = user
                self.token = token

                self.verified = None

            def run(self):
                self.verified = match_token(self.user, self.token)
                connection.close()

        threads = [VerifyThread(self.alice, self.valid) for _ in range(10)]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()

        self.assertEqual(sum(1 for t in threads if t.verified is not None), 1)
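The concurrency tests above pin down the key invariant of static tokens: a matching token row is consumed atomically, so exactly one of ten racing threads verifies successfully. For orientation, here is a minimal sketch of how calling code might use the same helpers — an illustration only, assuming `user_has_device` and `match_token` are importable from `authentik.stages.authenticator` as the tests imply:

    # Editor's sketch, not part of this diff.
    from authentik.stages.authenticator import match_token, user_has_device


    def consume_static_token(user, token: str) -> bool:
        """Return True if `token` matched (and consumed) one of the user's devices."""
        if not user_has_device(user):
            return False  # nothing enrolled, nothing to verify against
        # match_token returns the first device that accepts the token, or None.
        # For static devices a successful match deletes the token row, which is
        # why the concurrent test above counts exactly one success.
        return match_token(user, token) is not None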
86  authentik/stages/authenticator/util.py  Normal file
@@ -0,0 +1,86 @@
"""Authenticator utils"""
import random
import string
from binascii import unhexlify
from os import urandom

from django.core.exceptions import ValidationError


def hex_validator(length=0):
    """
    Returns a function to be used as a model validator for a hex-encoded
    CharField. This is useful for secret keys of all kinds::

        def key_validator(value):
            return hex_validator(20)(value)

        key = models.CharField(max_length=40,
            validators=[key_validator], help_text='A hex-encoded 20-byte secret key')

    :param int length: If greater than 0, validation will fail unless the
        decoded value is exactly this number of bytes.

    :rtype: function

    >>> hex_validator()('0123456789abcdef')
    >>> hex_validator(8)(b'0123456789abcdef')
    >>> hex_validator()('phlebotinum')  # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
        ...
    ValidationError: ['phlebotinum is not valid hex-encoded data.']
    >>> hex_validator(9)('0123456789abcdef')  # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
        ...
    ValidationError: ['0123456789abcdef does not represent exactly 9 bytes.']
    """

    def _validator(value):
        try:
            if isinstance(value, str):
                value = value.encode()

            unhexlify(value)
        except Exception:
            raise ValidationError("{0} is not valid hex-encoded data.".format(value))

        if (length > 0) and (len(value) != length * 2):
            raise ValidationError("{0} does not represent exactly {1} bytes.".format(value, length))

    return _validator


def random_hex(length=20):
    """
    Returns a string of random bytes encoded as hex.

    This uses :func:`os.urandom`, so it should be suitable for generating
    cryptographic keys.

    :param int length: The number of (decoded) bytes to return.

    :returns: A string of hex digits.
    :rtype: str

    """
    return urandom(length).hex()


def random_number_token(length=6):
    """
    Returns a string of random decimal digits.

    :param int length: The number of digits to return.

    :returns: A string of decimal digits.
    :rtype: str

    """
    rand = random.SystemRandom()

    if hasattr(rand, "choices"):
        digits = rand.choices(string.digits, k=length)
    else:
        digits = (rand.choice(string.digits) for i in range(length))

    return "".join(digits)
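Taken together, the three helpers cover key generation and key validation. A short usage sketch (illustrative; the import path matches the new file above):

    # Editor's sketch, not part of this diff.
    from authentik.stages.authenticator.util import (
        hex_validator,
        random_hex,
        random_number_token,
    )

    key = random_hex(20)          # 40 hex characters sourced from os.urandom
    hex_validator(20)(key)        # passes silently: decodes to exactly 20 bytes
    code = random_number_token()  # six decimal digits, e.g. "094712"

Note that `random.SystemRandom` has inherited `choices()` since Python 3.6, so the `hasattr` fallback in `random_number_token` only matters on long-unsupported interpreters.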
@@ -5,7 +5,6 @@ from django.contrib.auth import get_user_model
 from django.db import models
 from django.utils.translation import gettext_lazy as _
 from django.views import View
-from django_otp.models import Device
 from duo_client.admin import Admin
 from duo_client.auth import Auth
 from rest_framework.serializers import BaseSerializer, Serializer
@@ -14,6 +13,7 @@ from authentik.core.types import UserSettingSerializer
 from authentik.flows.models import ConfigurableStage, FriendlyNamedStage, Stage
 from authentik.lib.models import SerializerModel
 from authentik.lib.utils.http import authentik_user_agent
+from authentik.stages.authenticator.models import Device


 class AuthenticatorDuoStage(ConfigurableStage, FriendlyNamedStage, Stage):
@@ -6,7 +6,6 @@ from django.contrib.auth import get_user_model
 from django.db import models
 from django.utils.translation import gettext_lazy as _
 from django.views import View
-from django_otp.models import SideChannelDevice
 from requests.exceptions import RequestException
 from rest_framework.exceptions import ValidationError
 from rest_framework.serializers import BaseSerializer
@@ -21,6 +20,7 @@ from authentik.flows.models import ConfigurableStage, FriendlyNamedStage, Stage
 from authentik.lib.models import SerializerModel
 from authentik.lib.utils.errors import exception_to_string
 from authentik.lib.utils.http import get_http_session
+from authentik.stages.authenticator.models import SideChannelDevice


 LOGGER = get_logger()
@@ -1,6 +1,5 @@
 """AuthenticatorStaticStage API Views"""
 from django_filters.rest_framework import DjangoFilterBackend
-from django_otp.plugins.otp_static.models import StaticDevice, StaticToken
 from rest_framework import mixins
 from rest_framework.filters import OrderingFilter, SearchFilter
 from rest_framework.permissions import IsAdminUser
@@ -10,7 +9,11 @@ from rest_framework.viewsets import GenericViewSet, ModelViewSet
 from authentik.api.authorization import OwnerFilter, OwnerPermissions
 from authentik.core.api.used_by import UsedByMixin
 from authentik.flows.api.stages import StageSerializer
-from authentik.stages.authenticator_static.models import AuthenticatorStaticStage
+from authentik.stages.authenticator_static.models import (
+    AuthenticatorStaticStage,
+    StaticDevice,
+    StaticToken,
+)


 class AuthenticatorStaticStageSerializer(StageSerializer):
@@ -0,0 +1,70 @@
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        (
            "authentik_stages_authenticator_static",
            "0007_authenticatorstaticstage_token_length_and_more",
        ),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="StaticDevice",
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
                    ),
                ),
                (
                    "name",
                    models.CharField(
                        help_text="The human-readable name of this device.", max_length=64
                    ),
                ),
                (
                    "confirmed",
                    models.BooleanField(default=True, help_text="Is this device ready for use?"),
                ),
                (
                    "user",
                    models.ForeignKey(
                        help_text="The user that this device belongs to.",
                        to=settings.AUTH_USER_MODEL,
                        on_delete=models.CASCADE,
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name="StaticToken",
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
                    ),
                ),
                ("token", models.CharField(max_length=16, db_index=True)),
                (
                    "device",
                    models.ForeignKey(
                        related_name="token_set",
                        to="authentik_stages_authenticator_static.staticdevice",
                        on_delete=models.CASCADE,
                    ),
                ),
            ],
            options={},
            bases=(models.Model,),
        ),
    ]
@@ -0,0 +1,33 @@
# Generated by Django 3.0.5 on 2020-04-16 13:41

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("authentik_stages_authenticator_static", "0008_initial"),
    ]

    operations = [
        migrations.AddField(
            model_name="staticdevice",
            name="throttling_failure_count",
            field=models.PositiveIntegerField(
                default=0, help_text="Number of successive failed attempts."
            ),
        ),
        migrations.AddField(
            model_name="staticdevice",
            name="throttling_failure_timestamp",
            field=models.DateTimeField(
                blank=True,
                default=None,
                help_text="A timestamp of the last failed verification attempt. Null if last attempt succeeded.",
                null=True,
            ),
        ),
        migrations.AlterModelOptions(
            name="staticdevice",
            options={"verbose_name": "Static device", "verbose_name_plural": "Static devices"},
        ),
    ]
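The two new fields drive the verification back-off. In django-otp's `ThrottlingMixin`, from which this port derives, a device refuses verification until `throttle_factor * 2 ** (failure_count - 1)` seconds have elapsed since the last failure, and a factor of 0 disables throttling entirely — which is why the test cases earlier in this diff run under `OTP_STATIC_THROTTLE_FACTOR=0`. A sketch of that check (an assumption about this port, not code from the diff):

    # Editor's sketch mirroring django-otp's ThrottlingMixin semantics.
    from datetime import datetime, timedelta, timezone
    from typing import Optional


    def verify_is_allowed(
        failure_count: int,
        failure_timestamp: Optional[datetime],
        factor: int = 1,
    ) -> bool:
        """True if another verification attempt may be made right now."""
        if factor <= 0 or failure_count == 0 or failure_timestamp is None:
            return True
        # The delay doubles with every successive failure: factor, 2*factor, 4*factor...
        delay = timedelta(seconds=factor * 2 ** (failure_count - 1))
        return datetime.now(timezone.utc) - failure_timestamp >= delay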
@@ -1,6 +1,9 @@
 """Static Authenticator models"""
+from base64 import b32encode
+from os import urandom
 from typing import Optional

+from django.conf import settings
 from django.db import models
 from django.utils.translation import gettext_lazy as _
 from django.views import View
@@ -8,6 +11,8 @@ from rest_framework.serializers import BaseSerializer

 from authentik.core.types import UserSettingSerializer
 from authentik.flows.models import ConfigurableStage, FriendlyNamedStage, Stage
+from authentik.lib.models import SerializerModel
+from authentik.stages.authenticator.models import Device, ThrottlingMixin


 class AuthenticatorStaticStage(ConfigurableStage, FriendlyNamedStage, Stage):
@@ -46,3 +51,76 @@ class AuthenticatorStaticStage(ConfigurableStage, FriendlyNamedStage, Stage):
     class Meta:
         verbose_name = _("Static Authenticator Stage")
         verbose_name_plural = _("Static Authenticator Stages")
+
+
+class StaticDevice(SerializerModel, ThrottlingMixin, Device):
+    """
+    A static :class:`~authentik.stages.authenticator.models.Device` simply consists of random
+    tokens shared by the database and the user.
+
+    These are frequently used as emergency tokens in case a user's normal
+    device is lost or unavailable. They can be consumed in any order; each
+    token will be removed from the database as soon as it is used.
+
+    This model has no fields of its own, but serves as a container for
+    :class:`StaticToken` objects.
+
+    .. attribute:: token_set
+
+        The RelatedManager for our tokens.
+
+    """
+
+    @property
+    def serializer(self) -> type[BaseSerializer]:
+        from authentik.stages.authenticator_static.api import StaticDeviceSerializer
+
+        return StaticDeviceSerializer
+
+    def get_throttle_factor(self):
+        return getattr(settings, "OTP_STATIC_THROTTLE_FACTOR", 1)
+
+    def verify_token(self, token):
+        verify_allowed, _ = self.verify_is_allowed()
+        if verify_allowed:
+            match = self.token_set.filter(token=token).first()
+            if match is not None:
+                match.delete()
+                self.throttle_reset()
+            else:
+                self.throttle_increment()
+        else:
+            match = None
+
+        return match is not None
+
+    class Meta(Device.Meta):
+        verbose_name = _("Static device")
+        verbose_name_plural = _("Static devices")
+
+
+class StaticToken(models.Model):
+    """
+    A single token belonging to a :class:`StaticDevice`.
+
+    .. attribute:: device
+
+        *ForeignKey*: A foreign key to :class:`StaticDevice`.
+
+    .. attribute:: token
+
+        *CharField*: A random string up to 16 characters.
+    """
+
+    device = models.ForeignKey(StaticDevice, related_name="token_set", on_delete=models.CASCADE)
+    token = models.CharField(max_length=16, db_index=True)
+
+    @staticmethod
+    def random_token():
+        """
+        Returns a new random string that can be used as a static token.
+
+        :rtype: str
+
+        """
+        return b32encode(urandom(5)).decode("utf-8").lower()
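To make the token lifecycle concrete, here is a provisioning sketch using the two models above. The enrollment/view wiring is assumed and the "Recovery codes" device name is made up; only the model API comes from this diff:

    # Editor's sketch, not part of this diff.
    def provision_recovery_codes(user, count: int = 6) -> list[str]:
        """Enrol `user` with single-use recovery codes and return them."""
        device = StaticDevice.objects.create(user=user, name="Recovery codes")
        codes = [StaticToken.random_token() for _ in range(count)]
        for code in codes:
            device.token_set.create(token=code)
        # Display once to the user; verify_token() deletes each code when used.
        return codes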
@@ -1,5 +0,0 @@
-"""Static Authenticator settings"""
-
-INSTALLED_APPS = [
-    "django_otp.plugins.otp_static",
-]
Some files were not shown because too many files have changed in this diff.