Compare commits
version-20...version-20
363 Commits

SHA1 | Author | Date | |
---|---|---|---|
4a1acd377b | |||
c5b84a91d1 | |||
e77ecda3b8 | |||
4e317c10c5 | |||
eb05a3ddb8 | |||
a22d6a0924 | |||
3f0d67779a | |||
0a937ae8e9 | |||
f8d94f3039 | |||
6bb261ac62 | |||
45f2c5bae7 | |||
5d8c1aa0b0 | |||
0101368369 | |||
4854f81592 | |||
4bed6e02e5 | |||
908f123d0e | |||
256dd24a1e | |||
d4284407f9 | |||
80da5dfc52 | |||
b6edf990e0 | |||
a66dcf9382 | |||
9095a840d5 | |||
72259f6479 | |||
0973c74b9d | |||
c7ed4f7ac1 | |||
3d577cf15e | |||
5474a32573 | |||
a5940b88e3 | |||
ff15716012 | |||
c040b13b29 | |||
4915e980c5 | |||
df362dd9ea | |||
d4e4f93cb4 | |||
3af0de6a00 | |||
4f24d61290 | |||
4c5c4dcf2c | |||
660b5cb6c6 | |||
6ff1ea73a9 | |||
3de224690a | |||
d4624b510a | |||
8856d762d0 | |||
5d1cbf14d1 | |||
6d5207f644 | |||
3b6497cd51 | |||
ff7320b0f8 | |||
e5a393c534 | |||
bb4be944dc | |||
21efee8f44 | |||
f61549a60f | |||
0a7bafd1b2 | |||
b3987c5fa0 | |||
0da043a9fe | |||
f336f204cb | |||
3bfcf18492 | |||
dfafe8b43d | |||
b5d43b15f8 | |||
2ccab75021 | |||
9070df6c26 | |||
a1c8ad55ad | |||
872c05c690 | |||
a9528dc1b5 | |||
0e59ade1f2 | |||
5ac49c695d | |||
3a30ecbe76 | |||
1f838bb2aa | |||
cc42830e23 | |||
593eb959ca | |||
5bb6785ad6 | |||
535c11a729 | |||
a0fa8d8524 | |||
c14025c579 | |||
8bc3db7c90 | |||
eaad564e23 | |||
511a94975b | |||
015810a2fd | |||
e70e6b84c2 | |||
d0b9c9a26f | |||
3e403fa348 | |||
48f4a971ef | |||
6314be14ad | |||
1a072c6c39 | |||
ef2eed0bdf | |||
91227b1e96 | |||
67d68629da | |||
e875db8f66 | |||
055a76393d | |||
0754821628 | |||
fca88d9896 | |||
dfe0404c51 | |||
fa61696b46 | |||
e5773738f4 | |||
cac8539d79 | |||
cf600f6f26 | |||
e194715c3e | |||
787f02d5dc | |||
a0ed01a610 | |||
02ba493759 | |||
a7fea5434d | |||
4fb783e953 | |||
affbf85699 | |||
0d92112a3f | |||
b1ad3ec9db | |||
c0601baca6 | |||
057c5c5e9a | |||
05429ab848 | |||
b66d51a699 | |||
f834bc0ff2 | |||
93fd883d7a | |||
7e080d4d68 | |||
3e3ca22d04 | |||
e741caa6b3 | |||
4343246a41 | |||
3f6f83b4b6 | |||
c63e1c9b87 | |||
f44cf06d22 | |||
3f609b8601 | |||
edd89b44a4 | |||
3e58748862 | |||
7088a6b0e6 | |||
6c880e0e62 | |||
cb1e70be7f | |||
6ba150f737 | |||
131769ea73 | |||
e68adbb30d | |||
f1eef09099 | |||
5ab3c7fa9f | |||
d0cec39a0f | |||
e15f53a39a | |||
25fb995663 | |||
eac658c64f | |||
15e2032493 | |||
c87f6cd9d9 | |||
e758995458 | |||
20c284a188 | |||
b0936ea8f3 | |||
bfc0f4a413 | |||
1a9a90cf6a | |||
00f1a6fa48 | |||
33754a06d2 | |||
69b838e1cf | |||
d5e04a2301 | |||
fbf251280f | |||
eaadf62f01 | |||
8c33e7a7c1 | |||
a7d9a80a28 | |||
2ea5dce8d3 | |||
14bf01efe4 | |||
67b24a60e4 | |||
e6775297cb | |||
4e4e2b36b6 | |||
3189c56fc3 | |||
5b5ea47b7a | |||
caa382f898 | |||
2d63488197 | |||
c1c8e4c8d4 | |||
a0e451c5e5 | |||
eaba8006e6 | |||
39ff202f8c | |||
654e0d6245 | |||
ec04443493 | |||
d247c262af | |||
dff49b2bef | |||
50666a76fb | |||
b51a7f9746 | |||
001dfd9f6c | |||
5e4fbeeb25 | |||
2c910bf6ca | |||
9b11319e81 | |||
40dc4b3fb8 | |||
0e37b98968 | |||
7e132eb014 | |||
49dfb4756e | |||
814758e2aa | |||
5c42dac5e2 | |||
88603fa4f7 | |||
0232c4e162 | |||
11753c1fe1 | |||
f5cc6c67ec | |||
8b8ed3527a | |||
1aa0274e7c | |||
ecd33ca0c1 | |||
e93be0de9a | |||
a5adc4f8ed | |||
a6baed9753 | |||
ceaf832e63 | |||
a6b0b14685 | |||
f679250edd | |||
acc4de2235 | |||
56a8276dbf | |||
6dfe6edbef | |||
6af4bd0d9a | |||
7ee7f6bd6a | |||
f8b8334010 | |||
d4b65dc4b4 | |||
e4bbd3b1c0 | |||
87de5e625d | |||
efbe51673e | |||
a95bea53ea | |||
6021fc0f52 | |||
1415b68ff4 | |||
be6853ac52 | |||
7fd6be5abb | |||
91d6f572a5 | |||
016a9ce34e | |||
8adb95af7f | |||
1dc54775d8 | |||
370ef716b5 | |||
16e56ad9ca | |||
b5b5a9eed3 | |||
8b22e7bcc3 | |||
d48b5b9511 | |||
0eccaa3f1e | |||
67d550a80d | |||
ebb5711c32 | |||
79ec872232 | |||
4284e14ff7 | |||
92a09779d0 | |||
14c621631d | |||
c55f503b9b | |||
a908cad976 | |||
c2586557d8 | |||
01c80a82e2 | |||
0d47654651 | |||
1183095833 | |||
c281b11bdc | |||
61fe45a58c | |||
d43aab479c | |||
7f8383427a | |||
a06d6cf33d | |||
5b7cb205c9 | |||
293a932d20 | |||
fff901ff03 | |||
f47c936295 | |||
88d5aec618 | |||
96ae68cf09 | |||
63b3434b6f | |||
947ecec02b | |||
1c2b452406 | |||
47777529ac | |||
949095c376 | |||
4b112c2799 | |||
291a2516b1 | |||
4dcfd021e2 | |||
ca50848db3 | |||
0bb3e3c558 | |||
e4b25809ab | |||
7bf932f8e2 | |||
99d04528b0 | |||
e48d172036 | |||
c2388137a8 | |||
650e2cbc38 | |||
b32800ea71 | |||
e1c0c0b20c | |||
fe39e39dcd | |||
883f213b03 | |||
538996f617 | |||
2f4c92deb9 | |||
ef335ec083 | |||
07b09df3fe | |||
e70e031a1f | |||
c7ba183dc0 | |||
3ed23a37ea | |||
3d724db0e3 | |||
2997542114 | |||
84b18fff96 | |||
1dce408c72 | |||
e5ff47bf14 | |||
b53bf331c3 | |||
90e9a8b34c | |||
845f842783 | |||
7397849c60 | |||
6dd46b5fc5 | |||
89ca79ed10 | |||
713bef895c | |||
925115e9ce | |||
42f5cf8c93 | |||
82cc1d536a | |||
08af2fd46b | |||
70e3b27a4d | |||
6a411d7960 | |||
33567b56d7 | |||
0c1954aeb7 | |||
f4a6c70e98 | |||
5f198e7fe4 | |||
d172d32817 | |||
af3fb5c2cd | |||
885efb526e | |||
3bfb8b2cb2 | |||
9fc5ff4b77 | |||
dd8b579dd6 | |||
e12cbd8711 | |||
62d35f8f8c | |||
49be504c13 | |||
edad55e51d | |||
38086fa8bb | |||
c4f9a3e9a7 | |||
930df791bd | |||
9a6086634c | |||
b68e65355a | |||
72d33a91dd | |||
7067e3d69a | |||
4db370d24e | |||
41e7b9b73f | |||
7f47f93e4e | |||
89abd44b76 | |||
14c7d8c4f4 | |||
525976a81b | |||
64a2126ea4 | |||
994c5882ab | |||
ad64d51e85 | |||
a184a7518a | |||
943fd80920 | |||
01bb18b8c4 | |||
94baaaa5a5 | |||
40b164ce94 | |||
1d7c7801e7 | |||
0db0a12ef3 | |||
8008aba450 | |||
eaeab27004 | |||
111fbf119b | |||
300ad88447 | |||
92cc0c9c64 | |||
18ff803370 | |||
819af78e2b | |||
6338785ce1 | |||
973e151dff | |||
fae6d83f27 | |||
ed84fe0b8d | |||
1ee603403e | |||
7db7b7cc4d | |||
68a98cd86c | |||
e758db5727 | |||
4d7d700afa | |||
f9a5add01d | |||
2986b56389 | |||
58f79b525d | |||
0a1c0dae05 | |||
e18ef8dab6 | |||
3cacc59bec | |||
4eea46d399 | |||
11e25617bd | |||
4817126811 | |||
0181361efa | |||
8ff8e1d5f7 | |||
19d5902a92 | |||
71dffb21a9 | |||
bd283c506d | |||
ef564e5f1a | |||
2543224c7c | |||
077eee9310 | |||
d894eeaa67 | |||
452bfb39bf | |||
6b6702521f | |||
c07b8d95d0 | |||
bf347730b3 | |||
ececfc3a30 | |||
b76546de0c | |||
424d490a60 | |||
127dd85214 | |||
10570ac7f8 | |||
dc5667b0b8 | |||
ec9cacb610 | |||
0027dbc0e5 |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2021.12.5
+current_version = 2022.2.1
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)
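For reference, a quick Python sketch (not part of the diff) of how that bumpversion `parse` pattern splits the new release string; the pattern and version are taken verbatim from the config above:

```python
import re

# Pattern copied from the [bumpversion] "parse" setting above.
PARSE = r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)"

match = re.match(PARSE, "2022.2.1")
assert match is not None
# A stable release leaves the optional "release" group empty;
# a pre-release such as "2022.2.1-rc1" would populate it with "rc1".
print(match.groupdict())
# {'major': '2022', 'minor': '2', 'patch': '1', 'release': ''}
```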
29 .github/workflows/ci-main.yml vendored
@@ -40,7 +40,7 @@ jobs:
 uses: actions/cache@v2.1.7
 with:
 path: ~/.cache/pypoetry/virtualenvs
-key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
+key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }}
 - name: prepare
 env:
 INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
@@ -56,7 +56,7 @@ jobs:
 uses: actions/cache@v2.1.7
 with:
 path: ~/.cache/pypoetry/virtualenvs
-key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
+key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }}
 - name: prepare
 env:
 INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
@@ -79,27 +79,22 @@ jobs:
 uses: actions/cache@v2.1.7
 with:
 path: ~/.cache/pypoetry/virtualenvs
-key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
+key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }}
 - name: checkout stable
 run: |
 # Copy current, latest config to local
 cp authentik/lib/default.yml local.env.yml
 cp -R .github ..
 cp -R scripts ..
-cp -R poetry.lock pyproject.toml ..
 git checkout $(git describe --abbrev=0 --match 'version/*')
 rm -rf .github/ scripts/
-mv ../.github ../scripts ../poetry.lock ../pyproject.toml .
+mv ../.github ../scripts .
 - name: prepare
 env:
 INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
 run: |
 scripts/ci_prepare.sh
-# Sync anyways since stable will have different dependencies
-# TODO: Remove after next stable release
-if [[ -f "Pipfile.lock" ]]; then
-pipenv install --dev
-fi
+# install anyways since stable will have different dependencies
 poetry install
 - name: run migrations to stable
 run: poetry run python -m lifecycle.migrate
@@ -108,13 +103,7 @@ jobs:
 set -x
 git fetch
 git reset --hard HEAD
-# TODO: Remove after next stable release
-rm -f poetry.lock
 git checkout $GITHUB_SHA
-# TODO: Remove after next stable release
-if [[ -f "Pipfile.lock" ]]; then
-pipenv install --dev
-fi
 poetry install
 - name: prepare
 env:
@@ -131,7 +120,7 @@ jobs:
 uses: actions/cache@v2.1.7
 with:
 path: ~/.cache/pypoetry/virtualenvs
-key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
+key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }}
 - name: prepare
 env:
 INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
@@ -158,7 +147,7 @@ jobs:
 uses: actions/cache@v2.1.7
 with:
 path: ~/.cache/pypoetry/virtualenvs
-key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
+key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }}
 - name: prepare
 env:
 INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
@@ -195,7 +184,7 @@ jobs:
 uses: actions/cache@v2.1.7
 with:
 path: ~/.cache/pypoetry/virtualenvs
-key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
+key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }}
 - name: prepare
 env:
 INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
@@ -240,7 +229,7 @@ jobs:
 uses: actions/cache@v2.1.7
 with:
 path: ~/.cache/pypoetry/virtualenvs
-key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
+key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }}
 - name: prepare
 env:
 INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
18 .github/workflows/ci-outpost.yml vendored
@@ -28,11 +28,27 @@ jobs:
 --rm \
 -v $(pwd):/app \
 -w /app \
-golangci/golangci-lint:v1.39.0 \
+golangci/golangci-lint:v1.43 \
 golangci-lint run -v --timeout 200s
+test-unittest:
+runs-on: ubuntu-latest
+steps:
+- uses: actions/checkout@v2
+- uses: actions/setup-go@v2
+with:
+go-version: "^1.17"
+- name: Get dependencies
+run: |
+go get github.com/axw/gocov/gocov
+go get github.com/AlekSi/gocov-xml
+go get github.com/jstemmer/go-junit-report
+- name: Go unittests
+run: |
+go test -timeout 0 -v -race -coverprofile=coverage.out -covermode=atomic -cover ./... | go-junit-report > junit.xml
 ci-outpost-mark:
 needs:
 - lint-golint
+- test-unittest
 runs-on: ubuntu-latest
 steps:
 - run: echo mark
14 .github/workflows/release-publish.yml vendored
@@ -30,14 +30,14 @@ jobs:
 with:
 push: ${{ github.event_name == 'release' }}
 tags: |
-beryju/authentik:2021.12.5,
+beryju/authentik:2022.2.1,
 beryju/authentik:latest,
-ghcr.io/goauthentik/server:2021.12.5,
+ghcr.io/goauthentik/server:2022.2.1,
 ghcr.io/goauthentik/server:latest
 platforms: linux/amd64,linux/arm64
 context: .
 - name: Building Docker Image (stable)
-if: ${{ github.event_name == 'release' && !contains('2021.12.5', 'rc') }}
+if: ${{ github.event_name == 'release' && !contains('2022.2.1', 'rc') }}
 run: |
 docker pull beryju/authentik:latest
 docker tag beryju/authentik:latest beryju/authentik:stable
@@ -78,14 +78,14 @@ jobs:
 with:
 push: ${{ github.event_name == 'release' }}
 tags: |
-beryju/authentik-${{ matrix.type }}:2021.12.5,
+beryju/authentik-${{ matrix.type }}:2022.2.1,
 beryju/authentik-${{ matrix.type }}:latest,
-ghcr.io/goauthentik/${{ matrix.type }}:2021.12.5,
+ghcr.io/goauthentik/${{ matrix.type }}:2022.2.1,
 ghcr.io/goauthentik/${{ matrix.type }}:latest
 file: ${{ matrix.type }}.Dockerfile
 platforms: linux/amd64,linux/arm64
 - name: Building Docker Image (stable)
-if: ${{ github.event_name == 'release' && !contains('2021.12.5', 'rc') }}
+if: ${{ github.event_name == 'release' && !contains('2022.2.1', 'rc') }}
 run: |
 docker pull beryju/authentik-${{ matrix.type }}:latest
 docker tag beryju/authentik-${{ matrix.type }}:latest beryju/authentik-${{ matrix.type }}:stable
@@ -170,7 +170,7 @@ jobs:
 SENTRY_PROJECT: authentik
 SENTRY_URL: https://sentry.beryju.org
 with:
-version: authentik@2021.12.5
+version: authentik@2022.2.1
 environment: beryjuorg-prod
 sourcemaps: './web/dist'
 url_prefix: '~/static/dist'
2 .github/workflows/release-tag.yml vendored
@@ -27,7 +27,7 @@ jobs:
 docker-compose run -u root server test
 - name: Extract version number
 id: get_version
-uses: actions/github-script@v5
+uses: actions/github-script@v6
 with:
 github-token: ${{ secrets.GITHUB_TOKEN }}
 script: |
2 .github/workflows/translation-compile.yml vendored
@@ -26,7 +26,7 @@ jobs:
 uses: actions/cache@v2.1.7
 with:
 path: ~/.cache/pypoetry/virtualenvs
-key: ${{ runner.os }}-poetry-cache-v3-${{ hashFiles('**/poetry.lock') }}
+key: ${{ runner.os }}-poetry-cache-v2-${{ hashFiles('**/poetry.lock') }}
 - name: prepare
 env:
 INSTALL: ${{ steps.cache-poetry.outputs.cache-hit }}
@@ -1 +0,0 @@
-3.9.7
3 .vscode/settings.json vendored
@@ -12,7 +12,8 @@
 "totp",
 "webauthn",
 "traefik",
-"passwordless"
+"passwordless",
+"kubernetes"
 ],
 "python.linting.pylintEnabled": true,
 "todo-tree.tree.showCountsInTree": true,
@@ -16,7 +16,7 @@ ENV NODE_ENV=production
 RUN cd /work/web && npm i && npm run build
 
 # Stage 3: Build go proxy
-FROM docker.io/golang:1.17.5-bullseye AS builder
+FROM docker.io/golang:1.17.7-bullseye AS builder
 
 WORKDIR /work
 
@@ -32,7 +32,7 @@ COPY ./go.sum /work/go.sum
 RUN go build -o /work/authentik ./cmd/server/main.go
 
 # Stage 4: Run
-FROM docker.io/python:3.10.1-slim-bullseye
+FROM docker.io/python:3.10.2-slim-bullseye
 
 LABEL org.opencontainers.image.url https://goauthentik.io
 LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
3 Makefile
@@ -15,6 +15,9 @@ test-e2e-provider:
 test-e2e-rest:
 coverage run manage.py test tests/e2e/test_flows* tests/e2e/test_source*
 
+test-go:
+go test -timeout 0 -v -race -cover ./...
+
 test:
 coverage run manage.py test authentik
 coverage html
@@ -57,4 +57,4 @@ DigitalOcean provides development and testing resources for authentik.
 </a>
 </p>
 
-Netlify hosts the [goauthentik.io](goauthentik.io) site.
+Netlify hosts the [goauthentik.io](https://goauthentik.io) site.
@@ -6,8 +6,8 @@
 
 | Version    | Supported          |
 | ---------- | ------------------ |
-| 2021.10.x  | :white_check_mark: |
-| 2021.12.x  | :white_check_mark: |
+| 2022.1.x   | :white_check_mark: |
+| 2022.2.x   | :white_check_mark: |
 
 ## Reporting a Vulnerability
 
@@ -1,3 +1,19 @@
 """authentik"""
-__version__ = "2021.12.5"
+from os import environ
+from typing import Optional
+
+__version__ = "2022.2.1"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
+
+
+def get_build_hash(fallback: Optional[str] = None) -> str:
+    """Get build hash"""
+    return environ.get(ENV_GIT_HASH_KEY, fallback if fallback else "")
+
+
+def get_full_version() -> str:
+    """Get full version, with build hash appended"""
+    version = __version__
+    if (build_hash := get_build_hash()) != "":
+        version += "." + build_hash
+    return version
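A small usage sketch (not from the diff) of the helpers introduced above, assuming the package is importable; GIT_BUILD_HASH is the environment key defined in the hunk:

```python
from os import environ

from authentik import __version__, get_build_hash, get_full_version

# With no build hash in the environment, the full version is just __version__.
environ.pop("GIT_BUILD_HASH", None)
assert get_build_hash() == ""
assert get_full_version() == __version__  # "2022.2.1"

# When a build injects GIT_BUILD_HASH, it is appended after a dot.
environ["GIT_BUILD_HASH"] = "abc123"  # hypothetical hash for illustration
assert get_full_version() == f"{__version__}.abc123"
```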
@ -12,10 +12,13 @@ from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.viewsets import ViewSet
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class TaskSerializer(PassiveSerializer):
|
||||
"""Serialize TaskInfo and TaskResult"""
|
||||
@ -89,6 +92,7 @@ class TaskViewSet(ViewSet):
|
||||
try:
|
||||
task_module = import_module(task.task_call_module)
|
||||
task_func = getattr(task_module, task.task_call_func)
|
||||
LOGGER.debug("Running task", task=task_func)
|
||||
task_func.delay(*task.task_call_args, **task.task_call_kwargs)
|
||||
messages.success(
|
||||
self.request,
|
||||
@ -96,6 +100,7 @@ class TaskViewSet(ViewSet):
|
||||
)
|
||||
return Response(status=204)
|
||||
except (ImportError, AttributeError): # pragma: no cover
|
||||
LOGGER.warning("Failed to run task, remove state", task=task)
|
||||
# if we get an import error, the module path has probably changed
|
||||
task.delete()
|
||||
return Response(status=500)
|
||||
|
@ -1,6 +1,4 @@
|
||||
"""authentik administration overview"""
|
||||
from os import environ
|
||||
|
||||
from django.core.cache import cache
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from packaging.version import parse
|
||||
@ -10,7 +8,7 @@ from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from authentik import ENV_GIT_HASH_KEY, __version__
|
||||
from authentik import __version__, get_build_hash
|
||||
from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
|
||||
@ -25,7 +23,7 @@ class VersionSerializer(PassiveSerializer):
|
||||
|
||||
def get_build_hash(self, _) -> str:
|
||||
"""Get build hash, if version is not latest or released"""
|
||||
return environ.get(ENV_GIT_HASH_KEY, "")
|
||||
return get_build_hash()
|
||||
|
||||
def get_version_current(self, _) -> str:
|
||||
"""Get current version"""
|
||||
|
@ -1,4 +1,6 @@
|
||||
"""authentik admin app config"""
|
||||
from importlib import import_module
|
||||
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
@ -13,3 +15,4 @@ class AuthentikAdminConfig(AppConfig):
|
||||
from authentik.admin.tasks import clear_update_notifications
|
||||
|
||||
clear_update_notifications.delay()
|
||||
import_module("authentik.admin.signals")
|
||||
|
23
authentik/admin/signals.py
Normal file
23
authentik/admin/signals.py
Normal file
@ -0,0 +1,23 @@
|
||||
"""admin signals"""
|
||||
from django.dispatch import receiver
|
||||
|
||||
from authentik.admin.api.tasks import TaskInfo
|
||||
from authentik.admin.api.workers import GAUGE_WORKERS
|
||||
from authentik.root.celery import CELERY_APP
|
||||
from authentik.root.monitoring import monitoring_set
|
||||
|
||||
|
||||
@receiver(monitoring_set)
|
||||
# pylint: disable=unused-argument
|
||||
def monitoring_set_workers(sender, **kwargs):
|
||||
"""Set worker gauge"""
|
||||
count = len(CELERY_APP.control.ping(timeout=0.5))
|
||||
GAUGE_WORKERS.set(count)
|
||||
|
||||
|
||||
@receiver(monitoring_set)
|
||||
# pylint: disable=unused-argument
|
||||
def monitoring_set_tasks(sender, **kwargs):
|
||||
"""Set task gauges"""
|
||||
for task in TaskInfo.all().values():
|
||||
task.set_prom_metrics()
|
@ -1,6 +1,5 @@
|
||||
"""authentik admin tasks"""
|
||||
import re
|
||||
from os import environ
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.core.validators import URLValidator
|
||||
@ -9,7 +8,7 @@ from prometheus_client import Info
|
||||
from requests import RequestException
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik import ENV_GIT_HASH_KEY, __version__
|
||||
from authentik import __version__, get_build_hash
|
||||
from authentik.events.models import Event, EventAction, Notification
|
||||
from authentik.events.monitored_tasks import (
|
||||
MonitoredTask,
|
||||
@ -36,7 +35,7 @@ def _set_prom_info():
|
||||
{
|
||||
"version": __version__,
|
||||
"latest": cache.get(VERSION_CACHE_KEY, ""),
|
||||
"build_hash": environ.get(ENV_GIT_HASH_KEY, ""),
|
||||
"build_hash": get_build_hash(),
|
||||
}
|
||||
)
|
||||
|
||||
|
@ -30,7 +30,7 @@ function getCookie(name) {
|
||||
window.addEventListener('DOMContentLoaded', (event) => {
|
||||
const rapidocEl = document.querySelector('rapi-doc');
|
||||
rapidocEl.addEventListener('before-try', (e) => {
|
||||
e.detail.request.headers.append('X-CSRFToken', getCookie("authentik_csrf"));
|
||||
e.detail.request.headers.append('X-authentik-CSRF', getCookie("authentik_csrf"));
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
@ -4,7 +4,5 @@ from django.urls import include, path
|
||||
from authentik.api.v3.urls import urlpatterns as v3_urls
|
||||
|
||||
urlpatterns = [
|
||||
# TODO: Remove in 2022.1
|
||||
path("v2beta/", include(v3_urls)),
|
||||
path("v3/", include(v3_urls)),
|
||||
]
|
||||
|
@ -1,10 +1,9 @@
|
||||
"""core Configs API"""
|
||||
from os import environ, path
|
||||
from os import path
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import models
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
|
||||
from rest_framework.fields import (
|
||||
BooleanField,
|
||||
CharField,
|
||||
@ -28,7 +27,6 @@ class Capabilities(models.TextChoices):
|
||||
|
||||
CAN_SAVE_MEDIA = "can_save_media"
|
||||
CAN_GEO_IP = "can_geo_ip"
|
||||
CAN_BACKUP = "can_backup"
|
||||
|
||||
|
||||
class ErrorReportingConfigSerializer(PassiveSerializer):
|
||||
@ -65,13 +63,6 @@ class ConfigView(APIView):
|
||||
caps.append(Capabilities.CAN_SAVE_MEDIA)
|
||||
if GEOIP_READER.enabled:
|
||||
caps.append(Capabilities.CAN_GEO_IP)
|
||||
if SERVICE_HOST_ENV_NAME in environ:
|
||||
# Running in k8s, only s3 backup is supported
|
||||
if CONFIG.y("postgresql.s3_backup"):
|
||||
caps.append(Capabilities.CAN_BACKUP)
|
||||
else:
|
||||
# Running in compose, backup is always supported
|
||||
caps.append(Capabilities.CAN_BACKUP)
|
||||
return caps
|
||||
|
||||
@extend_schema(responses={200: ConfigSerializer(many=False)})
|
||||
@ -80,7 +71,7 @@ class ConfigView(APIView):
|
||||
config = ConfigSerializer(
|
||||
{
|
||||
"error_reporting": {
|
||||
"enabled": CONFIG.y("error_reporting.enabled"),
|
||||
"enabled": CONFIG.y("error_reporting.enabled") and not settings.DEBUG,
|
||||
"environment": CONFIG.y("error_reporting.environment"),
|
||||
"send_pii": CONFIG.y("error_reporting.send_pii"),
|
||||
"traces_sample_rate": float(CONFIG.y("error_reporting.sample_rate", 0.4)),
|
||||
|
@ -1,13 +1,16 @@
|
||||
"""Application API Views"""
|
||||
from typing import Optional
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.db.models import QuerySet
|
||||
from django.http.response import HttpResponseBadRequest
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.functional import SimpleLazyObject
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
|
||||
from guardian.shortcuts import get_objects_for_user
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import ReadOnlyField
|
||||
from rest_framework.fields import ReadOnlyField, SerializerMethodField
|
||||
from rest_framework.parsers import MultiPartParser
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
@ -39,11 +42,26 @@ def user_app_cache_key(user_pk: str) -> str:
|
||||
class ApplicationSerializer(ModelSerializer):
|
||||
"""Application Serializer"""
|
||||
|
||||
launch_url = ReadOnlyField(source="get_launch_url")
|
||||
launch_url = SerializerMethodField()
|
||||
provider_obj = ProviderSerializer(source="get_provider", required=False)
|
||||
|
||||
meta_icon = ReadOnlyField(source="get_meta_icon")
|
||||
|
||||
def get_launch_url(self, app: Application) -> Optional[str]:
|
||||
"""Allow formatting of launch URL"""
|
||||
url = app.get_launch_url()
|
||||
if not url:
|
||||
return url
|
||||
user = self.context["request"].user
|
||||
if isinstance(user, SimpleLazyObject):
|
||||
user._setup()
|
||||
user = user._wrapped
|
||||
try:
|
||||
return url % user.__dict__
|
||||
except ValueError as exc:
|
||||
LOGGER.warning("Failed to format launch url", exc=exc)
|
||||
return url
|
||||
|
||||
class Meta:
|
||||
|
||||
model = Application
|
||||
|
@ -1,10 +1,9 @@
|
||||
"""Tokens API Viewset"""
|
||||
from typing import Any
|
||||
|
||||
from django.http.response import Http404
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from drf_spectacular.utils import OpenApiResponse, extend_schema
|
||||
from guardian.shortcuts import get_anonymous_user
|
||||
from guardian.shortcuts import assign_perm, get_anonymous_user
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.fields import CharField
|
||||
@ -96,10 +95,12 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
|
||||
|
||||
def perform_create(self, serializer: TokenSerializer):
|
||||
if not self.request.user.is_superuser:
|
||||
return serializer.save(
|
||||
instance = serializer.save(
|
||||
user=self.request.user,
|
||||
expiring=self.request.user.attributes.get(USER_ATTRIBUTE_TOKEN_EXPIRING, True),
|
||||
)
|
||||
assign_perm("authentik_core.view_token_key", self.request.user, instance)
|
||||
return instance
|
||||
return super().perform_create(serializer)
|
||||
|
||||
@permission_required("authentik_core.view_token_key")
|
||||
@ -114,7 +115,5 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
|
||||
def view_key(self, request: Request, identifier: str) -> Response:
|
||||
"""Return token key and log access"""
|
||||
token: Token = self.get_object()
|
||||
if token.is_expired:
|
||||
raise Http404
|
||||
Event.new(EventAction.SECRET_VIEW, secret=token).from_http(request) # noqa # nosec
|
||||
return Response(TokenViewSerializer({"key": token.key}).data)
|
||||
|
@ -1,6 +1,7 @@
|
||||
"""authentik core signals"""
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from django.apps import apps
|
||||
from django.contrib.auth.signals import user_logged_in, user_logged_out
|
||||
from django.contrib.sessions.backends.cache import KEY_PREFIX
|
||||
from django.core.cache import cache
|
||||
@ -11,6 +12,8 @@ from django.dispatch import receiver
|
||||
from django.http.request import HttpRequest
|
||||
from prometheus_client import Gauge
|
||||
|
||||
from authentik.root.monitoring import monitoring_set
|
||||
|
||||
# Arguments: user: User, password: str
|
||||
password_changed = Signal()
|
||||
|
||||
@ -20,6 +23,17 @@ if TYPE_CHECKING:
|
||||
from authentik.core.models import AuthenticatedSession, User
|
||||
|
||||
|
||||
@receiver(monitoring_set)
|
||||
# pylint: disable=unused-argument
|
||||
def monitoring_set_models(sender, **kwargs):
|
||||
"""set models gauges"""
|
||||
for model in apps.get_models():
|
||||
GAUGE_MODELS.labels(
|
||||
model_name=model._meta.model_name,
|
||||
app=model._meta.app_label,
|
||||
).set(model.objects.count())
|
||||
|
||||
|
||||
@receiver(post_save)
|
||||
# pylint: disable=unused-argument
|
||||
def post_save_application(sender: type[Model], instance, created: bool, **_):
|
||||
@ -27,11 +41,6 @@ def post_save_application(sender: type[Model], instance, created: bool, **_):
|
||||
from authentik.core.api.applications import user_app_cache_key
|
||||
from authentik.core.models import Application
|
||||
|
||||
GAUGE_MODELS.labels(
|
||||
model_name=sender._meta.model_name,
|
||||
app=sender._meta.app_label,
|
||||
).set(sender.objects.count())
|
||||
|
||||
if sender != Application:
|
||||
return
|
||||
if not created: # pragma: no cover
|
||||
|
@ -1,17 +1,7 @@
|
||||
"""authentik core tasks"""
|
||||
from datetime import datetime
|
||||
from io import StringIO
|
||||
from os import environ
|
||||
|
||||
from boto3.exceptions import Boto3Error
|
||||
from botocore.exceptions import BotoCoreError, ClientError
|
||||
from dbbackup.db.exceptions import CommandConnectorError
|
||||
from django.contrib.humanize.templatetags.humanize import naturaltime
|
||||
from django.contrib.sessions.backends.cache import KEY_PREFIX
|
||||
from django.core import management
|
||||
from django.core.cache import cache
|
||||
from django.utils.timezone import now
|
||||
from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.models import AuthenticatedSession, ExpiringModel
|
||||
@ -21,7 +11,6 @@ from authentik.events.monitored_tasks import (
|
||||
TaskResultStatus,
|
||||
prefill_task,
|
||||
)
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.root.celery import CELERY_APP
|
||||
|
||||
LOGGER = get_logger()
|
||||
@ -53,46 +42,3 @@ def clean_expired_models(self: MonitoredTask):
|
||||
LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount)
|
||||
messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}")
|
||||
self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, messages))
|
||||
|
||||
|
||||
def should_backup() -> bool:
|
||||
"""Check if we should be doing backups"""
|
||||
if SERVICE_HOST_ENV_NAME in environ and not CONFIG.y("postgresql.s3_backup.bucket"):
|
||||
LOGGER.info("Running in k8s and s3 backups are not configured, skipping")
|
||||
return False
|
||||
if not CONFIG.y_bool("postgresql.backup.enabled"):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||
@prefill_task
|
||||
def backup_database(self: MonitoredTask): # pragma: no cover
|
||||
"""Database backup"""
|
||||
self.result_timeout_hours = 25
|
||||
if not should_backup():
|
||||
self.set_status(TaskResult(TaskResultStatus.UNKNOWN, ["Backups are not configured."]))
|
||||
return
|
||||
try:
|
||||
start = datetime.now()
|
||||
out = StringIO()
|
||||
management.call_command("dbbackup", quiet=True, stdout=out)
|
||||
self.set_status(
|
||||
TaskResult(
|
||||
TaskResultStatus.SUCCESSFUL,
|
||||
[
|
||||
f"Successfully finished database backup {naturaltime(start)} {out.getvalue()}",
|
||||
],
|
||||
)
|
||||
)
|
||||
LOGGER.info("Successfully backed up database.")
|
||||
except (
|
||||
IOError,
|
||||
BotoCoreError,
|
||||
ClientError,
|
||||
Boto3Error,
|
||||
PermissionError,
|
||||
CommandConnectorError,
|
||||
ValueError,
|
||||
) as exc:
|
||||
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
|
||||
|
@ -16,6 +16,7 @@
|
||||
{% block head_before %}
|
||||
{% endblock %}
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
|
||||
<link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}">
|
||||
<script src="{% static 'dist/poly.js' %}" type="module"></script>
|
||||
{% block head %}
|
||||
{% endblock %}
|
||||
|
@ -13,7 +13,9 @@ class TestApplicationsAPI(APITestCase):
|
||||
|
||||
def setUp(self) -> None:
|
||||
self.user = create_test_admin_user()
|
||||
self.allowed = Application.objects.create(name="allowed", slug="allowed")
|
||||
self.allowed = Application.objects.create(
|
||||
name="allowed", slug="allowed", meta_launch_url="https://goauthentik.io/%(username)s"
|
||||
)
|
||||
self.denied = Application.objects.create(name="denied", slug="denied")
|
||||
PolicyBinding.objects.create(
|
||||
target=self.denied,
|
||||
@ -64,8 +66,8 @@ class TestApplicationsAPI(APITestCase):
|
||||
"slug": "allowed",
|
||||
"provider": None,
|
||||
"provider_obj": None,
|
||||
"launch_url": None,
|
||||
"meta_launch_url": "",
|
||||
"launch_url": f"https://goauthentik.io/{self.user.username}",
|
||||
"meta_launch_url": "https://goauthentik.io/%(username)s",
|
||||
"meta_icon": None,
|
||||
"meta_description": "",
|
||||
"meta_publisher": "",
|
||||
@ -100,8 +102,8 @@ class TestApplicationsAPI(APITestCase):
|
||||
"slug": "allowed",
|
||||
"provider": None,
|
||||
"provider_obj": None,
|
||||
"launch_url": None,
|
||||
"meta_launch_url": "",
|
||||
"launch_url": f"https://goauthentik.io/{self.user.username}",
|
||||
"meta_launch_url": "https://goauthentik.io/%(username)s",
|
||||
"meta_icon": None,
|
||||
"meta_description": "",
|
||||
"meta_publisher": "",
|
||||
|
@ -30,6 +30,7 @@ class TestTokenAPI(APITestCase):
|
||||
self.assertEqual(token.user, self.user)
|
||||
self.assertEqual(token.intent, TokenIntents.INTENT_API)
|
||||
self.assertEqual(token.expiring, True)
|
||||
self.assertTrue(self.user.has_perm("authentik_core.view_token_key", token))
|
||||
|
||||
def test_token_create_invalid(self):
|
||||
"""Test token creation endpoint (invalid data)"""
|
||||
|
@ -17,6 +17,7 @@ from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ModelSerializer, ValidationError
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.api.decorators import permission_required
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
@ -26,6 +27,8 @@ from authentik.crypto.managed import MANAGED_KEY
|
||||
from authentik.crypto.models import CertificateKeyPair
|
||||
from authentik.events.models import Event, EventAction
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class CertificateKeyPairSerializer(ModelSerializer):
|
||||
"""CertificateKeyPair Serializer"""
|
||||
@ -76,8 +79,11 @@ class CertificateKeyPairSerializer(ModelSerializer):
|
||||
def validate_certificate_data(self, value: str) -> str:
|
||||
"""Verify that input is a valid PEM x509 Certificate"""
|
||||
try:
|
||||
load_pem_x509_certificate(value.encode("utf-8"), default_backend())
|
||||
except ValueError:
|
||||
# Cast to string to fully load and parse certificate
|
||||
# Prevents issues like https://github.com/goauthentik/authentik/issues/2082
|
||||
str(load_pem_x509_certificate(value.encode("utf-8"), default_backend()))
|
||||
except ValueError as exc:
|
||||
LOGGER.warning("Failed to load certificate", exc=exc)
|
||||
raise ValidationError("Unable to load certificate.")
|
||||
return value
|
||||
|
||||
@ -86,12 +92,17 @@ class CertificateKeyPairSerializer(ModelSerializer):
|
||||
# Since this field is optional, data can be empty.
|
||||
if value != "":
|
||||
try:
|
||||
load_pem_private_key(
|
||||
str.encode("\n".join([x.strip() for x in value.split("\n")])),
|
||||
password=None,
|
||||
backend=default_backend(),
|
||||
# Cast to string to fully load and parse certificate
|
||||
# Prevents issues like https://github.com/goauthentik/authentik/issues/2082
|
||||
str(
|
||||
load_pem_private_key(
|
||||
str.encode("\n".join([x.strip() for x in value.split("\n")])),
|
||||
password=None,
|
||||
backend=default_backend(),
|
||||
)
|
||||
)
|
||||
except (ValueError, TypeError):
|
||||
except (ValueError, TypeError) as exc:
|
||||
LOGGER.warning("Failed to load private key", exc=exc)
|
||||
raise ValidationError("Unable to load private key (possibly encrypted?).")
|
||||
return value
|
||||
|
||||
|
@ -1,7 +1,5 @@
|
||||
"""events GeoIP Reader"""
|
||||
from datetime import datetime
|
||||
from os import stat
|
||||
from time import time
|
||||
from typing import Optional, TypedDict
|
||||
|
||||
from geoip2.database import Reader
|
||||
@ -46,14 +44,18 @@ class GeoIPReader:
|
||||
LOGGER.warning("Failed to load GeoIP database", exc=exc)
|
||||
|
||||
def __check_expired(self):
|
||||
"""Check if the geoip database has been opened longer than 8 hours,
|
||||
and re-open it, as it will probably will have been re-downloaded"""
|
||||
now = time()
|
||||
diff = datetime.fromtimestamp(now) - datetime.fromtimestamp(self.__last_mtime)
|
||||
diff_hours = diff.total_seconds() // 3600
|
||||
if diff_hours >= 8:
|
||||
LOGGER.info("GeoIP databased loaded too long, re-opening", diff=diff)
|
||||
self.__open()
|
||||
"""Check if the modification date of the GeoIP database has
|
||||
changed, and reload it if so"""
|
||||
path = CONFIG.y("geoip")
|
||||
try:
|
||||
mtime = stat(path).st_mtime
|
||||
diff = self.__last_mtime < mtime
|
||||
if diff > 0:
|
||||
LOGGER.info("Found new GeoIP Database, reopening", diff=diff)
|
||||
self.__open()
|
||||
except OSError as exc:
|
||||
LOGGER.warning("Failed to check GeoIP age", exc=exc)
|
||||
return
|
||||
|
||||
@property
|
||||
def enabled(self) -> bool:
|
||||
|
@ -4,7 +4,7 @@ from typing import Any, Optional
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.http import HttpRequest
|
||||
from prometheus_client import Histogram
|
||||
from prometheus_client import Gauge, Histogram
|
||||
from sentry_sdk.hub import Hub
|
||||
from sentry_sdk.tracing import Span
|
||||
from structlog.stdlib import BoundLogger, get_logger
|
||||
@ -16,7 +16,6 @@ from authentik.flows.markers import ReevaluateMarker, StageMarker
|
||||
from authentik.flows.models import Flow, FlowStageBinding, Stage
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.policies.engine import PolicyEngine
|
||||
from authentik.root.monitoring import UpdatingGauge
|
||||
|
||||
LOGGER = get_logger()
|
||||
PLAN_CONTEXT_PENDING_USER = "pending_user"
|
||||
@ -27,10 +26,9 @@ PLAN_CONTEXT_SOURCE = "source"
|
||||
# Is set by the Flow Planner when a FlowToken was used, and the currently active flow plan
|
||||
# was restored.
|
||||
PLAN_CONTEXT_IS_RESTORED = "is_restored"
|
||||
GAUGE_FLOWS_CACHED = UpdatingGauge(
|
||||
GAUGE_FLOWS_CACHED = Gauge(
|
||||
"authentik_flows_cached",
|
||||
"Cached flows",
|
||||
update_func=lambda: len(cache.keys("flow_*") or []),
|
||||
)
|
||||
HIST_FLOWS_PLAN_TIME = Histogram(
|
||||
"authentik_flows_plan_time",
|
||||
@ -171,7 +169,6 @@ class FlowPlanner:
|
||||
)
|
||||
plan = self._build_plan(user, request, default_context)
|
||||
cache.set(cache_key(self.flow, user), plan, CACHE_TIMEOUT)
|
||||
GAUGE_FLOWS_CACHED.update()
|
||||
if not plan.bindings and not self.allow_empty_flows:
|
||||
raise EmptyFlowException()
|
||||
return plan
|
||||
|
@ -4,6 +4,9 @@ from django.db.models.signals import post_save, pre_delete
|
||||
from django.dispatch import receiver
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.flows.planner import GAUGE_FLOWS_CACHED
|
||||
from authentik.root.monitoring import monitoring_set
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
@ -14,6 +17,13 @@ def delete_cache_prefix(prefix: str) -> int:
|
||||
return len(keys)
|
||||
|
||||
|
||||
@receiver(monitoring_set)
|
||||
# pylint: disable=unused-argument
|
||||
def monitoring_set_flows(sender, **kwargs):
|
||||
"""set flow gauges"""
|
||||
GAUGE_FLOWS_CACHED.set(len(cache.keys("flow_*") or []))
|
||||
|
||||
|
||||
@receiver(post_save)
|
||||
@receiver(pre_delete)
|
||||
# pylint: disable=unused-argument
|
||||
|
@ -118,9 +118,12 @@ class ChallengeStageView(StageView):
|
||||
"""Allow usage of placeholder in flow title."""
|
||||
if not self.executor.plan:
|
||||
return self.executor.flow.title
|
||||
return self.executor.flow.title % {
|
||||
"app": self.executor.plan.context.get(PLAN_CONTEXT_APPLICATION, "")
|
||||
}
|
||||
try:
|
||||
return self.executor.flow.title % {
|
||||
"app": self.executor.plan.context.get(PLAN_CONTEXT_APPLICATION, "")
|
||||
}
|
||||
except ValueError:
|
||||
return self.executor.flow.title
|
||||
|
||||
def _get_challenge(self, *args, **kwargs) -> Challenge:
|
||||
with Hub.current.start_span(
|
||||
|
@ -5,16 +5,6 @@ postgresql:
|
||||
user: authentik
|
||||
port: 5432
|
||||
password: 'env://POSTGRES_PASSWORD'
|
||||
backup:
|
||||
enabled: true
|
||||
s3_backup:
|
||||
access_key: ""
|
||||
secret_key: ""
|
||||
bucket: ""
|
||||
region: eu-central-1
|
||||
host: ""
|
||||
location: ""
|
||||
insecure_skip_verify: false
|
||||
|
||||
web:
|
||||
listen: 0.0.0.0:9000
|
||||
@ -65,6 +55,7 @@ outposts:
|
||||
# %(version)s: Current version; 2021.4.1
|
||||
# %(build_hash)s: Build hash if you're running a beta version
|
||||
container_image_base: ghcr.io/goauthentik/%(type)s:%(version)s
|
||||
discover: true
|
||||
|
||||
cookie_domain: null
|
||||
disable_update_check: false
|
||||
|
@ -32,6 +32,7 @@ class BaseEvaluator:
|
||||
self._globals = {
|
||||
"regex_match": BaseEvaluator.expr_regex_match,
|
||||
"regex_replace": BaseEvaluator.expr_regex_replace,
|
||||
"list_flatten": BaseEvaluator.expr_flatten,
|
||||
"ak_is_group_member": BaseEvaluator.expr_is_group_member,
|
||||
"ak_user_by": BaseEvaluator.expr_user_by,
|
||||
"ak_logger": get_logger(),
|
||||
@ -40,6 +41,15 @@ class BaseEvaluator:
|
||||
self._context = {}
|
||||
self._filename = "BaseEvalautor"
|
||||
|
||||
@staticmethod
|
||||
def expr_flatten(value: list[Any] | Any) -> Optional[Any]:
|
||||
"""Flatten `value` if its a list"""
|
||||
if isinstance(value, list):
|
||||
if len(value) < 1:
|
||||
return None
|
||||
return value[0]
|
||||
return value
|
||||
|
||||
@staticmethod
|
||||
def expr_regex_match(value: Any, regex: str) -> bool:
|
||||
"""Expression Filter to run re.search"""
|
||||
|
6 authentik/lib/merge.py Normal file
@@ -0,0 +1,6 @@
+"""merge utils"""
+from deepmerge import Merger
+
+MERGE_LIST_UNIQUE = Merger(
+    [(list, ["append_unique"]), (dict, ["merge"]), (set, ["union"])], ["override"], ["override"]
+)
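A brief illustration (not from the diff) of what the MERGE_LIST_UNIQUE merger added above does, assuming deepmerge's standard Merger.merge(base, nxt) API: nested dicts are merged, lists are appended without duplicates, and scalar conflicts fall back to override. The example keys and values are made up for illustration:

```python
from authentik.lib.merge import MERGE_LIST_UNIQUE

base = {"labels": ["a", "b"], "spec": {"replicas": 1}}
patch = {"labels": ["b", "c"], "spec": {"image": "ghcr.io/goauthentik/proxy"}}

# merge() mutates and returns the base mapping.
merged = MERGE_LIST_UNIQUE.merge(base, patch)
print(merged)
# {'labels': ['a', 'b', 'c'], 'spec': {'replicas': 1, 'image': 'ghcr.io/goauthentik/proxy'}}
```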
@ -3,8 +3,6 @@ from typing import Optional
|
||||
|
||||
from aioredis.errors import ConnectionClosedError, ReplyError
|
||||
from billiard.exceptions import SoftTimeLimitExceeded, WorkerLostError
|
||||
from botocore.client import ClientError
|
||||
from botocore.exceptions import BotoCoreError
|
||||
from celery.exceptions import CeleryError
|
||||
from channels.middleware import BaseMiddleware
|
||||
from channels_redis.core import ChannelFull
|
||||
@ -81,9 +79,6 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
|
||||
WorkerLostError,
|
||||
CeleryError,
|
||||
SoftTimeLimitExceeded,
|
||||
# S3 errors
|
||||
BotoCoreError,
|
||||
ClientError,
|
||||
# custom baseclass
|
||||
SentryIgnoredException,
|
||||
# ldap errors
|
||||
@ -101,8 +96,6 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
|
||||
return None
|
||||
if "logger" in event:
|
||||
if event["logger"] in [
|
||||
"dbbackup",
|
||||
"botocore",
|
||||
"kombu",
|
||||
"asyncio",
|
||||
"multiprocessing",
|
||||
@ -111,6 +104,7 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
|
||||
"django_redis.cache",
|
||||
"celery.backends.redis",
|
||||
"celery.worker",
|
||||
"paramiko.transport",
|
||||
]:
|
||||
return None
|
||||
LOGGER.debug("sending event to sentry", exc=exc_value, source_logger=event.get("logger", None))
|
||||
|
@ -1,5 +1,4 @@
|
||||
"""http helpers"""
|
||||
from os import environ
|
||||
from typing import Any, Optional
|
||||
|
||||
from django.http import HttpRequest
|
||||
@ -7,7 +6,7 @@ from requests.sessions import Session
|
||||
from sentry_sdk.hub import Hub
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik import ENV_GIT_HASH_KEY, __version__
|
||||
from authentik import get_full_version
|
||||
|
||||
OUTPOST_REMOTE_IP_HEADER = "HTTP_X_AUTHENTIK_REMOTE_IP"
|
||||
OUTPOST_TOKEN_HEADER = "HTTP_X_AUTHENTIK_OUTPOST_TOKEN" # nosec
|
||||
@ -75,8 +74,7 @@ def get_client_ip(request: Optional[HttpRequest]) -> str:
|
||||
|
||||
def authentik_user_agent() -> str:
|
||||
"""Get a common user agent"""
|
||||
build = environ.get(ENV_GIT_HASH_KEY, "tagged")
|
||||
return f"authentik@{__version__} (build={build})"
|
||||
return f"authentik@{get_full_version()}"
|
||||
|
||||
|
||||
def get_http_session() -> Session:
|
||||
|
@ -58,4 +58,6 @@ def get_env() -> str:
|
||||
return "compose"
|
||||
if CONFIG.y_bool("debug"):
|
||||
return "dev"
|
||||
if "AK_APPLIANCE" in os.environ:
|
||||
return os.environ["AK_APPLIANCE"]
|
||||
return "custom"
|
||||
|
@ -1,6 +1,4 @@
|
||||
"""Outpost API Views"""
|
||||
from os import environ
|
||||
|
||||
from dacite.core import from_dict
|
||||
from dacite.exceptions import DaciteError
|
||||
from django_filters.filters import ModelMultipleChoiceFilter
|
||||
@ -14,7 +12,7 @@ from rest_framework.response import Response
|
||||
from rest_framework.serializers import JSONField, ModelSerializer, ValidationError
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik import ENV_GIT_HASH_KEY
|
||||
from authentik import get_build_hash
|
||||
from authentik.core.api.providers import ProviderSerializer
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import PassiveSerializer, is_dict
|
||||
@ -154,7 +152,7 @@ class OutpostViewSet(UsedByMixin, ModelViewSet):
|
||||
"version_should": state.version_should,
|
||||
"version_outdated": state.version_outdated,
|
||||
"build_hash": state.build_hash,
|
||||
"build_hash_should": environ.get(ENV_GIT_HASH_KEY, ""),
|
||||
"build_hash_should": get_build_hash(),
|
||||
}
|
||||
)
|
||||
return Response(OutpostHealthSerializer(states, many=True).data)
|
||||
|
@ -55,6 +55,10 @@ class OutpostConsumer(AuthJsonConsumer):
|
||||
|
||||
first_msg = False
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.logger = get_logger()
|
||||
|
||||
def connect(self):
|
||||
super().connect()
|
||||
uuid = self.scope["url_route"]["kwargs"]["pk"]
|
||||
@ -65,7 +69,7 @@ class OutpostConsumer(AuthJsonConsumer):
|
||||
)
|
||||
if not outpost:
|
||||
raise DenyConnection()
|
||||
self.logger = get_logger().bind(outpost=outpost)
|
||||
self.logger = self.logger.bind(outpost=outpost)
|
||||
try:
|
||||
self.accept()
|
||||
except RuntimeError as exc:
|
||||
|
@ -1,12 +1,11 @@
|
||||
"""Base Controller"""
|
||||
from dataclasses import dataclass
|
||||
from os import environ
|
||||
from typing import Optional
|
||||
|
||||
from structlog.stdlib import get_logger
|
||||
from structlog.testing import capture_logs
|
||||
|
||||
from authentik import ENV_GIT_HASH_KEY, __version__
|
||||
from authentik import __version__, get_build_hash
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.outposts.models import (
|
||||
@ -102,5 +101,5 @@ class BaseController:
|
||||
return image_name_template % {
|
||||
"type": self.outpost.type,
|
||||
"version": __version__,
|
||||
"build_hash": environ.get(ENV_GIT_HASH_KEY, ""),
|
||||
"build_hash": get_build_hash(),
|
||||
}
|
||||
|
@ -9,6 +9,7 @@ from docker import DockerClient as UpstreamDockerClient
|
||||
from docker.errors import DockerException, NotFound
|
||||
from docker.models.containers import Container
|
||||
from docker.utils.utils import kwargs_from_env
|
||||
from paramiko.ssh_exception import SSHException
|
||||
from structlog.stdlib import get_logger
|
||||
from yaml import safe_dump
|
||||
|
||||
@ -49,10 +50,13 @@ class DockerClient(UpstreamDockerClient, BaseClient):
|
||||
authentication_kp=connection.tls_authentication,
|
||||
)
|
||||
tls_config = self.tls.write()
|
||||
super().__init__(
|
||||
base_url=connection.url,
|
||||
tls=tls_config,
|
||||
)
|
||||
try:
|
||||
super().__init__(
|
||||
base_url=connection.url,
|
||||
tls=tls_config,
|
||||
)
|
||||
except SSHException as exc:
|
||||
raise ServiceConnectionInvalid from exc
|
||||
self.logger = get_logger()
|
||||
# Ensure the client actually works
|
||||
self.containers.list()
|
||||
@ -102,9 +106,12 @@ class DockerController(BaseController):
|
||||
).lower()
|
||||
|
||||
def _get_labels(self) -> dict[str, str]:
|
||||
return {
|
||||
labels = {
|
||||
"io.goauthentik.outpost-uuid": self.outpost.pk.hex,
|
||||
}
|
||||
if self.outpost.config.docker_labels:
|
||||
labels.update(self.outpost.config.docker_labels)
|
||||
return labels
|
||||
|
||||
def _get_env(self) -> dict[str, str]:
|
||||
return {
|
||||
|
@ -18,6 +18,7 @@ from kubernetes.client import (
|
||||
V1SecretKeySelector,
|
||||
)
|
||||
|
||||
from authentik import __version__, get_full_version
|
||||
from authentik.outposts.controllers.base import FIELD_MANAGER
|
||||
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
|
||||
from authentik.outposts.controllers.k8s.triggers import NeedsUpdate
|
||||
@ -52,15 +53,18 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
|
||||
raise NeedsUpdate()
|
||||
super().reconcile(current, reference)
|
||||
|
||||
def get_pod_meta(self) -> dict[str, str]:
|
||||
def get_pod_meta(self, **kwargs) -> dict[str, str]:
|
||||
"""Get common object metadata"""
|
||||
return {
|
||||
"app.kubernetes.io/name": "authentik-outpost",
|
||||
"app.kubernetes.io/managed-by": "goauthentik.io",
|
||||
"goauthentik.io/outpost-uuid": self.controller.outpost.uuid.hex,
|
||||
"goauthentik.io/outpost-name": slugify(self.controller.outpost.name),
|
||||
"goauthentik.io/outpost-type": str(self.controller.outpost.type),
|
||||
}
|
||||
kwargs.update(
|
||||
{
|
||||
"app.kubernetes.io/name": f"authentik-outpost-{self.outpost.type}",
|
||||
"app.kubernetes.io/managed-by": "goauthentik.io",
|
||||
"goauthentik.io/outpost-uuid": self.controller.outpost.uuid.hex,
|
||||
"goauthentik.io/outpost-name": slugify(self.controller.outpost.name),
|
||||
"goauthentik.io/outpost-type": str(self.controller.outpost.type),
|
||||
}
|
||||
)
|
||||
return kwargs
|
||||
|
||||
def get_reference_object(self) -> V1Deployment:
|
||||
"""Get deployment object for outpost"""
|
||||
@ -77,13 +81,24 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
|
||||
meta = self.get_object_meta(name=self.name)
|
||||
image_name = self.controller.get_container_image()
|
||||
image_pull_secrets = self.outpost.config.kubernetes_image_pull_secrets
|
||||
version = get_full_version()
|
||||
return V1Deployment(
|
||||
metadata=meta,
|
||||
spec=V1DeploymentSpec(
|
||||
replicas=self.outpost.config.kubernetes_replicas,
|
||||
selector=V1LabelSelector(match_labels=self.get_pod_meta()),
|
||||
template=V1PodTemplateSpec(
|
||||
metadata=V1ObjectMeta(labels=self.get_pod_meta()),
|
||||
metadata=V1ObjectMeta(
|
||||
labels=self.get_pod_meta(
|
||||
**{
|
||||
# Support istio-specific labels, but also use the standard k8s
|
||||
# recommendations
|
||||
"app.kubernetes.io/version": version,
|
||||
"app": "authentik-outpost",
|
||||
"version": version,
|
||||
}
|
||||
)
|
||||
),
|
||||
spec=V1PodSpec(
|
||||
image_pull_secrets=[
|
||||
V1ObjectReference(name=secret) for secret in image_pull_secrets
|
||||
|
@ -6,6 +6,7 @@ from kubernetes.client import CoreV1Api, V1Service, V1ServicePort, V1ServiceSpec
|
||||
from authentik.outposts.controllers.base import FIELD_MANAGER
|
||||
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
|
||||
from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler
|
||||
from authentik.outposts.controllers.k8s.triggers import NeedsUpdate
|
||||
from authentik.outposts.controllers.k8s.utils import compare_ports
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@ -25,6 +26,8 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
|
||||
# after an authentik update. However the ports might have also changed during
|
||||
# the update, so this causes the service to be re-created with higher
|
||||
# priority than being updated.
|
||||
if current.spec.selector != reference.spec.selector:
|
||||
raise NeedsUpdate()
|
||||
super().reconcile(current, reference)
|
||||
|
||||
def get_reference_object(self) -> V1Service:
|
||||
|
@ -2,6 +2,7 @@
|
||||
from pathlib import Path
|
||||
|
||||
from kubernetes.client.models.v1_container_port import V1ContainerPort
|
||||
from kubernetes.client.models.v1_service_port import V1ServicePort
|
||||
from kubernetes.config.incluster_config import SERVICE_TOKEN_FILENAME
|
||||
|
||||
from authentik.outposts.controllers.k8s.triggers import NeedsRecreate
|
||||
@ -16,10 +17,31 @@ def get_namespace() -> str:
|
||||
return "default"
|
||||
|
||||
|
||||
def compare_ports(current: list[V1ContainerPort], reference: list[V1ContainerPort]):
|
||||
def compare_port(
|
||||
current: V1ServicePort | V1ContainerPort, reference: V1ServicePort | V1ContainerPort
|
||||
) -> bool:
|
||||
"""Compare a single port"""
|
||||
if current.name != reference.name:
|
||||
return False
|
||||
if current.protocol != reference.protocol:
|
||||
return False
|
||||
if isinstance(current, V1ServicePort) and isinstance(reference, V1ServicePort):
|
||||
# We only care about the target port
|
||||
if current.target_port != reference.target_port:
|
||||
return False
|
||||
if isinstance(current, V1ContainerPort) and isinstance(reference, V1ContainerPort):
|
||||
# We only care about the target port
|
||||
if current.container_port != reference.container_port:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def compare_ports(
|
||||
current: list[V1ServicePort | V1ContainerPort], reference: list[V1ServicePort | V1ContainerPort]
|
||||
):
|
||||
"""Compare ports of a list"""
|
||||
if len(current) != len(reference):
|
||||
raise NeedsRecreate()
|
||||
for port in reference:
|
||||
if port not in current:
|
||||
if not any(compare_port(port, current_port) for current_port in current):
|
||||
raise NeedsRecreate()
|
||||
|
@ -3,6 +3,8 @@ import os
|
||||
from pathlib import Path
|
||||
from tempfile import gettempdir
|
||||
|
||||
from docker.errors import DockerException
|
||||
|
||||
from authentik.crypto.models import CertificateKeyPair
|
||||
|
||||
HEADER = "### Managed by authentik"
|
||||
@ -27,6 +29,8 @@ class DockerInlineSSH:
|
||||
def __init__(self, host: str, keypair: CertificateKeyPair) -> None:
|
||||
self.host = host
|
||||
self.keypair = keypair
|
||||
if not self.keypair:
|
||||
raise DockerException("keypair must be set for SSH connections")
|
||||
self.config_path = Path("~/.ssh/config").expanduser()
|
||||
self.header = f"{HEADER} - {self.host}\n"
|
||||
|
||||
|
@ -1,7 +1,6 @@
|
||||
"""Outpost models"""
|
||||
from dataclasses import asdict, dataclass, field
|
||||
from datetime import datetime
|
||||
from os import environ
|
||||
from typing import Iterable, Optional
|
||||
from uuid import uuid4
|
||||
|
||||
@ -17,7 +16,7 @@ from model_utils.managers import InheritanceManager
|
||||
from packaging.version import LegacyVersion, Version, parse
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik import ENV_GIT_HASH_KEY, __version__
|
||||
from authentik import __version__, get_build_hash
|
||||
from authentik.core.models import (
|
||||
USER_ATTRIBUTE_CAN_OVERRIDE_IP,
|
||||
USER_ATTRIBUTE_SA,
|
||||
@ -61,6 +60,7 @@ class OutpostConfig:
|
||||
|
||||
docker_network: Optional[str] = field(default=None)
|
||||
docker_map_ports: bool = field(default=True)
|
||||
docker_labels: Optional[dict[str, str]] = field(default=None)
|
||||
|
||||
container_image: Optional[str] = field(default=None)
|
||||
|
||||
@ -414,7 +414,7 @@ class OutpostState:
|
||||
"""Check if outpost version matches our version"""
|
||||
if not self.version:
|
||||
return False
|
||||
if self.build_hash != environ.get(ENV_GIT_HASH_KEY, ""):
|
||||
if self.build_hash != get_build_hash():
|
||||
return False
|
||||
return parse(self.version) < OUR_VERSION
|
||||
|
||||
|
@ -23,6 +23,7 @@ from authentik.events.monitored_tasks import (
|
||||
TaskResultStatus,
|
||||
prefill_task,
|
||||
)
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.utils.reflection import path_to_class
|
||||
from authentik.outposts.controllers.base import BaseController, ControllerException
|
||||
from authentik.outposts.controllers.docker import DockerClient
|
||||
@ -77,8 +78,12 @@ def outpost_service_connection_state(connection_pk: Any):
|
||||
cls = DockerClient
|
||||
if isinstance(connection, KubernetesServiceConnection):
|
||||
cls = KubernetesClient
|
||||
with cls(connection) as client:
|
||||
state = client.fetch_state()
|
||||
try:
|
||||
with cls(connection) as client:
|
||||
state = client.fetch_state()
|
||||
except ServiceConnectionInvalid as exc:
|
||||
LOGGER.warning("Failed to get client status", exc=exc)
|
||||
return
|
||||
cache.set(connection.state_key, state, timeout=None)
|
||||
|
||||
|
||||
@ -227,6 +232,9 @@ def _outpost_single_update(outpost: Outpost, layer=None):
|
||||
@CELERY_APP.task()
|
||||
def outpost_local_connection():
|
||||
"""Checks the local environment and create Service connections."""
|
||||
if not CONFIG.y_bool("outposts.discover"):
|
||||
LOGGER.debug("outpost integration discovery is disabled")
|
||||
return
|
||||
# Explicitly check against token filename, as that's
|
||||
# only present when the integration is enabled
|
||||
if Path(SERVICE_TOKEN_FILENAME).exists():
|
||||
|
@ -5,7 +5,7 @@ from typing import Iterator, Optional
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.http import HttpRequest
|
||||
from prometheus_client import Histogram
|
||||
from prometheus_client import Gauge, Histogram
|
||||
from sentry_sdk.hub import Hub
|
||||
from sentry_sdk.tracing import Span
|
||||
from structlog.stdlib import BoundLogger, get_logger
|
||||
@ -14,13 +14,11 @@ from authentik.core.models import User
|
||||
from authentik.policies.models import Policy, PolicyBinding, PolicyBindingModel, PolicyEngineMode
|
||||
from authentik.policies.process import PolicyProcess, cache_key
|
||||
from authentik.policies.types import PolicyRequest, PolicyResult
|
||||
from authentik.root.monitoring import UpdatingGauge
|
||||
|
||||
CURRENT_PROCESS = current_process()
|
||||
GAUGE_POLICIES_CACHED = UpdatingGauge(
|
||||
GAUGE_POLICIES_CACHED = Gauge(
|
||||
"authentik_policies_cached",
|
||||
"Cached Policies",
|
||||
update_func=lambda: len(cache.keys("policy_*") or []),
|
||||
)
|
||||
HIST_POLICIES_BUILD_TIME = Histogram(
|
||||
"authentik_policies_build_time",
|
||||
|
@ -45,7 +45,7 @@ class HaveIBeenPwendPolicy(Policy):
|
||||
fields=request.context.keys(),
|
||||
)
|
||||
return PolicyResult(False, _("Password not set in context"))
|
||||
password = request.context[self.password_field]
|
||||
password = str(request.context[self.password_field])
|
||||
|
||||
pw_hash = sha1(password.encode("utf-8")).hexdigest() # nosec
|
||||
url = f"https://api.pwnedpasswords.com/range/{pw_hash[:5]}"
|
||||
|
@ -5,10 +5,19 @@ from django.dispatch import receiver
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.api.applications import user_app_cache_key
|
||||
from authentik.policies.engine import GAUGE_POLICIES_CACHED
|
||||
from authentik.root.monitoring import monitoring_set
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
@receiver(monitoring_set)
|
||||
# pylint: disable=unused-argument
|
||||
def monitoring_set_policies(sender, **kwargs):
|
||||
"""set policy gauges"""
|
||||
GAUGE_POLICIES_CACHED.set(len(cache.keys("policy_*") or []))
|
||||
|
||||
|
||||
@receiver(post_save)
|
||||
# pylint: disable=unused-argument
|
||||
def invalidate_policy_cache(sender, instance, **_):
|
||||
|
@ -45,6 +45,13 @@ class GrantTypes(models.TextChoices):
|
||||
HYBRID = "hybrid"
|
||||
|
||||
|
||||
class ResponseMode(models.TextChoices):
|
||||
"""https://openid.net/specs/oauth-v2-multiple-response-types-1_0.html#OAuth.Post"""
|
||||
|
||||
QUERY = "query"
|
||||
FRAGMENT = "fragment"
|
||||
|
||||
|
||||
class SubModes(models.TextChoices):
|
||||
"""Mode after which 'sub' attribute is generateed, for compatibility reasons"""
|
||||
|
||||
|
@ -43,7 +43,7 @@ class TestAuthorize(OAuthTestCase):
|
||||
name="test",
|
||||
client_id="test",
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris="http://local.invalid",
|
||||
redirect_uris="http://local.invalid/Foo",
|
||||
)
|
||||
with self.assertRaises(AuthorizeError):
|
||||
request = self.factory.get(
|
||||
@ -51,7 +51,7 @@ class TestAuthorize(OAuthTestCase):
|
||||
data={
|
||||
"response_type": "code",
|
||||
"client_id": "test",
|
||||
"redirect_uri": "http://local.invalid",
|
||||
"redirect_uri": "http://local.invalid/Foo",
|
||||
"request": "foo",
|
||||
},
|
||||
)
|
||||
@ -105,26 +105,30 @@ class TestAuthorize(OAuthTestCase):
|
||||
name="test",
|
||||
client_id="test",
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris="http://local.invalid",
|
||||
redirect_uris="http://local.invalid/Foo",
|
||||
)
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
data={
|
||||
"response_type": "code",
|
||||
"client_id": "test",
|
||||
"redirect_uri": "http://local.invalid",
|
||||
"redirect_uri": "http://local.invalid/Foo",
|
||||
},
|
||||
)
|
||||
self.assertEqual(
|
||||
OAuthAuthorizationParams.from_request(request).grant_type,
|
||||
GrantTypes.AUTHORIZATION_CODE,
|
||||
)
|
||||
self.assertEqual(
|
||||
OAuthAuthorizationParams.from_request(request).redirect_uri,
|
||||
"http://local.invalid/Foo",
|
||||
)
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
data={
|
||||
"response_type": "id_token",
|
||||
"client_id": "test",
|
||||
"redirect_uri": "http://local.invalid",
|
||||
"redirect_uri": "http://local.invalid/Foo",
|
||||
"scope": "openid",
|
||||
"state": "foo",
|
||||
},
|
||||
@ -140,7 +144,7 @@ class TestAuthorize(OAuthTestCase):
|
||||
data={
|
||||
"response_type": "id_token",
|
||||
"client_id": "test",
|
||||
"redirect_uri": "http://local.invalid",
|
||||
"redirect_uri": "http://local.invalid/Foo",
|
||||
"state": "foo",
|
||||
},
|
||||
)
|
||||
@ -153,7 +157,7 @@ class TestAuthorize(OAuthTestCase):
|
||||
data={
|
||||
"response_type": "code token",
|
||||
"client_id": "test",
|
||||
"redirect_uri": "http://local.invalid",
|
||||
"redirect_uri": "http://local.invalid/Foo",
|
||||
"scope": "openid",
|
||||
"state": "foo",
|
||||
},
|
||||
@ -167,7 +171,7 @@ class TestAuthorize(OAuthTestCase):
|
||||
data={
|
||||
"response_type": "invalid",
|
||||
"client_id": "test",
|
||||
"redirect_uri": "http://local.invalid",
|
||||
"redirect_uri": "http://local.invalid/Foo",
|
||||
},
|
||||
)
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
|
@@ -44,6 +44,7 @@ from authentik.providers.oauth2.models import (
AuthorizationCode,
GrantTypes,
OAuth2Provider,
ResponseMode,
ResponseTypes,
)
from authentik.providers.oauth2.utils import HttpResponseRedirectScheme
@@ -153,16 +154,26 @@ class OAuthAuthorizationParams:
def check_redirect_uri(self):
"""Redirect URI validation."""
allowed_redirect_urls = self.provider.redirect_uris.split()
if not self.redirect_uri:
# We don't want to actually lowercase the final URL we redirect to,
# we only lowercase it for comparison
redirect_uri = self.redirect_uri.lower()
if not redirect_uri:
LOGGER.warning("Missing redirect uri.")
raise RedirectUriError("", allowed_redirect_urls)
if len(allowed_redirect_urls) < 1:

if self.provider.redirect_uris == "":
LOGGER.info("Setting redirect for blank redirect_uris", redirect=self.redirect_uri)
self.provider.redirect_uris = self.redirect_uri
self.provider.save()
allowed_redirect_urls = self.provider.redirect_uris.split()

if self.provider.redirect_uris == "*":
LOGGER.warning(
"Provider has no allowed redirect_uri set, allowing all.",
allow=self.redirect_uri.lower(),
"Provider has wildcard allowed redirect_uri set, allowing all.",
allow=self.redirect_uri,
)
return
if self.redirect_uri.lower() not in [x.lower() for x in allowed_redirect_urls]:
if redirect_uri not in [x.lower() for x in allowed_redirect_urls]:
LOGGER.warning(
"Invalid redirect uri",
redirect_uri=self.redirect_uri,
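Note: the change above lowercases the incoming redirect_uri only for comparison (the original casing is still used for the eventual redirect), treats a literal `*` allow-list as "allow all", and seeds an empty allow-list from the first URI it sees. A rough standalone sketch of just the comparison rule, assuming the allow-list is a whitespace-separated string as in the provider model:

```python
def redirect_uri_allowed(redirect_uri: str, allowed_uris: str) -> bool:
    """Case-insensitive allow-list check; a literal '*' allows everything.

    `allowed_uris` is a whitespace-separated string, mirroring
    OAuth2Provider.redirect_uris (an assumption made for this sketch).
    """
    if allowed_uris == "*":
        return True
    allowed = [uri.lower() for uri in allowed_uris.split()]
    return redirect_uri.lower() in allowed


assert redirect_uri_allowed("http://Local.Invalid/Foo", "http://local.invalid/foo")
assert not redirect_uri_allowed("http://evil.invalid/", "http://local.invalid/foo")
assert redirect_uri_allowed("http://anything.invalid/", "*")
```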
@@ -292,13 +303,23 @@ class OAuthFulfillmentStage(StageView):
code = self.params.create_code(self.request)
code.save(force_insert=True)

if self.params.grant_type == GrantTypes.AUTHORIZATION_CODE:
query_dict = self.request.POST if self.request.method == "POST" else self.request.GET
response_mode = ResponseMode.QUERY
# Get response mode from url param, otherwise decide based on grant type
if "response_mode" in query_dict:
response_mode = query_dict["response_mode"]
elif self.params.grant_type == GrantTypes.AUTHORIZATION_CODE:
response_mode = ResponseMode.QUERY
elif self.params.grant_type in [GrantTypes.IMPLICIT, GrantTypes.HYBRID]:
response_mode = ResponseMode.FRAGMENT

if response_mode == ResponseMode.QUERY:
query_params["code"] = code.code
query_params["state"] = [str(self.params.state) if self.params.state else ""]

uri = uri._replace(query=urlencode(query_params, doseq=True))
return urlunsplit(uri)
if self.params.grant_type in [GrantTypes.IMPLICIT, GrantTypes.HYBRID]:
if response_mode == ResponseMode.FRAGMENT:
query_fragment = self.create_implicit_response(code)

uri = uri._replace(
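Note: the hunk above lets the client pick `response_mode` explicitly and only falls back to the grant type when it is absent. A small sketch of that decision, with plain strings standing in for the ResponseMode/GrantTypes enums:

```python
def pick_response_mode(query: dict, grant_type: str) -> str:
    """Explicit response_mode wins; otherwise the grant type decides."""
    if "response_mode" in query:
        return query["response_mode"]
    if grant_type == "authorization_code":
        return "query"
    if grant_type in ("implicit", "hybrid"):
        return "fragment"
    return "query"


assert pick_response_mode({"response_mode": "fragment"}, "authorization_code") == "fragment"
assert pick_response_mode({}, "hybrid") == "fragment"
assert pick_response_mode({}, "authorization_code") == "query"
```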
@@ -66,7 +66,7 @@ class TokenParams:
provider=provider,
client_id=client_id,
client_secret=client_secret,
redirect_uri=request.POST.get("redirect_uri", ""),
redirect_uri=request.POST.get("redirect_uri", "").lower(),
grant_type=request.POST.get("grant_type", ""),
state=request.POST.get("state", ""),
scope=request.POST.get("scope", "").split(),
@@ -123,21 +123,23 @@ class TokenParams:
LOGGER.warning("Invalid grant type", grant_type=self.grant_type)
raise TokenError("unsupported_grant_type")

def __post_init_code(self, raw_code):
def __post_init_code(self, raw_code: str):
if not raw_code:
LOGGER.warning("Missing authorization code")
raise TokenError("invalid_grant")

allowed_redirect_urls = self.provider.redirect_uris.split()
if len(allowed_redirect_urls) < 1:
if self.provider.redirect_uris == "*":
LOGGER.warning(
"Provider has no allowed redirect_uri set, allowing all.",
allow=self.redirect_uri.lower(),
"Provider has wildcard allowed redirect_uri set, allowing all.",
redirect=self.redirect_uri,
)
elif self.redirect_uri.lower() not in [x.lower() for x in allowed_redirect_urls]:
# At this point, no provider should have a blank redirect_uri, in case they do
# this will check an empty array and raise an error
elif self.redirect_uri not in [x.lower() for x in allowed_redirect_urls]:
LOGGER.warning(
"Invalid redirect uri",
uri=self.redirect_uri,
redirect=self.redirect_uri,
expected=self.provider.redirect_uris.split(),
)
raise TokenError("invalid_client")
@@ -12,4 +12,8 @@ class AuthentikProviderProxyConfig(AppConfig):
verbose_name = "authentik Providers.Proxy"

def ready(self) -> None:
from authentik.providers.proxy.tasks import proxy_set_defaults

import_module("authentik.providers.proxy.managed")

proxy_set_defaults.delay()
@ -23,15 +23,17 @@ class ProxyDockerController(DockerController):
|
||||
proxy_provider: ProxyProvider
|
||||
external_host_name = urlparse(proxy_provider.external_host)
|
||||
hosts.append(f"`{external_host_name.netloc}`")
|
||||
traefik_name = f"ak-outpost-{self.outpost.pk.hex}"
|
||||
traefik_name = self.name
|
||||
labels = super()._get_labels()
|
||||
labels["traefik.enable"] = "true"
|
||||
labels[f"traefik.http.routers.{traefik_name}-router.rule"] = f"Host({','.join(hosts)})"
|
||||
labels[
|
||||
f"traefik.http.routers.{traefik_name}-router.rule"
|
||||
] = f"Host({','.join(hosts)}) && PathPrefix(`/outpost.goauthentik.io`)"
|
||||
labels[f"traefik.http.routers.{traefik_name}-router.tls"] = "true"
|
||||
labels[f"traefik.http.routers.{traefik_name}-router.service"] = f"{traefik_name}-service"
|
||||
labels[
|
||||
f"traefik.http.services.{traefik_name}-service.loadbalancer.healthcheck.path"
|
||||
] = "/akprox/ping"
|
||||
] = "/outpost.goauthentik.io/ping"
|
||||
labels[
|
||||
f"traefik.http.services.{traefik_name}-service.loadbalancer.healthcheck.port"
|
||||
] = "9300"
|
||||
|
@ -92,6 +92,8 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]):
|
||||
# Buffer sizes for large headers with JWTs
|
||||
"nginx.ingress.kubernetes.io/proxy-buffers-number": "4",
|
||||
"nginx.ingress.kubernetes.io/proxy-buffer-size": "16k",
|
||||
# Enable TLS in traefik
|
||||
"traefik.ingress.kubernetes.io/router.tls": "true",
|
||||
}
|
||||
annotations.update(self.controller.outpost.config.kubernetes_ingress_annotations)
|
||||
return annotations
|
||||
@ -126,7 +128,7 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]):
|
||||
port=V1ServiceBackendPort(name="http"),
|
||||
),
|
||||
),
|
||||
path="/akprox",
|
||||
path="/outpost.goauthentik.io",
|
||||
path_type="ImplementationSpecific",
|
||||
)
|
||||
]
|
||||
|
@ -119,15 +119,11 @@ class TraefikMiddlewareReconciler(KubernetesObjectReconciler[TraefikMiddleware])
|
||||
),
|
||||
spec=TraefikMiddlewareSpec(
|
||||
forwardAuth=TraefikMiddlewareSpecForwardAuth(
|
||||
address=f"http://{self.name}.{self.namespace}:9000/akprox/auth/traefik",
|
||||
address=(
|
||||
f"http://{self.name}.{self.namespace}:9000/"
|
||||
"outpost.goauthentik.io/auth/traefik"
|
||||
),
|
||||
authResponseHeaders=[
|
||||
# Legacy headers, remove after 2022.1
|
||||
"X-Auth-Username",
|
||||
"X-Auth-Groups",
|
||||
"X-Forwarded-Email",
|
||||
"X-Forwarded-Preferred-Username",
|
||||
"X-Forwarded-User",
|
||||
# New headers, unique prefix
|
||||
"X-authentik-username",
|
||||
"X-authentik-groups",
|
||||
"X-authentik-email",
|
||||
|
@@ -27,7 +27,7 @@ def get_cookie_secret():


def _get_callback_url(uri: str) -> str:
return urljoin(uri, "/akprox/callback")
return urljoin(uri, "outpost.goauthentik.io/callback")


class ProxyMode(models.TextChoices):

authentik/providers/proxy/tasks.py (new file, 11 lines)
@@ -0,0 +1,11 @@
"""proxy provider tasks"""
from authentik.providers.proxy.models import ProxyProvider
from authentik.root.celery import CELERY_APP


@CELERY_APP.task()
def proxy_set_defaults():
"""Ensure correct defaults are set for all providers"""
for provider in ProxyProvider.objects.all():
provider.set_oauth_defaults()
provider.save()
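Note: the new `proxy_set_defaults` task is queued from the provider app's `ready()` hook (see the apps.py hunk above) via `.delay()`, so defaults are re-applied asynchronously on startup instead of blocking app loading. A generic sketch of that pattern, with an illustrative Celery app and task name rather than the project's real wiring:

```python
from celery import Celery

# Illustrative broker URL; the project routes this through
# authentik.root.celery.CELERY_APP instead.
app = Celery("example", broker="redis://localhost:6379/0")


@app.task()
def set_defaults():
    """Re-apply provider defaults; body elided in this sketch."""


def on_startup():
    # .delay() only enqueues the task; a worker picks it up later,
    # so startup is never blocked by the maintenance work itself.
    set_defaults.delay()
```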
@ -15,6 +15,7 @@ from authentik.providers.saml.processors.request_parser import AuthNRequestParse
|
||||
from authentik.sources.saml.exceptions import MismatchedRequestID
|
||||
from authentik.sources.saml.models import SAMLSource
|
||||
from authentik.sources.saml.processors.constants import (
|
||||
SAML_BINDING_REDIRECT,
|
||||
SAML_NAME_ID_FORMAT_EMAIL,
|
||||
SAML_NAME_ID_FORMAT_UNSPECIFIED,
|
||||
)
|
||||
@ -98,6 +99,9 @@ class TestAuthNRequest(TestCase):
|
||||
|
||||
# First create an AuthNRequest
|
||||
request_proc = RequestProcessor(self.source, http_request, "test_state")
|
||||
auth_n = request_proc.get_auth_n()
|
||||
self.assertEqual(auth_n.attrib["ProtocolBinding"], SAML_BINDING_REDIRECT)
|
||||
|
||||
request = request_proc.build_auth_n()
|
||||
# Now we check the ID and signature
|
||||
parsed_request = AuthNRequestParser(self.provider).parse(
|
||||
|
@@ -1,37 +1,17 @@
"""Metrics view"""
from base64 import b64encode
from typing import Callable

from django.conf import settings
from django.db import connections
from django.db.utils import OperationalError
from django.dispatch import Signal
from django.http import HttpRequest, HttpResponse
from django.views import View
from django_prometheus.exports import ExportToDjangoView
from django_redis import get_redis_connection
from prometheus_client import Gauge
from redis.exceptions import RedisError

from authentik.admin.api.workers import GAUGE_WORKERS
from authentik.events.monitored_tasks import TaskInfo
from authentik.root.celery import CELERY_APP


class UpdatingGauge(Gauge):
"""Gauge which fetches its own value from an update function.

Update function is called on instantiate"""

def __init__(self, *args, update_func: Callable, **kwargs):
super().__init__(*args, **kwargs)
self._update_func = update_func
self.update()

def update(self):
"""Set value from update function"""
val = self._update_func()
if val:
self.set(val)
monitoring_set = Signal()


class MetricsView(View):
@@ -49,11 +29,7 @@ class MetricsView(View):
response["WWW-Authenticate"] = 'Basic realm="authentik-monitoring"'
return response

count = len(CELERY_APP.control.ping(timeout=0.5))
GAUGE_WORKERS.set(count)

for task in TaskInfo.all().values():
task.set_prom_metrics()
monitoring_set.send_robust(self)

return ExportToDjangoView(request)
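Note: this refactor drops the self-updating `UpdatingGauge` in favour of a plain `Gauge` plus a `monitoring_set` signal that is broadcast right before metrics are exported, so each app refreshes its own gauges on demand. A condensed sketch of that wiring (receiver and gauge names are illustrative, not the project's):

```python
from django.dispatch import Signal, receiver
from prometheus_client import Gauge

# Fired by the metrics view just before export.
monitoring_set = Signal()

GAUGE_EXAMPLE = Gauge("example_cached_objects", "Cached objects")


@receiver(monitoring_set)
def refresh_example_gauge(sender, **kwargs):
    """Recompute the gauge value when metrics are about to be scraped."""
    GAUGE_EXAMPLE.set(42)  # e.g. len(cache.keys("policy_*") or [])


def export_metrics():
    # send_robust keeps exporting even if one receiver raises.
    monitoring_set.send_robust(sender=None)
    # ...then hand off to ExportToDjangoView(request) as in the view above.
```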
@ -1,14 +1,4 @@
|
||||
"""
|
||||
Django settings for authentik project.
|
||||
|
||||
Generated by 'django-admin startproject' using Django 2.1.3.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/2.1/topics/settings/
|
||||
|
||||
For the full list of settings and their values, see
|
||||
https://docs.djangoproject.com/en/2.1/ref/settings/
|
||||
"""
|
||||
"""root settings for authentik"""
|
||||
|
||||
import importlib
|
||||
import logging
|
||||
@ -16,26 +6,23 @@ import os
|
||||
import sys
|
||||
from hashlib import sha512
|
||||
from json import dumps
|
||||
from tempfile import gettempdir
|
||||
from time import time
|
||||
from urllib.parse import quote
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
import structlog
|
||||
from celery.schedules import crontab
|
||||
from sentry_sdk import init as sentry_init
|
||||
from sentry_sdk.api import set_tag
|
||||
from sentry_sdk.integrations.boto3 import Boto3Integration
|
||||
from sentry_sdk.integrations.celery import CeleryIntegration
|
||||
from sentry_sdk.integrations.django import DjangoIntegration
|
||||
from sentry_sdk.integrations.redis import RedisIntegration
|
||||
from sentry_sdk.integrations.threading import ThreadingIntegration
|
||||
|
||||
from authentik import ENV_GIT_HASH_KEY, __version__
|
||||
from authentik import ENV_GIT_HASH_KEY, __version__, get_build_hash
|
||||
from authentik.core.middleware import structlog_add_request_id
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.logging import add_process_id
|
||||
from authentik.lib.sentry import before_send
|
||||
from authentik.lib.utils.http import get_http_session
|
||||
from authentik.lib.utils.reflection import get_env
|
||||
from authentik.stages.password import BACKEND_APP_PASSWORD, BACKEND_INBUILT, BACKEND_LDAP
|
||||
|
||||
@ -75,6 +62,7 @@ AUTH_USER_MODEL = "authentik_core.User"
|
||||
|
||||
_cookie_suffix = "_debug" if DEBUG else ""
|
||||
CSRF_COOKIE_NAME = "authentik_csrf"
|
||||
CSRF_HEADER_NAME = "HTTP_X_AUTHENTIK_CSRF"
|
||||
LANGUAGE_COOKIE_NAME = f"authentik_language{_cookie_suffix}"
|
||||
SESSION_COOKIE_NAME = f"authentik_session{_cookie_suffix}"
|
||||
SESSION_COOKIE_DOMAIN = CONFIG.y("cookie_domain", None)
|
||||
@ -148,7 +136,6 @@ INSTALLED_APPS = [
|
||||
"guardian",
|
||||
"django_prometheus",
|
||||
"channels",
|
||||
"dbbackup",
|
||||
]
|
||||
|
||||
GUARDIAN_MONKEY_PATCH = False
|
||||
@ -164,9 +151,6 @@ SPECTACULAR_SETTINGS = {
|
||||
{
|
||||
"url": "/api/v3/",
|
||||
},
|
||||
{
|
||||
"url": "/api/v2beta/",
|
||||
},
|
||||
],
|
||||
"CONTACT": {
|
||||
"email": "hello@beryju.org",
|
||||
@@ -222,7 +206,7 @@ if CONFIG.y_bool("redis.tls", False):
REDIS_CELERY_TLS_REQUIREMENTS = f"?ssl_cert_reqs={CONFIG.y('redis.tls_reqs')}"
_redis_url = (
f"{REDIS_PROTOCOL_PREFIX}:"
f"{quote(CONFIG.y('redis.password'))}@{quote(CONFIG.y('redis.host'))}:"
f"{quote_plus(CONFIG.y('redis.password'))}@{quote_plus(CONFIG.y('redis.host'))}:"
f"{int(CONFIG.y('redis.port'))}"
)

@@ -349,6 +333,7 @@ LOCALE_PATHS = ["./locale"]
# Celery settings
# Add a 10 minute timeout to all Celery tasks.
CELERY_TASK_SOFT_TIME_LIMIT = 600
CELERY_WORKER_MAX_TASKS_PER_CHILD = 50
CELERY_BEAT_SCHEDULE = {
"clean_expired_models": {
"task": "authentik.core.tasks.clean_expired_models",
@@ -357,7 +342,7 @@ CELERY_BEAT_SCHEDULE = {
},
"db_backup": {
"task": "authentik.core.tasks.backup_database",
"schedule": crontab(hour="*/24"),
"schedule": crontab(hour="*/24", minute=0),
"options": {"queue": "authentik_scheduled"},
},
}
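Note: switching from `quote` to `quote_plus` above matters when the Redis password contains characters such as `/`, `+` or spaces, which must not survive unescaped inside the credential section of the connection URL. For illustration:

```python
from urllib.parse import quote, quote_plus

password = "p@ss/word+1 "

print(quote(password))       # 'p%40ss/word%2B1%20'  -- '/' is left alone
print(quote_plus(password))  # 'p%40ss%2Fword%2B1+'  -- '/' becomes %2F, space becomes '+'
```

A stray `/` in the userinfo part of `redis://:password@host:port/db` would otherwise be parsed as the start of the path, breaking the URL.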
@ -370,38 +355,8 @@ CELERY_RESULT_BACKEND = (
|
||||
f"{_redis_url}/{CONFIG.y('redis.message_queue_db')}{REDIS_CELERY_TLS_REQUIREMENTS}"
|
||||
)
|
||||
|
||||
# Database backup
|
||||
DBBACKUP_STORAGE = "django.core.files.storage.FileSystemStorage"
|
||||
DBBACKUP_STORAGE_OPTIONS = {"location": "./backups" if DEBUG else "/backups"}
|
||||
DBBACKUP_FILENAME_TEMPLATE = f"authentik-backup-{__version__}-{{datetime}}.sql"
|
||||
DBBACKUP_CONNECTOR_MAPPING = {
|
||||
"django_prometheus.db.backends.postgresql": "dbbackup.db.postgresql.PgDumpConnector",
|
||||
}
|
||||
DBBACKUP_TMP_DIR = gettempdir() if DEBUG else "/tmp" # nosec
|
||||
DBBACKUP_CLEANUP_KEEP = 30
|
||||
if CONFIG.y("postgresql.s3_backup.bucket", "") != "":
|
||||
DBBACKUP_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
|
||||
DBBACKUP_STORAGE_OPTIONS = {
|
||||
"access_key": CONFIG.y("postgresql.s3_backup.access_key"),
|
||||
"secret_key": CONFIG.y("postgresql.s3_backup.secret_key"),
|
||||
"bucket_name": CONFIG.y("postgresql.s3_backup.bucket"),
|
||||
"region_name": CONFIG.y("postgresql.s3_backup.region", "eu-central-1"),
|
||||
"default_acl": "private",
|
||||
"endpoint_url": CONFIG.y("postgresql.s3_backup.host"),
|
||||
"location": CONFIG.y("postgresql.s3_backup.location", ""),
|
||||
"verify": not CONFIG.y_bool("postgresql.s3_backup.insecure_skip_verify", False),
|
||||
}
|
||||
j_print(
|
||||
"Database backup to S3 is configured",
|
||||
host=CONFIG.y("postgresql.s3_backup.host"),
|
||||
)
|
||||
|
||||
# Sentry integration
|
||||
SENTRY_DSN = "https://a579bb09306d4f8b8d8847c052d3a1d3@sentry.beryju.org/8"
|
||||
# Default to empty string as that is what docker has
|
||||
build_hash = os.environ.get(ENV_GIT_HASH_KEY, "")
|
||||
if build_hash == "":
|
||||
build_hash = "tagged"
|
||||
|
||||
env = get_env()
|
||||
_ERROR_REPORTING = CONFIG.y_bool("error_reporting.enabled", False)
|
||||
@ -413,7 +368,6 @@ if _ERROR_REPORTING:
|
||||
DjangoIntegration(transaction_style="function_name"),
|
||||
CeleryIntegration(),
|
||||
RedisIntegration(),
|
||||
Boto3Integration(),
|
||||
ThreadingIntegration(propagate_hub=True),
|
||||
],
|
||||
before_send=before_send,
|
||||
@ -422,35 +376,14 @@ if _ERROR_REPORTING:
|
||||
environment=CONFIG.y("error_reporting.environment", "customer"),
|
||||
send_default_pii=CONFIG.y_bool("error_reporting.send_pii", False),
|
||||
)
|
||||
set_tag("authentik.build_hash", build_hash)
|
||||
set_tag("authentik.build_hash", get_build_hash("tagged"))
|
||||
set_tag("authentik.env", env)
|
||||
set_tag("authentik.component", "backend")
|
||||
set_tag("authentik.uuid", sha512(SECRET_KEY.encode("ascii")).hexdigest()[:16])
|
||||
set_tag("authentik.uuid", sha512(str(SECRET_KEY).encode("ascii")).hexdigest()[:16])
|
||||
j_print(
|
||||
"Error reporting is enabled",
|
||||
env=CONFIG.y("error_reporting.environment", "customer"),
|
||||
)
|
||||
if not CONFIG.y_bool("disable_startup_analytics", False):
|
||||
should_send = env not in ["dev", "ci"]
|
||||
if should_send:
|
||||
try:
|
||||
get_http_session().post(
|
||||
"https://goauthentik.io/api/event",
|
||||
json={
|
||||
"domain": "authentik",
|
||||
"name": "pageview",
|
||||
"referrer": f"{__version__} ({build_hash})",
|
||||
"url": f"http://localhost/{env}?utm_source={__version__}&utm_medium={env}",
|
||||
},
|
||||
headers={
|
||||
"User-Agent": sha512(SECRET_KEY.encode("ascii")).hexdigest()[:16],
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
timeout=5,
|
||||
)
|
||||
# pylint: disable=bare-except
|
||||
except: # nosec
|
||||
pass
|
||||
|
||||
# Static files (CSS, JavaScript, Images)
|
||||
# https://docs.djangoproject.com/en/2.1/howto/static-files/
|
||||
@ -532,12 +465,9 @@ _LOGGING_HANDLER_MAP = {
|
||||
"urllib3": "WARNING",
|
||||
"websockets": "WARNING",
|
||||
"daphne": "WARNING",
|
||||
"dbbackup": "ERROR",
|
||||
"kubernetes": "INFO",
|
||||
"asyncio": "WARNING",
|
||||
"aioredis": "WARNING",
|
||||
"s3transfer": "WARNING",
|
||||
"botocore": "WARNING",
|
||||
}
|
||||
for handler_name, level in _LOGGING_HANDLER_MAP.items():
|
||||
# pyright: reportGeneralTypeIssues=false
|
||||
|
@@ -35,21 +35,21 @@ class LDAPProviderManager(ObjectManager):
"goauthentik.io/sources/ldap/ms-userprincipalname",
name="authentik default Active Directory Mapping: userPrincipalName",
object_field="attributes.upn",
expression="return ldap.get('userPrincipalName')",
expression="return list_flatten(ldap.get('userPrincipalName'))",
),
EnsureExists(
LDAPPropertyMapping,
"goauthentik.io/sources/ldap/ms-givenName",
name="authentik default Active Directory Mapping: givenName",
object_field="attributes.givenName",
expression="return ldap.get('givenName')",
expression="return list_flatten(ldap.get('givenName'))",
),
EnsureExists(
LDAPPropertyMapping,
"goauthentik.io/sources/ldap/ms-sn",
name="authentik default Active Directory Mapping: sn",
object_field="attributes.sn",
expression="return ldap.get('sn')",
expression="return list_flatten(ldap.get('sn'))",
),
# OpenLDAP specific mappings
EnsureExists(
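Note: the mappings above now wrap `ldap.get(...)` in `list_flatten`, since LDAP attribute values usually come back as lists while `attributes.upn` and friends expect a scalar. The helper ships with authentik's expression environment; a plausible minimal equivalent (an assumption for illustration, not the project's exact implementation):

```python
from typing import Any


def list_flatten(value: Any) -> Any:
    """Return the single element of a one-item list, otherwise the value unchanged."""
    if isinstance(value, list) and len(value) == 1:
        return value[0]
    return value


assert list_flatten(["jdoe@example.org"]) == "jdoe@example.org"
assert list_flatten("already-flat") == "already-flat"
assert list_flatten(["a", "b"]) == ["a", "b"]
```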
@@ -1,13 +1,13 @@
"""Sync LDAP Users and groups into authentik"""
from typing import Any

from deepmerge import always_merger
from django.db.models.base import Model
from django.db.models.query import QuerySet
from structlog.stdlib import BoundLogger, get_logger

from authentik.core.exceptions import PropertyMappingExpressionException
from authentik.events.models import Event, EventAction
from authentik.lib.merge import MERGE_LIST_UNIQUE
from authentik.sources.ldap.auth import LDAP_DISTINGUISHED_NAME
from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource

@@ -123,8 +123,8 @@ class BaseLDAPSynchronizer:
continue
setattr(instance, key, value)
final_atttributes = {}
always_merger.merge(final_atttributes, instance.attributes)
always_merger.merge(final_atttributes, data.get("attributes", {}))
MERGE_LIST_UNIQUE.merge(final_atttributes, instance.attributes)
MERGE_LIST_UNIQUE.merge(final_atttributes, data.get("attributes", {}))
instance.attributes = final_atttributes
instance.save()
return (instance, False)
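Note: `always_merger` appends list values on every merge, so repeated syncs would duplicate attribute entries; the hunk above swaps in `MERGE_LIST_UNIQUE` from `authentik.lib.merge`. Assuming that constant is a deepmerge `Merger` configured with a de-duplicating list strategy, the behavioural difference looks roughly like this (the strategy function below is illustrative):

```python
from deepmerge import Merger, always_merger


def merge_list_unique(merger, path, base, nxt):
    """Append only the items that are not already present in the base list."""
    return base + [item for item in nxt if item not in base]


MERGE_LIST_UNIQUE = Merger(
    [(list, merge_list_unique), (dict, "merge")],
    ["override"],
    ["override"],
)

print(always_merger.merge({"groups": ["admins"]}, {"groups": ["admins"]}))
# {'groups': ['admins', 'admins']}  -- default list strategy appends blindly
print(MERGE_LIST_UNIQUE.merge({"groups": ["admins"]}, {"groups": ["admins"]}))
# {'groups': ['admins']}            -- duplicates are skipped
```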
@ -3,6 +3,7 @@ from ldap3.core.exceptions import LDAPException
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.lib.utils.reflection import class_to_path, path_to_class
|
||||
from authentik.root.celery import CELERY_APP
|
||||
from authentik.sources.ldap.models import LDAPSource
|
||||
@ -52,5 +53,5 @@ def ldap_sync(self: MonitoredTask, source_pk: str, sync_class: str):
|
||||
)
|
||||
except LDAPException as exc:
|
||||
# No explicit event is created here as .set_status with an error will do that
|
||||
LOGGER.debug(exc)
|
||||
LOGGER.warning(exception_to_string(exc))
|
||||
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
|
||||
|
@ -18,6 +18,8 @@ from authentik.sources.saml.processors.constants import (
|
||||
RSA_SHA256,
|
||||
RSA_SHA384,
|
||||
RSA_SHA512,
|
||||
SAML_BINDING_POST,
|
||||
SAML_BINDING_REDIRECT,
|
||||
SAML_NAME_ID_FORMAT_EMAIL,
|
||||
SAML_NAME_ID_FORMAT_PERSISTENT,
|
||||
SAML_NAME_ID_FORMAT_TRANSIENT,
|
||||
@ -37,6 +39,15 @@ class SAMLBindingTypes(models.TextChoices):
|
||||
POST = "POST", _("POST Binding")
|
||||
POST_AUTO = "POST_AUTO", _("POST Binding with auto-confirmation")
|
||||
|
||||
@property
|
||||
def uri(self) -> str:
|
||||
"""Convert database field to URI"""
|
||||
return {
|
||||
SAMLBindingTypes.POST: SAML_BINDING_POST,
|
||||
SAMLBindingTypes.POST_AUTO: SAML_BINDING_POST,
|
||||
SAMLBindingTypes.REDIRECT: SAML_BINDING_REDIRECT,
|
||||
}[self]
|
||||
|
||||
|
||||
class SAMLNameIDPolicy(models.TextChoices):
|
||||
"""SAML NameID Policies"""
|
||||
|
@ -10,7 +10,7 @@ from lxml.etree import Element # nosec
|
||||
from authentik.providers.saml.utils import get_random_id
|
||||
from authentik.providers.saml.utils.encoding import deflate_and_base64_encode
|
||||
from authentik.providers.saml.utils.time import get_time_string
|
||||
from authentik.sources.saml.models import SAMLSource
|
||||
from authentik.sources.saml.models import SAMLBindingTypes, SAMLSource
|
||||
from authentik.sources.saml.processors.constants import (
|
||||
DIGEST_ALGORITHM_TRANSLATION_MAP,
|
||||
NS_MAP,
|
||||
@ -62,7 +62,7 @@ class RequestProcessor:
|
||||
auth_n_request.attrib["Destination"] = self.source.sso_url
|
||||
auth_n_request.attrib["ID"] = self.request_id
|
||||
auth_n_request.attrib["IssueInstant"] = self.issue_instant
|
||||
auth_n_request.attrib["ProtocolBinding"] = self.source.binding_type
|
||||
auth_n_request.attrib["ProtocolBinding"] = SAMLBindingTypes(self.source.binding_type).uri
|
||||
auth_n_request.attrib["Version"] = "2.0"
|
||||
# Create issuer object
|
||||
auth_n_request.append(self.get_issuer())
|
||||
|
@ -13,8 +13,8 @@ class AuthenticatorValidateStageSerializer(StageSerializer):
|
||||
|
||||
def validate_not_configured_action(self, value):
|
||||
"""Ensure that a configuration stage is set when not_configured_action is configure"""
|
||||
configuration_stage = self.initial_data.get("configuration_stage")
|
||||
if value == NotConfiguredAction.CONFIGURE and configuration_stage is None:
|
||||
configuration_stages = self.initial_data.get("configuration_stages")
|
||||
if value == NotConfiguredAction.CONFIGURE and configuration_stages is None:
|
||||
raise ValidationError(
|
||||
(
|
||||
'When "Not configured action" is set to "Configure", '
|
||||
@ -29,7 +29,7 @@ class AuthenticatorValidateStageSerializer(StageSerializer):
|
||||
fields = StageSerializer.Meta.fields + [
|
||||
"not_configured_action",
|
||||
"device_classes",
|
||||
"configuration_stage",
|
||||
"configuration_stages",
|
||||
]
|
||||
|
||||
|
||||
@ -38,5 +38,5 @@ class AuthenticatorValidateStageViewSet(UsedByMixin, ModelViewSet):
|
||||
|
||||
queryset = AuthenticatorValidateStage.objects.all()
|
||||
serializer_class = AuthenticatorValidateStageSerializer
|
||||
filterset_fields = ["name", "not_configured_action", "configuration_stage"]
|
||||
filterset_fields = ["name", "not_configured_action", "configuration_stages"]
|
||||
ordering = ["name"]
|
||||
|
@ -0,0 +1,44 @@
|
||||
# Generated by Django 4.0.1 on 2022-01-05 22:09
|
||||
|
||||
from django.apps.registry import Apps
|
||||
from django.db import migrations, models
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
|
||||
|
||||
def migrate_configuration_stage(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
db_alias = schema_editor.connection.alias
|
||||
AuthenticatorValidateStage = apps.get_model(
|
||||
"authentik_stages_authenticator_validate", "AuthenticatorValidateStage"
|
||||
)
|
||||
|
||||
for stage in AuthenticatorValidateStage.objects.using(db_alias).all():
|
||||
if stage.configuration_stage:
|
||||
stage.configuration_stages.set([stage.configuration_stage])
|
||||
stage.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_flows", "0021_auto_20211227_2103"),
|
||||
("authentik_stages_authenticator_validate", "0009_default_stage"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="authenticatorvalidatestage",
|
||||
name="configuration_stages",
|
||||
field=models.ManyToManyField(
|
||||
blank=True,
|
||||
default=None,
|
||||
help_text="Stages used to configure Authenticator when user doesn't have any compatible devices. After this configuration Stage passes, the user is not prompted again.",
|
||||
related_name="+",
|
||||
to="authentik_flows.Stage",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(migrate_configuration_stage),
|
||||
migrations.RemoveField(
|
||||
model_name="authenticatorvalidatestage",
|
||||
name="configuration_stage",
|
||||
),
|
||||
]
|
@ -38,16 +38,14 @@ class AuthenticatorValidateStage(Stage):
|
||||
choices=NotConfiguredAction.choices, default=NotConfiguredAction.SKIP
|
||||
)
|
||||
|
||||
configuration_stage = models.ForeignKey(
|
||||
configuration_stages = models.ManyToManyField(
|
||||
Stage,
|
||||
null=True,
|
||||
blank=True,
|
||||
default=None,
|
||||
on_delete=models.SET_DEFAULT,
|
||||
related_name="+",
|
||||
help_text=_(
|
||||
(
|
||||
"Stage used to configure Authenticator when user doesn't have any compatible "
|
||||
"Stages used to configure Authenticator when user doesn't have any compatible "
|
||||
"devices. After this configuration Stage passes, the user is not prompted again."
|
||||
)
|
||||
),
|
||||
|
@ -1,10 +1,12 @@
|
||||
"""Authenticator Validation"""
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django_otp import devices_for_user
|
||||
from rest_framework.fields import CharField, IntegerField, JSONField, ListField
|
||||
from rest_framework.fields import CharField, IntegerField, JSONField, ListField, UUIDField
|
||||
from rest_framework.serializers import ValidationError
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.core.models import User
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.events.utils import cleanse_dict, sanitize_dict
|
||||
from authentik.flows.challenge import ChallengeResponse, ChallengeTypes, WithUserInfoChallenge
|
||||
@ -26,6 +28,18 @@ from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
|
||||
from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS
|
||||
|
||||
LOGGER = get_logger()
|
||||
SESSION_STAGES = "goauthentik.io/stages/authenticator_validate/stages"
|
||||
SESSION_SELECTED_STAGE = "goauthentik.io/stages/authenticator_validate/selected_stage"
|
||||
SESSION_DEVICE_CHALLENGES = "goauthentik.io/stages/authenticator_validate/device_challenges"
|
||||
|
||||
|
||||
class SelectableStageSerializer(PassiveSerializer):
|
||||
"""Serializer for stages which can be selected by users"""
|
||||
|
||||
pk = UUIDField()
|
||||
name = CharField()
|
||||
verbose_name = CharField()
|
||||
meta_model_name = CharField()
|
||||
|
||||
|
||||
class AuthenticatorValidationChallenge(WithUserInfoChallenge):
|
||||
@ -33,12 +47,14 @@ class AuthenticatorValidationChallenge(WithUserInfoChallenge):
|
||||
|
||||
device_challenges = ListField(child=DeviceChallenge())
|
||||
component = CharField(default="ak-stage-authenticator-validate")
|
||||
configuration_stages = ListField(child=SelectableStageSerializer())
|
||||
|
||||
|
||||
class AuthenticatorValidationChallengeResponse(ChallengeResponse):
|
||||
"""Challenge used for Code-based and WebAuthn authenticators"""
|
||||
|
||||
selected_challenge = DeviceChallenge(required=False)
|
||||
selected_stage = CharField(required=False)
|
||||
|
||||
code = CharField(required=False)
|
||||
webauthn = JSONField(required=False)
|
||||
@ -46,7 +62,7 @@ class AuthenticatorValidationChallengeResponse(ChallengeResponse):
|
||||
component = CharField(default="ak-stage-authenticator-validate")
|
||||
|
||||
def _challenge_allowed(self, classes: list):
|
||||
device_challenges: list[dict] = self.stage.request.session.get("device_challenges")
|
||||
device_challenges: list[dict] = self.stage.request.session.get(SESSION_DEVICE_CHALLENGES)
|
||||
if not any(x["device_class"] in classes for x in device_challenges):
|
||||
raise ValidationError("No compatible device class allowed")
|
||||
|
||||
@ -71,7 +87,7 @@ class AuthenticatorValidationChallengeResponse(ChallengeResponse):
|
||||
def validate_selected_challenge(self, challenge: dict) -> dict:
|
||||
"""Check which challenge the user has selected. Actual logic only used for SMS stage."""
|
||||
# First check if the challenge is valid
|
||||
for device_challenge in self.stage.request.session.get("device_challenges"):
|
||||
for device_challenge in self.stage.request.session.get(SESSION_DEVICE_CHALLENGES):
|
||||
if device_challenge.get("device_class", "") != challenge.get("device_class", ""):
|
||||
raise ValidationError("invalid challenge selected")
|
||||
if device_challenge.get("device_uid", "") != challenge.get("device_uid", ""):
|
||||
@ -84,6 +100,15 @@ class AuthenticatorValidationChallengeResponse(ChallengeResponse):
|
||||
select_challenge(self.stage.request, devices.first())
|
||||
return challenge
|
||||
|
||||
def validate_selected_stage(self, stage_pk: str) -> str:
|
||||
"""Check that the selected stage is valid"""
|
||||
stages = self.stage.request.session.get(SESSION_STAGES, [])
|
||||
if not any(str(stage.pk) == stage_pk for stage in stages):
|
||||
raise ValidationError("Selected stage is invalid")
|
||||
LOGGER.debug("Setting selected stage to ", stage=stage_pk)
|
||||
self.stage.request.session[SESSION_SELECTED_STAGE] = stage_pk
|
||||
return stage_pk
|
||||
|
||||
def validate(self, attrs: dict):
|
||||
# Checking if the given data is from a valid device class is done above
|
||||
# Here we only check if the any data was sent at all
|
||||
@ -164,7 +189,7 @@ class AuthenticatorValidateStageView(ChallengeStageView):
|
||||
else:
|
||||
LOGGER.debug("No pending user, continuing")
|
||||
return self.executor.stage_ok()
|
||||
self.request.session["device_challenges"] = challenges
|
||||
self.request.session[SESSION_DEVICE_CHALLENGES] = challenges
|
||||
|
||||
# No allowed devices
|
||||
if len(challenges) < 1:
|
||||
@ -175,32 +200,74 @@ class AuthenticatorValidateStageView(ChallengeStageView):
|
||||
LOGGER.debug("Authenticator not configured, denying")
|
||||
return self.executor.stage_invalid()
|
||||
if stage.not_configured_action == NotConfiguredAction.CONFIGURE:
|
||||
if not stage.configuration_stage:
|
||||
Event.new(
|
||||
EventAction.CONFIGURATION_ERROR,
|
||||
message=(
|
||||
"Authenticator validation stage is set to configure user "
|
||||
"but no configuration flow is set."
|
||||
),
|
||||
stage=self,
|
||||
).from_http(self.request).set_user(user).save()
|
||||
return self.executor.stage_invalid()
|
||||
LOGGER.debug("Authenticator not configured, sending user to configure")
|
||||
# Because the foreign key to stage.configuration_stage points to
|
||||
# a base stage class, we need to do another lookup
|
||||
stage = Stage.objects.get_subclass(pk=stage.configuration_stage.pk)
|
||||
# plan.insert inserts at 1 index, so when stage_ok pops 0,
|
||||
# the configuration stage is next
|
||||
self.executor.plan.insert_stage(stage)
|
||||
return self.executor.stage_ok()
|
||||
LOGGER.debug("Authenticator not configured, forcing configure")
|
||||
return self.prepare_stages(user)
|
||||
return super().get(request, *args, **kwargs)
|
||||
|
||||
def prepare_stages(self, user: User, *args, **kwargs) -> HttpResponse:
|
||||
"""Check how the user can configure themselves. If no stages are set, return an error.
|
||||
If a single stage is set, insert that stage directly. If multiple are selected, include
|
||||
them in the challenge."""
|
||||
stage: AuthenticatorValidateStage = self.executor.current_stage
|
||||
if not stage.configuration_stages.exists():
|
||||
Event.new(
|
||||
EventAction.CONFIGURATION_ERROR,
|
||||
message=(
|
||||
"Authenticator validation stage is set to configure user "
|
||||
"but no configuration flow is set."
|
||||
),
|
||||
stage=self,
|
||||
).from_http(self.request).set_user(user).save()
|
||||
return self.executor.stage_invalid()
|
||||
if stage.configuration_stages.count() == 1:
|
||||
next_stage = Stage.objects.get_subclass(pk=stage.configuration_stages.first().pk)
|
||||
LOGGER.debug("Single stage configured, auto-selecting", stage=next_stage)
|
||||
self.request.session[SESSION_SELECTED_STAGE] = next_stage
|
||||
# Because that normal insetion only happens on post, we directly inject it here and
|
||||
# return it
|
||||
self.executor.plan.insert_stage(next_stage)
|
||||
return self.executor.stage_ok()
|
||||
stages = Stage.objects.filter(pk__in=stage.configuration_stages.all()).select_subclasses()
|
||||
self.request.session[SESSION_STAGES] = stages
|
||||
return super().get(self.request, *args, **kwargs)
|
||||
|
||||
def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
||||
res = super().post(request, *args, **kwargs)
|
||||
if (
|
||||
SESSION_SELECTED_STAGE in self.request.session
|
||||
and self.executor.current_stage.not_configured_action == NotConfiguredAction.CONFIGURE
|
||||
):
|
||||
LOGGER.debug("Got selected stage in session, running that")
|
||||
stage_pk = self.request.session.get(SESSION_SELECTED_STAGE)
|
||||
# Because the foreign key to stage.configuration_stage points to
|
||||
# a base stage class, we need to do another lookup
|
||||
stage = Stage.objects.get_subclass(pk=stage_pk)
|
||||
# plan.insert inserts at 1 index, so when stage_ok pops 0,
|
||||
# the configuration stage is next
|
||||
self.executor.plan.insert_stage(stage)
|
||||
return self.executor.stage_ok()
|
||||
return res
|
||||
|
||||
def get_challenge(self) -> AuthenticatorValidationChallenge:
|
||||
challenges = self.request.session["device_challenges"]
|
||||
challenges = self.request.session.get(SESSION_DEVICE_CHALLENGES, [])
|
||||
stages = self.request.session.get(SESSION_STAGES, [])
|
||||
stage_challenges = []
|
||||
for stage in stages:
|
||||
serializer = SelectableStageSerializer(
|
||||
data={
|
||||
"pk": stage.pk,
|
||||
"name": stage.name,
|
||||
"verbose_name": str(stage._meta.verbose_name),
|
||||
"meta_model_name": f"{stage._meta.app_label}.{stage._meta.model_name}",
|
||||
}
|
||||
)
|
||||
serializer.is_valid()
|
||||
stage_challenges.append(serializer.data)
|
||||
return AuthenticatorValidationChallenge(
|
||||
data={
|
||||
"type": ChallengeTypes.NATIVE.value,
|
||||
"device_challenges": challenges,
|
||||
"configuration_stages": stage_challenges,
|
||||
}
|
||||
)
|
||||
|
||||
|
@ -43,8 +43,8 @@ class AuthenticatorValidateStageTests(FlowTestCase):
|
||||
stage = AuthenticatorValidateStage.objects.create(
|
||||
name="foo",
|
||||
not_configured_action=NotConfiguredAction.CONFIGURE,
|
||||
configuration_stage=conf_stage,
|
||||
)
|
||||
stage.configuration_stages.set([conf_stage])
|
||||
flow = Flow.objects.create(name="test", slug="test", title="test")
|
||||
FlowStageBinding.objects.create(target=flow, stage=conf_stage, order=0)
|
||||
FlowStageBinding.objects.create(target=flow, stage=stage, order=1)
|
||||
|
@ -18,7 +18,12 @@ class AuthenticateWebAuthnStageSerializer(StageSerializer):
|
||||
class Meta:
|
||||
|
||||
model = AuthenticateWebAuthnStage
|
||||
fields = StageSerializer.Meta.fields + ["configure_flow", "user_verification"]
|
||||
fields = StageSerializer.Meta.fields + [
|
||||
"configure_flow",
|
||||
"user_verification",
|
||||
"authenticator_attachment",
|
||||
"resident_key_requirement",
|
||||
]
|
||||
|
||||
|
||||
class AuthenticateWebAuthnStageViewSet(UsedByMixin, ModelViewSet):
|
||||
|
@ -0,0 +1,37 @@
|
||||
# Generated by Django 4.0.1 on 2022-01-12 21:48
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
(
|
||||
"authentik_stages_authenticator_webauthn",
|
||||
"0005_authenticatewebauthnstage_user_verification",
|
||||
),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="authenticatewebauthnstage",
|
||||
name="authenticator_attachment",
|
||||
field=models.TextField(
|
||||
choices=[("platform", "Platform"), ("cross-platform", "Cross Platform")],
|
||||
default=None,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="authenticatewebauthnstage",
|
||||
name="resident_key_requirement",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("discouraged", "Discouraged"),
|
||||
("preferred", "Preferred"),
|
||||
("required", "Required"),
|
||||
],
|
||||
default="preferred",
|
||||
),
|
||||
),
|
||||
]
|
@ -31,6 +31,40 @@ class UserVerification(models.TextChoices):
|
||||
DISCOURAGED = "discouraged"
|
||||
|
||||
|
||||
class ResidentKeyRequirement(models.TextChoices):
|
||||
"""The Relying Party's preference for the authenticator to create a dedicated "client-side"
|
||||
credential for it. Requiring an authenticator to store a dedicated credential should not be
|
||||
done lightly due to the limited storage capacity of some types of authenticators.
|
||||
|
||||
Members:
|
||||
`DISCOURAGED`: The authenticator should not create a dedicated credential
|
||||
`PREFERRED`: The authenticator can create and store a dedicated credential, but if it
|
||||
doesn't that's alright too
|
||||
`REQUIRED`: The authenticator MUST create a dedicated credential. If it cannot, the RP
|
||||
is prepared for an error to occur.
|
||||
|
||||
https://www.w3.org/TR/webauthn-2/#enum-residentKeyRequirement
|
||||
"""
|
||||
|
||||
DISCOURAGED = "discouraged"
|
||||
PREFERRED = "preferred"
|
||||
REQUIRED = "required"
|
||||
|
||||
|
||||
class AuthenticatorAttachment(models.TextChoices):
|
||||
"""How an authenticator is connected to the client/browser.
|
||||
|
||||
Members:
|
||||
`PLATFORM`: A non-removable authenticator, like TouchID or Windows Hello
|
||||
`CROSS_PLATFORM`: A "roaming" authenticator, like a YubiKey
|
||||
|
||||
https://www.w3.org/TR/webauthn-2/#enumdef-authenticatorattachment
|
||||
"""
|
||||
|
||||
PLATFORM = "platform"
|
||||
CROSS_PLATFORM = "cross-platform"
|
||||
|
||||
|
||||
class AuthenticateWebAuthnStage(ConfigurableStage, Stage):
|
||||
"""WebAuthn stage"""
|
||||
|
||||
@ -38,6 +72,13 @@ class AuthenticateWebAuthnStage(ConfigurableStage, Stage):
|
||||
choices=UserVerification.choices,
|
||||
default=UserVerification.PREFERRED,
|
||||
)
|
||||
resident_key_requirement = models.TextField(
|
||||
choices=ResidentKeyRequirement.choices,
|
||||
default=ResidentKeyRequirement.PREFERRED,
|
||||
)
|
||||
authenticator_attachment = models.TextField(
|
||||
choices=AuthenticatorAttachment.choices, default=None, null=True
|
||||
)
|
||||
|
||||
@property
|
||||
def serializer(self) -> BaseSerializer:
|
||||
|
@ -13,7 +13,6 @@ from webauthn.helpers.structs import (
|
||||
AuthenticatorSelectionCriteria,
|
||||
PublicKeyCredentialCreationOptions,
|
||||
RegistrationCredential,
|
||||
ResidentKeyRequirement,
|
||||
)
|
||||
from webauthn.registration.verify_registration_response import VerifiedRegistration
|
||||
|
||||
@ -85,6 +84,12 @@ class AuthenticatorWebAuthnStageView(ChallengeStageView):
|
||||
stage: AuthenticateWebAuthnStage = self.executor.current_stage
|
||||
user = self.get_pending_user()
|
||||
|
||||
# library accepts none so we store null in the database, but if there is a value
|
||||
# set, cast it to string to ensure it's not a django class
|
||||
authenticator_attachment = stage.authenticator_attachment
|
||||
if authenticator_attachment:
|
||||
authenticator_attachment = str(authenticator_attachment)
|
||||
|
||||
registration_options: PublicKeyCredentialCreationOptions = generate_registration_options(
|
||||
rp_id=get_rp_id(self.request),
|
||||
rp_name=self.request.tenant.branding_title,
|
||||
@ -92,8 +97,9 @@ class AuthenticatorWebAuthnStageView(ChallengeStageView):
|
||||
user_name=user.username,
|
||||
user_display_name=user.name,
|
||||
authenticator_selection=AuthenticatorSelectionCriteria(
|
||||
resident_key=ResidentKeyRequirement.PREFERRED,
|
||||
resident_key=str(stage.resident_key_requirement),
|
||||
user_verification=str(stage.user_verification),
|
||||
authenticator_attachment=authenticator_attachment,
|
||||
),
|
||||
)
|
||||
|
||||
|
@ -8,6 +8,7 @@ import (
|
||||
log "github.com/sirupsen/logrus"
|
||||
|
||||
"goauthentik.io/internal/common"
|
||||
"goauthentik.io/internal/debug"
|
||||
"goauthentik.io/internal/outpost/ak"
|
||||
"goauthentik.io/internal/outpost/ldap"
|
||||
)
|
||||
@ -27,6 +28,7 @@ func main() {
|
||||
log.FieldKeyTime: "timestamp",
|
||||
},
|
||||
})
|
||||
go debug.EnableDebugServer()
|
||||
akURL, found := os.LookupEnv("AUTHENTIK_HOST")
|
||||
if !found {
|
||||
fmt.Println("env AUTHENTIK_HOST not set!")
|
||||
|
@ -9,6 +9,7 @@ import (
|
||||
log "github.com/sirupsen/logrus"
|
||||
|
||||
"goauthentik.io/internal/common"
|
||||
"goauthentik.io/internal/debug"
|
||||
"goauthentik.io/internal/outpost/ak"
|
||||
"goauthentik.io/internal/outpost/proxyv2"
|
||||
)
|
||||
@ -32,6 +33,7 @@ func main() {
|
||||
log.FieldKeyTime: "timestamp",
|
||||
},
|
||||
})
|
||||
go debug.EnableDebugServer()
|
||||
akURL, found := os.LookupEnv("AUTHENTIK_HOST")
|
||||
if !found {
|
||||
fmt.Println("env AUTHENTIK_HOST not set!")
|
||||
|
@ -11,6 +11,7 @@ import (
|
||||
"goauthentik.io/internal/common"
|
||||
"goauthentik.io/internal/config"
|
||||
"goauthentik.io/internal/constants"
|
||||
"goauthentik.io/internal/debug"
|
||||
"goauthentik.io/internal/gounicorn"
|
||||
"goauthentik.io/internal/outpost/ak"
|
||||
"goauthentik.io/internal/outpost/proxyv2"
|
||||
@ -28,6 +29,7 @@ func main() {
|
||||
log.FieldKeyTime: "timestamp",
|
||||
},
|
||||
})
|
||||
go debug.EnableDebugServer()
|
||||
l := log.WithField("logger", "authentik.root")
|
||||
config.DefaultConfig()
|
||||
err := config.LoadConfig("./authentik/lib/default.yml")
|
||||
|
@ -17,7 +17,7 @@ services:
|
||||
image: redis:alpine
|
||||
restart: unless-stopped
|
||||
server:
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2021.12.5}
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2022.2.1}
|
||||
restart: unless-stopped
|
||||
command: server
|
||||
environment:
|
||||
@ -35,10 +35,10 @@ services:
|
||||
env_file:
|
||||
- .env
|
||||
ports:
|
||||
- "0.0.0.0:9000:9000"
|
||||
- "0.0.0.0:9443:9443"
|
||||
- "0.0.0.0:${AUTHENTIK_PORT_HTTP:-9000}:9000"
|
||||
- "0.0.0.0:${AUTHENTIK_PORT_HTTPS:-9443}:9443"
|
||||
worker:
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2021.12.5}
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2022.2.1}
|
||||
restart: unless-stopped
|
||||
command: worker
|
||||
environment:
|
||||
|
go.mod (17 lines changed)
@@ -8,17 +8,16 @@ require (
|
||||
github.com/coreos/go-oidc v2.2.1+incompatible
|
||||
github.com/garyburd/redigo v1.6.2 // indirect
|
||||
github.com/getsentry/sentry-go v0.12.0
|
||||
github.com/go-ldap/ldap/v3 v3.4.1
|
||||
github.com/go-openapi/runtime v0.21.0
|
||||
github.com/go-openapi/strfmt v0.21.1
|
||||
github.com/go-ldap/ldap/v3 v3.4.2
|
||||
github.com/go-openapi/runtime v0.23.0
|
||||
github.com/go-openapi/strfmt v0.21.2
|
||||
github.com/golang-jwt/jwt v3.2.2+incompatible
|
||||
github.com/golang/protobuf v1.5.2 // indirect
|
||||
github.com/google/uuid v1.3.0
|
||||
github.com/gorilla/handlers v1.5.1
|
||||
github.com/gorilla/mux v1.8.0
|
||||
github.com/gorilla/securecookie v1.1.1
|
||||
github.com/gorilla/sessions v1.2.1
|
||||
github.com/gorilla/websocket v1.4.2
|
||||
github.com/gorilla/websocket v1.5.0
|
||||
github.com/imdario/mergo v0.3.12
|
||||
github.com/mailru/easyjson v0.7.7 // indirect
|
||||
github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484
|
||||
@ -26,10 +25,12 @@ require (
|
||||
github.com/pires/go-proxyproto v0.6.1
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/pquerna/cachecontrol v0.0.0-20201205024021-ac21108117ac // indirect
|
||||
github.com/prometheus/client_golang v1.11.0
|
||||
github.com/prometheus/client_golang v1.12.1
|
||||
github.com/quasoft/memstore v0.0.0-20191010062613-2bce066d2b0b
|
||||
github.com/sirupsen/logrus v1.8.1
|
||||
goauthentik.io/api v0.2021124.9
|
||||
golang.org/x/oauth2 v0.0.0-20210323180902-22b0adad7558
|
||||
github.com/stretchr/testify v1.7.0
|
||||
goauthentik.io/api v0.2021125.1
|
||||
golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
|
||||
google.golang.org/appengine v1.6.7 // indirect
|
||||
gopkg.in/boj/redistore.v1 v1.0.0-20160128113310-fc113767cd6b
|
||||
|
go.sum (44 lines changed)
@@ -70,8 +70,9 @@ github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+Ce
|
||||
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
||||
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
|
||||
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
|
||||
github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
|
||||
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE=
|
||||
github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
|
||||
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
|
||||
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
|
||||
@ -124,8 +125,8 @@ github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2
|
||||
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
|
||||
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
|
||||
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
|
||||
github.com/go-ldap/ldap/v3 v3.4.1 h1:fU/0xli6HY02ocbMuozHAYsaHLcnkLjvho2r5a34BUU=
|
||||
github.com/go-ldap/ldap/v3 v3.4.1/go.mod h1:iYS1MdmrmceOJ1QOTnRXrIs7i3kloqtmGQjRvjKpyMg=
|
||||
github.com/go-ldap/ldap/v3 v3.4.2 h1:zFZKcXKLqZpFMrMQGHeHWKXbDTdNCmhGY9AK41zPh+8=
|
||||
github.com/go-ldap/ldap/v3 v3.4.2/go.mod h1:iYS1MdmrmceOJ1QOTnRXrIs7i3kloqtmGQjRvjKpyMg=
|
||||
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
|
||||
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
|
||||
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
|
||||
@ -182,8 +183,8 @@ github.com/go-openapi/runtime v0.19.4/go.mod h1:X277bwSUBxVlCYR3r7xgZZGKVvBd/29g
|
||||
github.com/go-openapi/runtime v0.19.15/go.mod h1:dhGWCTKRXlAfGnQG0ONViOZpjfg0m2gUt9nTQPQZuoo=
|
||||
github.com/go-openapi/runtime v0.19.16/go.mod h1:5P9104EJgYcizotuXhEuUrzVc+j1RiSjahULvYmlv98=
|
||||
github.com/go-openapi/runtime v0.19.24/go.mod h1:Lm9YGCeecBnUUkFTxPC4s1+lwrkJ0pthx8YvyjCfkgk=
|
||||
github.com/go-openapi/runtime v0.21.0 h1:giZ8eT26R+/rx6RX2MkYjZPY8vPYVKDhP/mOazrQHzM=
|
||||
github.com/go-openapi/runtime v0.21.0/go.mod h1:aQg+kaIQEn+A2CRSY1TxbM8+sT9g2V3aLc1FbIAnbbs=
|
||||
github.com/go-openapi/runtime v0.23.0 h1:HX6ET2sHCIvaKeDDQoU01CtO1ekg5EkekHSkLTtWXH0=
|
||||
github.com/go-openapi/runtime v0.23.0/go.mod h1:aQg+kaIQEn+A2CRSY1TxbM8+sT9g2V3aLc1FbIAnbbs=
|
||||
github.com/go-openapi/spec v0.17.0/go.mod h1:XkF/MOi14NmjsfZ8VtAKf8pIlbZzyoTvZsdfssdxcBI=
|
||||
github.com/go-openapi/spec v0.18.0/go.mod h1:XkF/MOi14NmjsfZ8VtAKf8pIlbZzyoTvZsdfssdxcBI=
|
||||
github.com/go-openapi/spec v0.19.2/go.mod h1:sCxk3jxKgioEJikev4fgkNmwS+3kuYdJtcsZsD5zxMY=
|
||||
@ -207,8 +208,8 @@ github.com/go-openapi/strfmt v0.19.11/go.mod h1:UukAYgTaQfqJuAFlNxxMWNvMYiwiXtLs
|
||||
github.com/go-openapi/strfmt v0.20.0/go.mod h1:UukAYgTaQfqJuAFlNxxMWNvMYiwiXtLsF2VwmoFtbtc=
|
||||
github.com/go-openapi/strfmt v0.20.2/go.mod h1:43urheQI9dNtE5lTZQfuFJvjYJKPrxicATpEfZwHUNk=
|
||||
github.com/go-openapi/strfmt v0.21.0/go.mod h1:ZRQ409bWMj+SOgXofQAGTIo2Ebu72Gs+WaRADcS5iNg=
|
||||
github.com/go-openapi/strfmt v0.21.1 h1:G6s2t5V5kGCHLVbSdZ/6lI8Wm4OzoPFkc3/cjAsKQrM=
|
||||
github.com/go-openapi/strfmt v0.21.1/go.mod h1:I/XVKeLc5+MM5oPNN7P6urMOpuLXEcNrCX/rPGuWb0k=
|
||||
github.com/go-openapi/strfmt v0.21.2 h1:5NDNgadiX1Vhemth/TH4gCGopWSTdDjxl60H3B7f+os=
|
||||
github.com/go-openapi/strfmt v0.21.2/go.mod h1:I/XVKeLc5+MM5oPNN7P6urMOpuLXEcNrCX/rPGuWb0k=
|
||||
github.com/go-openapi/swag v0.17.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg=
|
||||
github.com/go-openapi/swag v0.18.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg=
|
||||
github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
|
||||
@ -333,8 +334,8 @@ github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+
|
||||
github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI=
|
||||
github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
|
||||
github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc=
|
||||
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
|
||||
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
|
||||
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
@ -360,6 +361,7 @@ github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCV
|
||||
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
||||
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
|
||||
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
@ -427,6 +429,7 @@ github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJ
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
||||
github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||
@ -467,8 +470,9 @@ github.com/pquerna/cachecontrol v0.0.0-20201205024021-ac21108117ac/go.mod h1:hoL
|
||||
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
|
||||
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
|
||||
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
|
||||
github.com/prometheus/client_golang v1.11.0 h1:HNkLOAEQMIDv/K+04rukrLx6ch7msSRwf3/SASFAGtQ=
|
||||
github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
|
||||
github.com/prometheus/client_golang v1.12.1 h1:ZiaPsmm9uiBeaSMRznKsCDNtPCS0T3JVDGF+06gjBzk=
|
||||
github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY=
|
||||
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
|
||||
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
@ -476,13 +480,17 @@ github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2
|
||||
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
|
||||
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
|
||||
github.com/prometheus/common v0.26.0 h1:iMAkS2TDoNWnKM+Kopnx/8tnEStIfpYA0ur0xQzzhMQ=
|
||||
github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
|
||||
github.com/prometheus/common v0.32.1 h1:hWIdL3N2HoUx3B8j3YN9mWor0qhY/NlEKZEaXxuIRh4=
|
||||
github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls=
|
||||
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
||||
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
|
||||
github.com/prometheus/procfs v0.6.0 h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4=
|
||||
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
|
||||
github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU=
|
||||
github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
|
||||
github.com/quasoft/memstore v0.0.0-20191010062613-2bce066d2b0b h1:aUNXCGgukb4gtY99imuIeoh8Vr0GSwAlYxPAhqZrpFc=
|
||||
github.com/quasoft/memstore v0.0.0-20191010062613-2bce066d2b0b/go.mod h1:wTPjTepVu7uJBYgZ0SdWHQlIas582j6cn2jgk4DDdlg=
|
||||
github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
@ -562,8 +570,8 @@ go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
|
||||
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
goauthentik.io/api v0.2021124.9 h1:l6kzTggi8lt+Vmm5js7oxp9U/bXoOEIl4565bUeepuM=
|
||||
goauthentik.io/api v0.2021124.9/go.mod h1:02nnD4FRd8lu8A1+ZuzqownBgvAhdCKzqkKX8v7JMTE=
|
||||
goauthentik.io/api v0.2021125.1 h1:Ja00N1D1wjqFiR90JV8nDayhmm39uLudzsJhwCjoQJ4=
|
||||
goauthentik.io/api v0.2021125.1/go.mod h1:02nnD4FRd8lu8A1+ZuzqownBgvAhdCKzqkKX8v7JMTE=
|
||||
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
@ -655,6 +663,7 @@ golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
|
||||
golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM=
|
||||
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211008194852-3b03d305991f h1:1scJEYZBaF48BaG6tYbtxmLcXqwYGSfGcMoStTqkkIw=
|
||||
golang.org/x/net v0.0.0-20211008194852-3b03d305991f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
@ -663,8 +672,8 @@ golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4Iltr
|
||||
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
|
||||
golang.org/x/oauth2 v0.0.0-20210323180902-22b0adad7558 h1:D7nTwh4J0i+5mW4Zjzn5omvlr6YBcWywE6KOcatyNxY=
|
||||
golang.org/x/oauth2 v0.0.0-20210323180902-22b0adad7558/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
|
||||
golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c h1:pkQiBZBvdos9qq4wBAHqlzuZHEXo07pqV06ef90u1WI=
|
||||
golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@ -731,8 +740,9 @@ golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac h1:oN6lz7iLW/YC7un8pq+9bOLyXrprv2+DKfkJY+2LJJw=
|
||||
golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9 h1:XfKQ4OlFl8okEOr5UvAqFRVj8pY/4yfcXrddB8qAbU0=
|
||||
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
@@ -5,16 +5,24 @@ import (
 	"os"
 )
 
-func BUILD() string {
+func BUILD(def string) string {
 	build := os.Getenv("GIT_BUILD_HASH")
 	if build == "" {
-		return "tagged"
+		return def
 	}
 	return build
 }
 
-func OutpostUserAgent() string {
-	return fmt.Sprintf("authentik-outpost@%s (build=%s)", VERSION, BUILD())
+func FullVersion() string {
+	ver := VERSION
+	if b := BUILD(""); b != "" {
+		ver = fmt.Sprintf("%s.%s", ver, b)
+	}
+	return ver
 }
 
-const VERSION = "2021.12.5"
+func OutpostUserAgent() string {
+	return fmt.Sprintf("authentik-outpost@%s", FullVersion())
+}
+
+const VERSION = "2022.2.1"
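The BUILD helper now takes a fallback instead of hard-coding "tagged", and FullVersion combines the release tag with the optional build hash. A minimal standalone sketch of the resulting values, assuming the package above is importable as goauthentik.io/internal/constants and using a made-up hash:

package main

import (
	"fmt"
	"os"

	"goauthentik.io/internal/constants"
)

func main() {
	// Without GIT_BUILD_HASH set, BUILD("") returns "" and FullVersion()
	// is just the tagged release, e.g. "2022.2.1".
	fmt.Println(constants.FullVersion())

	// With a build hash (as set for container builds) it is appended.
	// "deadbeef" is a placeholder value.
	os.Setenv("GIT_BUILD_HASH", "deadbeef")
	fmt.Println(constants.FullVersion())      // 2022.2.1.deadbeef
	fmt.Println(constants.OutpostUserAgent()) // authentik-outpost@2022.2.1.deadbeef
}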
24 internal/debug/debug.go Normal file
@@ -0,0 +1,24 @@
+package debug
+
+import (
+	"net/http"
+	"net/http/pprof"
+	"os"
+	"strings"
+
+	log "github.com/sirupsen/logrus"
+)
+
+func EnableDebugServer() {
+	l := log.WithField("logger", "authentik.go_debugger")
+	if deb := os.Getenv("AUTHENTIK_DEBUG"); strings.ToLower(deb) != "true" {
+		l.Info("not enabling debug server, set `AUTHENTIK_DEBUG` to `true` to enable it.")
+	}
+	h := http.NewServeMux()
+	h.HandleFunc("/debug/pprof/", pprof.Index)
+	h.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline)
+	h.HandleFunc("/debug/pprof/profile", pprof.Profile)
+	h.HandleFunc("/debug/pprof/symbol", pprof.Symbol)
+	h.HandleFunc("/debug/pprof/trace", pprof.Trace)
+	l.Println(http.ListenAndServe("0.0.0.0:9900", nil))
+}
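The new debug package gates a pprof listener behind the AUTHENTIK_DEBUG environment variable. A rough sketch of how an outpost entrypoint might start it (the import path is inferred from the file path and the goauthentik.io module; the sleep stands in for the real run loop):

package main

import (
	"time"

	"goauthentik.io/internal/debug"
)

func main() {
	// With AUTHENTIK_DEBUG=true, profiles become available on port 9900,
	// e.g. http://localhost:9900/debug/pprof/profile for a CPU profile.
	go debug.EnableDebugServer()

	// Stand-in for the actual outpost server.
	time.Sleep(time.Hour)
}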
@@ -171,7 +171,7 @@ func (a *APIController) StartBackgorundTasks() error {
 		"outpost_type": a.Server.Type(),
 		"uuid": a.instanceUUID.String(),
 		"version": constants.VERSION,
-		"build": constants.BUILD(),
+		"build": constants.BUILD("tagged"),
 	}).Set(1)
 	go func() {
 		a.logger.Debug("Starting WS Handler...")
@@ -51,7 +51,7 @@ func (ac *APIController) initWS(akURL url.URL, outpostUUID string) error {
 		Instruction: WebsocketInstructionHello,
 		Args: map[string]interface{}{
 			"version": constants.VERSION,
-			"buildHash": constants.BUILD(),
+			"buildHash": constants.BUILD("tagged"),
 			"uuid": ac.instanceUUID.String(),
 		},
 	}
@@ -151,7 +151,7 @@ func (ac *APIController) startWSHandler() {
 			"outpost_type": ac.Server.Type(),
 			"uuid": ac.instanceUUID.String(),
 			"version": constants.VERSION,
-			"build": constants.BUILD(),
+			"build": constants.BUILD("tagged"),
 		}).SetToCurrentTime()
 	}
 }
@@ -165,7 +165,7 @@ func (ac *APIController) startWSHealth() {
 		Instruction: WebsocketInstructionHello,
 		Args: map[string]interface{}{
 			"version": constants.VERSION,
-			"buildHash": constants.BUILD(),
+			"buildHash": constants.BUILD("tagged"),
 			"uuid": ac.instanceUUID.String(),
 		},
 	}
@@ -205,7 +205,7 @@ func (ac *APIController) startIntervalUpdater() {
 		"outpost_type": ac.Server.Type(),
 		"uuid": ac.instanceUUID.String(),
 		"version": constants.VERSION,
-		"build": constants.BUILD(),
+		"build": constants.BUILD("tagged"),
 	}).SetToCurrentTime()
 }
 }
@@ -34,7 +34,7 @@ func doGlobalSetup(outpost api.Outpost, globalConfig api.Config) {
 	} else {
 		l.Debug("Managed outpost, not setting global log level")
 	}
-	l.WithField("hash", constants.BUILD()).WithField("version", constants.VERSION).Info("Starting authentik outpost")
+	l.WithField("hash", constants.BUILD("tagged")).WithField("version", constants.VERSION).Info("Starting authentik outpost")
 
 	if globalConfig.ErrorReporting.Enabled {
 		dsn := "https://a579bb09306d4f8b8d8847c052d3a1d3@sentry.beryju.org/8"
66 internal/outpost/ak/test.go Normal file
@@ -0,0 +1,66 @@
+package ak
+
+import (
+	"encoding/base64"
+	"fmt"
+	"math/rand"
+	"net/http"
+	"time"
+
+	"github.com/google/uuid"
+	"github.com/gorilla/securecookie"
+	log "github.com/sirupsen/logrus"
+	"goauthentik.io/api"
+)
+
+func TestSecret() string {
+	return base64.RawURLEncoding.EncodeToString(securecookie.GenerateRandomKey(32))
+}
+
+func MockConfig() api.Config {
+	return *api.NewConfig(
+		*api.NewErrorReportingConfig(false, "test", false, 0.0),
+		[]api.CapabilitiesEnum{},
+		100,
+		100,
+		100,
+		100,
+	)
+}
+
+func MockAK(outpost api.Outpost, globalConfig api.Config) *APIController {
+	config := api.NewConfiguration()
+	config.HTTPClient = &http.Client{
+		Transport: GetTLSTransport(),
+	}
+	token := TestSecret()
+	config.AddDefaultHeader("Authorization", fmt.Sprintf("Bearer %s", token))
+
+	// create the API client, with the transport
+	apiClient := api.NewAPIClient(config)
+
+	log := log.WithField("logger", "authentik.outpost.ak-api-controller")
+
+	log.WithField("name", outpost.Name).Debug("Fetched outpost configuration")
+
+	log.Debug("Fetched global configuration")
+
+	// doGlobalSetup is called by the OnRefresh handler, which ticks on start
+	// doGlobalSetup(outpost, akConfig)
+
+	ac := &APIController{
+		Client:       apiClient,
+		GlobalConfig: globalConfig,
+
+		token:  token,
+		logger: log,
+
+		reloadOffset:        time.Duration(rand.Intn(10)) * time.Second,
+		instanceUUID:        uuid.New(),
+		Outpost:             outpost,
+		wsBackoffMultiplier: 1,
+		refreshHandlers:     make([]func(), 0),
+	}
+	ac.logger.WithField("offset", ac.reloadOffset.String()).Debug("HA Reload offset")
+	return ac
+}
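These helpers make it possible to build an APIController in unit tests without a running authentik instance. A rough usage sketch (the api.Outpost literal assumes the generated client exposes Name as a plain field; adjust to the actual model):

package ak_test

import (
	"testing"

	"goauthentik.io/api"
	"goauthentik.io/internal/outpost/ak"
)

func TestMockAK(t *testing.T) {
	// Hypothetical minimal outpost object; only the name is used here.
	outpost := api.Outpost{Name: "test-outpost"}

	ac := ak.MockAK(outpost, ak.MockConfig())
	if ac.Outpost.Name != "test-outpost" {
		t.Fatalf("unexpected outpost name: %s", ac.Outpost.Name)
	}
}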
@@ -16,6 +16,7 @@ import (
 	"goauthentik.io/internal/outpost/ldap/flags"
 	"goauthentik.io/internal/outpost/ldap/metrics"
 	"goauthentik.io/internal/outpost/ldap/server"
+	"goauthentik.io/internal/outpost/ldap/utils"
 )
 
 const ContextUserKey = "ak_user"
@@ -35,7 +36,7 @@ func NewDirectBinder(si server.LDAPServerInstance) *DirectBinder {
 }
 
 func (db *DirectBinder) GetUsername(dn string) (string, error) {
-	if !strings.HasSuffix(strings.ToLower(dn), strings.ToLower(db.si.GetBaseDN())) {
+	if !utils.HasSuffixNoCase(dn, db.si.GetBaseDN()) {
 		return "", errors.New("invalid base DN")
 	}
 	dns, err := goldap.ParseDN(dn)
@@ -12,11 +12,7 @@ func (pi *ProviderInstance) UserEntry(u api.User) *ldap.Entry {
 	attrs := utils.AKAttrsToLDAP(u.Attributes)
 
 	attrs = utils.EnsureAttributes(attrs, map[string][]string{
-		"memberOf": pi.GroupsForUser(u),
-		// Old fields for backwards compatibility
-		"accountStatus": {utils.BoolToString(*u.IsActive)},
-		"superuser": {utils.BoolToString(u.IsSuperuser)},
-		// End old fields
+		"memberOf": pi.GroupsForUser(u),
 		"goauthentik.io/ldap/active": {utils.BoolToString(*u.IsActive)},
 		"goauthentik.io/ldap/superuser": {utils.BoolToString(u.IsSuperuser)},
 		"cn": {u.Username},
@@ -124,7 +124,7 @@ func (pi *ProviderInstance) GetBaseEntry() *ldap.Entry {
 			},
 			{
 				Name: "vendorVersion",
-				Values: []string{fmt.Sprintf("authentik LDAP Outpost Version %s (build %s)", constants.VERSION, constants.BUILD())},
+				Values: []string{fmt.Sprintf("authentik LDAP Outpost Version %s", constants.FullVersion())},
 			},
 		},
 	}
@@ -140,26 +140,26 @@ func (pi *ProviderInstance) GetNeededObjects(scope int, baseDN string, filterOC
 	// If our requested base DN doesn't match any of the container DNs, then
 	// we're probably loading a user or group. If it does, then make sure our
 	// scope will eventually take us to users or groups.
-	if (baseDN == pi.BaseDN || strings.HasSuffix(baseDN, pi.UserDN)) && utils.IncludeObjectClass(filterOC, ldapConstants.GetUserOCs()) {
+	if (strings.EqualFold(baseDN, pi.BaseDN) || utils.HasSuffixNoCase(baseDN, pi.UserDN)) && utils.IncludeObjectClass(filterOC, ldapConstants.GetUserOCs()) {
 		if baseDN != pi.UserDN && baseDN != pi.BaseDN ||
-			baseDN == pi.BaseDN && scope > 1 ||
-			baseDN == pi.UserDN && scope > 0 {
+			strings.EqualFold(baseDN, pi.BaseDN) && scope > 1 ||
+			strings.EqualFold(baseDN, pi.UserDN) && scope > 0 {
 			needUsers = true
 		}
 	}
 
-	if (baseDN == pi.BaseDN || strings.HasSuffix(baseDN, pi.GroupDN)) && utils.IncludeObjectClass(filterOC, ldapConstants.GetGroupOCs()) {
+	if (strings.EqualFold(baseDN, pi.BaseDN) || utils.HasSuffixNoCase(baseDN, pi.GroupDN)) && utils.IncludeObjectClass(filterOC, ldapConstants.GetGroupOCs()) {
 		if baseDN != pi.GroupDN && baseDN != pi.BaseDN ||
-			baseDN == pi.BaseDN && scope > 1 ||
-			baseDN == pi.GroupDN && scope > 0 {
+			strings.EqualFold(baseDN, pi.BaseDN) && scope > 1 ||
+			strings.EqualFold(baseDN, pi.GroupDN) && scope > 0 {
 			needGroups = true
 		}
 	}
 
-	if (baseDN == pi.BaseDN || strings.HasSuffix(baseDN, pi.VirtualGroupDN)) && utils.IncludeObjectClass(filterOC, ldapConstants.GetVirtualGroupOCs()) {
+	if (strings.EqualFold(baseDN, pi.BaseDN) || utils.HasSuffixNoCase(baseDN, pi.VirtualGroupDN)) && utils.IncludeObjectClass(filterOC, ldapConstants.GetVirtualGroupOCs()) {
 		if baseDN != pi.VirtualGroupDN && baseDN != pi.BaseDN ||
-			baseDN == pi.BaseDN && scope > 1 ||
-			baseDN == pi.VirtualGroupDN && scope > 0 {
+			strings.EqualFold(baseDN, pi.BaseDN) && scope > 1 ||
+			strings.EqualFold(baseDN, pi.VirtualGroupDN) && scope > 0 {
 			needUsers = true
 		}
 	}
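The comparisons above switch from == and strings.HasSuffix to strings.EqualFold and a case-insensitive suffix helper, since LDAP DNs are case-insensitive but clients send them with arbitrary casing. A small standalone illustration (the DNs are made up):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// How a client might send the base DN vs. how the outpost is configured.
	requested := "OU=users,DC=goauthentik,DC=io"
	configured := "ou=users,dc=goauthentik,dc=io"

	fmt.Println(requested == configured)                  // false: the old exact match fails
	fmt.Println(strings.EqualFold(requested, configured)) // true: the new comparison matches
}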
@@ -25,7 +25,7 @@ var (
 func RunServer() {
 	m := mux.NewRouter()
 	l := log.WithField("logger", "authentik.outpost.metrics")
-	m.HandleFunc("/akprox/ping", func(rw http.ResponseWriter, r *http.Request) {
+	m.HandleFunc("/outpost.goauthentik.io/ping", func(rw http.ResponseWriter, r *http.Request) {
 		rw.WriteHeader(204)
 	})
 	m.Path("/metrics").Handler(promhttp.Handler())
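The health check route moves from /akprox/ping to /outpost.goauthentik.io/ping. A quick probe of the renamed endpoint (the listener address is an assumption; substitute the metrics address of your outpost):

package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Assumed address of the outpost's metrics/health listener.
	base := "http://localhost:9300"

	resp, err := http.Get(base + "/outpost.goauthentik.io/ping")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.StatusCode) // the handler above replies with 204
}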
@@ -44,7 +44,7 @@ func (ds *DirectSearcher) SearchBase(req *search.Request, authz bool) (ldap.Serv
 			},
 			{
 				Name: "vendorVersion",
-				Values: []string{fmt.Sprintf("authentik LDAP Outpost Version %s (build %s)", constants.VERSION, constants.BUILD())},
+				Values: []string{fmt.Sprintf("authentik LDAP Outpost Version %s", constants.FullVersion())},
 			},
 		},
 	},
@ -36,7 +36,7 @@ func NewDirectSearcher(si server.LDAPServerInstance) *DirectSearcher {
|
||||
|
||||
func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult, error) {
|
||||
accsp := sentry.StartSpan(req.Context(), "authentik.providers.ldap.search.check_access")
|
||||
baseDN := strings.ToLower(ds.si.GetBaseDN())
|
||||
baseDN := ds.si.GetBaseDN()
|
||||
|
||||
filterOC, err := ldap.GetFilterObjectClass(req.Filter)
|
||||
if err != nil {
|
||||
@ -59,7 +59,7 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
}).Inc()
|
||||
return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, fmt.Errorf("Search Error: Anonymous BindDN not allowed %s", req.BindDN)
|
||||
}
|
||||
if !strings.HasSuffix(req.BindDN, ","+baseDN) {
|
||||
if !utils.HasSuffixNoCase(req.BindDN, ","+baseDN) {
|
||||
metrics.RequestsRejected.With(prometheus.Labels{
|
||||
"outpost_name": ds.si.GetOutpostName(),
|
||||
"type": "search",
|
||||
@ -105,7 +105,7 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
scope := req.SearchRequest.Scope
|
||||
needUsers, needGroups := ds.si.GetNeededObjects(scope, req.BaseDN, filterOC)
|
||||
|
||||
if scope >= 0 && req.BaseDN == baseDN {
|
||||
if scope >= 0 && strings.EqualFold(req.BaseDN, baseDN) {
|
||||
if utils.IncludeObjectClass(filterOC, constants.GetDomainOCs()) {
|
||||
entries = append(entries, ds.si.GetBaseEntry())
|
||||
}
|
||||
@ -209,8 +209,8 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultOperationsError}, err
|
||||
}
|
||||
|
||||
if scope >= 0 && (req.BaseDN == ds.si.GetBaseDN() || strings.HasSuffix(req.BaseDN, ds.si.GetBaseUserDN())) {
|
||||
singleu := strings.HasSuffix(req.BaseDN, ","+ds.si.GetBaseUserDN())
|
||||
if scope >= 0 && (strings.EqualFold(req.BaseDN, ds.si.GetBaseDN()) || utils.HasSuffixNoCase(req.BaseDN, ds.si.GetBaseUserDN())) {
|
||||
singleu := utils.HasSuffixNoCase(req.BaseDN, ","+ds.si.GetBaseUserDN())
|
||||
|
||||
if !singleu && utils.IncludeObjectClass(filterOC, constants.GetContainerOCs()) {
|
||||
entries = append(entries, utils.GetContainerEntry(filterOC, ds.si.GetBaseUserDN(), constants.OUUsers))
|
||||
@ -220,7 +220,7 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
if scope >= 0 && users != nil && utils.IncludeObjectClass(filterOC, constants.GetUserOCs()) {
|
||||
for _, u := range *users {
|
||||
entry := ds.si.UserEntry(u)
|
||||
if req.BaseDN == entry.DN || !singleu {
|
||||
if strings.EqualFold(req.BaseDN, entry.DN) || !singleu {
|
||||
entries = append(entries, entry)
|
||||
}
|
||||
}
|
||||
@ -229,8 +229,8 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
scope += 1 // Return the scope to what it was before we descended
|
||||
}
|
||||
|
||||
if scope >= 0 && (req.BaseDN == ds.si.GetBaseDN() || strings.HasSuffix(req.BaseDN, ds.si.GetBaseGroupDN())) {
|
||||
singleg := strings.HasSuffix(req.BaseDN, ","+ds.si.GetBaseGroupDN())
|
||||
if scope >= 0 && (strings.EqualFold(req.BaseDN, ds.si.GetBaseDN()) || utils.HasSuffixNoCase(req.BaseDN, ds.si.GetBaseGroupDN())) {
|
||||
singleg := utils.HasSuffixNoCase(req.BaseDN, ","+ds.si.GetBaseGroupDN())
|
||||
|
||||
if !singleg && utils.IncludeObjectClass(filterOC, constants.GetContainerOCs()) {
|
||||
entries = append(entries, utils.GetContainerEntry(filterOC, ds.si.GetBaseGroupDN(), constants.OUGroups))
|
||||
@ -240,7 +240,7 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
if scope >= 0 && groups != nil && utils.IncludeObjectClass(filterOC, constants.GetGroupOCs()) {
|
||||
for _, g := range *groups {
|
||||
entry := group.FromAPIGroup(g, ds.si).Entry()
|
||||
if req.BaseDN == entry.DN || !singleg {
|
||||
if strings.EqualFold(req.BaseDN, entry.DN) || !singleg {
|
||||
entries = append(entries, entry)
|
||||
}
|
||||
}
|
||||
@ -249,8 +249,8 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
scope += 1 // Return the scope to what it was before we descended
|
||||
}
|
||||
|
||||
if scope >= 0 && (req.BaseDN == ds.si.GetBaseDN() || strings.HasSuffix(req.BaseDN, ds.si.GetBaseVirtualGroupDN())) {
|
||||
singlevg := strings.HasSuffix(req.BaseDN, ","+ds.si.GetBaseVirtualGroupDN())
|
||||
if scope >= 0 && (strings.EqualFold(req.BaseDN, ds.si.GetBaseDN()) || utils.HasSuffixNoCase(req.BaseDN, ds.si.GetBaseVirtualGroupDN())) {
|
||||
singlevg := utils.HasSuffixNoCase(req.BaseDN, ","+ds.si.GetBaseVirtualGroupDN())
|
||||
|
||||
if !singlevg || utils.IncludeObjectClass(filterOC, constants.GetContainerOCs()) {
|
||||
entries = append(entries, utils.GetContainerEntry(filterOC, ds.si.GetBaseVirtualGroupDN(), constants.OUVirtualGroups))
|
||||
@ -260,7 +260,7 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
if scope >= 0 && users != nil && utils.IncludeObjectClass(filterOC, constants.GetVirtualGroupOCs()) {
|
||||
for _, u := range *users {
|
||||
entry := group.FromAPIUser(u, ds.si).Entry()
|
||||
if req.BaseDN == entry.DN || !singlevg {
|
||||
if strings.EqualFold(req.BaseDN, entry.DN) || !singlevg {
|
||||
entries = append(entries, entry)
|
||||
}
|
||||
}
|
||||
|
@ -39,7 +39,7 @@ func NewMemorySearcher(si server.LDAPServerInstance) *MemorySearcher {
|
||||
|
||||
func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult, error) {
|
||||
accsp := sentry.StartSpan(req.Context(), "authentik.providers.ldap.search.check_access")
|
||||
baseDN := strings.ToLower(ms.si.GetBaseDN())
|
||||
baseDN := ms.si.GetBaseDN()
|
||||
|
||||
filterOC, err := ldap.GetFilterObjectClass(req.Filter)
|
||||
if err != nil {
|
||||
@ -62,7 +62,7 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
}).Inc()
|
||||
return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, fmt.Errorf("Search Error: Anonymous BindDN not allowed %s", req.BindDN)
|
||||
}
|
||||
if !strings.HasSuffix(req.BindDN, ","+baseDN) {
|
||||
if !utils.HasSuffixNoCase(req.BindDN, ","+baseDN) {
|
||||
metrics.RequestsRejected.With(prometheus.Labels{
|
||||
"outpost_name": ms.si.GetOutpostName(),
|
||||
"type": "search",
|
||||
@ -92,7 +92,7 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
scope := req.SearchRequest.Scope
|
||||
needUsers, needGroups := ms.si.GetNeededObjects(scope, req.BaseDN, filterOC)
|
||||
|
||||
if scope >= 0 && req.BaseDN == baseDN {
|
||||
if scope >= 0 && strings.EqualFold(req.BaseDN, baseDN) {
|
||||
if utils.IncludeObjectClass(filterOC, constants.GetDomainOCs()) {
|
||||
entries = append(entries, ms.si.GetBaseEntry())
|
||||
}
|
||||
@ -139,7 +139,7 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
// as a member.
|
||||
for _, u := range g.UsersObj {
|
||||
if flags.UserPk == u.Pk {
|
||||
// TODO: Is there a better way to clone this object?
|
||||
//TODO: Is there a better way to clone this object?
|
||||
fg := api.NewGroup(g.Pk, g.Name, g.Parent, g.ParentName, []int32{flags.UserPk}, []api.GroupMember{u})
|
||||
fg.SetAttributes(*g.Attributes)
|
||||
fg.SetIsSuperuser(*g.IsSuperuser)
|
||||
@ -155,8 +155,8 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultOperationsError}, err
|
||||
}
|
||||
|
||||
if scope >= 0 && (req.BaseDN == ms.si.GetBaseDN() || strings.HasSuffix(req.BaseDN, ms.si.GetBaseUserDN())) {
|
||||
singleu := strings.HasSuffix(req.BaseDN, ","+ms.si.GetBaseUserDN())
|
||||
if scope >= 0 && (strings.EqualFold(req.BaseDN, ms.si.GetBaseDN()) || utils.HasSuffixNoCase(req.BaseDN, ms.si.GetBaseUserDN())) {
|
||||
singleu := utils.HasSuffixNoCase(req.BaseDN, ","+ms.si.GetBaseUserDN())
|
||||
|
||||
if !singleu && utils.IncludeObjectClass(filterOC, constants.GetContainerOCs()) {
|
||||
entries = append(entries, utils.GetContainerEntry(filterOC, ms.si.GetBaseUserDN(), constants.OUUsers))
|
||||
@ -166,7 +166,7 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
if scope >= 0 && users != nil && utils.IncludeObjectClass(filterOC, constants.GetUserOCs()) {
|
||||
for _, u := range *users {
|
||||
entry := ms.si.UserEntry(u)
|
||||
if req.BaseDN == entry.DN || !singleu {
|
||||
if strings.EqualFold(req.BaseDN, entry.DN) || !singleu {
|
||||
entries = append(entries, entry)
|
||||
}
|
||||
}
|
||||
@ -175,8 +175,8 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
scope += 1 // Return the scope to what it was before we descended
|
||||
}
|
||||
|
||||
if scope >= 0 && (req.BaseDN == ms.si.GetBaseDN() || strings.HasSuffix(req.BaseDN, ms.si.GetBaseGroupDN())) {
|
||||
singleg := strings.HasSuffix(req.BaseDN, ","+ms.si.GetBaseGroupDN())
|
||||
if scope >= 0 && (strings.EqualFold(req.BaseDN, ms.si.GetBaseDN()) || utils.HasSuffixNoCase(req.BaseDN, ms.si.GetBaseGroupDN())) {
|
||||
singleg := utils.HasSuffixNoCase(req.BaseDN, ","+ms.si.GetBaseGroupDN())
|
||||
|
||||
if !singleg && utils.IncludeObjectClass(filterOC, constants.GetContainerOCs()) {
|
||||
entries = append(entries, utils.GetContainerEntry(filterOC, ms.si.GetBaseGroupDN(), constants.OUGroups))
|
||||
@ -185,7 +185,7 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
|
||||
if scope >= 0 && groups != nil && utils.IncludeObjectClass(filterOC, constants.GetGroupOCs()) {
|
||||
for _, g := range groups {
|
||||
if req.BaseDN == g.DN || !singleg {
|
||||
if strings.EqualFold(req.BaseDN, g.DN) || !singleg {
|
||||
entries = append(entries, g.Entry())
|
||||
}
|
||||
}
|
||||
@ -194,8 +194,8 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
scope += 1 // Return the scope to what it was before we descended
|
||||
}
|
||||
|
||||
if scope >= 0 && (req.BaseDN == ms.si.GetBaseDN() || strings.HasSuffix(req.BaseDN, ms.si.GetBaseVirtualGroupDN())) {
|
||||
singlevg := strings.HasSuffix(req.BaseDN, ","+ms.si.GetBaseVirtualGroupDN())
|
||||
if scope >= 0 && (strings.EqualFold(req.BaseDN, ms.si.GetBaseDN()) || utils.HasSuffixNoCase(req.BaseDN, ms.si.GetBaseVirtualGroupDN())) {
|
||||
singlevg := utils.HasSuffixNoCase(req.BaseDN, ","+ms.si.GetBaseVirtualGroupDN())
|
||||
|
||||
if !singlevg && utils.IncludeObjectClass(filterOC, constants.GetContainerOCs()) {
|
||||
entries = append(entries, utils.GetContainerEntry(filterOC, ms.si.GetBaseVirtualGroupDN(), constants.OUVirtualGroups))
|
||||
@ -205,7 +205,7 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
|
||||
if scope >= 0 && users != nil && utils.IncludeObjectClass(filterOC, constants.GetVirtualGroupOCs()) {
|
||||
for _, u := range *users {
|
||||
entry := group.FromAPIUser(u, ms.si).Entry()
|
||||
if req.BaseDN == entry.DN || !singlevg {
|
||||
if strings.EqualFold(req.BaseDN, entry.DN) || !singlevg {
|
||||
entries = append(entries, entry)
|
||||
}
|
||||
}
|
||||
|
@@ -26,7 +26,6 @@ type Request struct {
 func NewRequest(bindDN string, searchReq ldap.SearchRequest, conn net.Conn) (*Request, *sentry.Span) {
 	rid := uuid.New().String()
 	bindDN = strings.ToLower(bindDN)
-	searchReq.BaseDN = strings.ToLower(searchReq.BaseDN)
 	span := sentry.StartSpan(context.TODO(), "authentik.providers.ldap.search", sentry.TransactionName("authentik.providers.ldap.search"))
 	span.Description = fmt.Sprintf("%s (%s)", searchReq.BaseDN, ldap.ScopeMap[searchReq.Scope])
 	span.SetTag("request_uid", rid)
@@ -2,6 +2,7 @@ package utils
 
 import (
 	"reflect"
+	"strings"
 
 	"github.com/nmcclain/ldap"
 	log "github.com/sirupsen/logrus"
@@ -117,3 +118,7 @@ func GetContainerEntry(filterOC string, dn string, ou string) *ldap.Entry {
 
 	return nil
 }
+
+func HasSuffixNoCase(s1 string, s2 string) bool {
+	return strings.HasSuffix(strings.ToLower(s1), strings.ToLower(s2))
+}
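HasSuffixNoCase is the helper the binders and searchers above now use for base-DN matching. A short test-style sketch of its behaviour (example DNs only):

package utils_test

import (
	"testing"

	"goauthentik.io/internal/outpost/ldap/utils"
)

func TestHasSuffixNoCase(t *testing.T) {
	// A bind DN with different casing than the configured base DN still matches.
	if !utils.HasSuffixNoCase("CN=akadmin,OU=users,DC=goauthentik,DC=io", "dc=goauthentik,dc=io") {
		t.Fatal("expected case-insensitive suffix match")
	}
	// A DN under a different base DN does not.
	if utils.HasSuffixNoCase("cn=akadmin,dc=example,dc=org", "dc=goauthentik,dc=io") {
		t.Fatal("did not expect a match")
	}
}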
Some files were not shown because too many files have changed in this diff.