Compare commits

..

5 Commits

SHA1        Message                                                                               Date
fe5d22ce6c  release: 2021.8.5                                                                     2021-09-10 22:10:35 +02:00
0e30b6ee55  lifecycle: fix worker startup error when docker socket's group is not called docker   2021-09-10 22:05:00 +02:00
            Signed-off-by: Jens Langhammer <jens.langhammer@beryju.org>
6cbba45291  web: ignore network error                                                             2021-09-10 21:51:11 +02:00
            Signed-off-by: Jens Langhammer <jens.langhammer@beryju.org>
ba023a3bba  outpost: update global outpost config on refresh                                      2021-09-10 21:51:02 +02:00
            Signed-off-by: Jens Langhammer <jens.langhammer@beryju.org>
6c805bcf32  sources/oauth: don't cancel flow when redirecting                                     2021-09-10 21:50:45 +02:00
            Signed-off-by: Jens Langhammer <jens.langhammer@beryju.org>
525 changed files with 6166 additions and 9580 deletions

View File

@ -1,5 +1,5 @@
[bumpversion]
current_version = 2021.9.5
current_version = 2021.8.5
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)

View File

@ -27,7 +27,7 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 2021.8.5]
- authentik version: [e.g. 0.10.0-stable]
- Deployment: [e.g. docker-compose, helm]
**Additional context**

View File

@ -20,7 +20,7 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 2021.8.5]
- authentik version: [e.g. 0.10.0-stable]
- Deployment: [e.g. docker-compose, helm]
**Additional context**

View File

@ -1 +0,0 @@
keypair

View File

@ -25,14 +25,14 @@ jobs:
- uses: actions/setup-python@v2
with:
python-version: '3.9'
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- id: cache-pipenv
uses: actions/cache@v2.1.6
with:
path: ~/.local/share/virtualenvs
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- name: run pylint
run: pipenv run pylint authentik tests lifecycle
@ -43,14 +43,14 @@ jobs:
- uses: actions/setup-python@v2
with:
python-version: '3.9'
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- id: cache-pipenv
uses: actions/cache@v2.1.6
with:
path: ~/.local/share/virtualenvs
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- name: run black
run: pipenv run black --check authentik tests lifecycle
@ -61,14 +61,14 @@ jobs:
- uses: actions/setup-python@v2
with:
python-version: '3.9'
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- id: cache-pipenv
uses: actions/cache@v2.1.6
with:
path: ~/.local/share/virtualenvs
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- name: run isort
run: pipenv run isort --check authentik tests lifecycle
@ -79,14 +79,14 @@ jobs:
- uses: actions/setup-python@v2
with:
python-version: '3.9'
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- id: cache-pipenv
uses: actions/cache@v2.1.6
with:
path: ~/.local/share/virtualenvs
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- name: run bandit
run: pipenv run bandit -r authentik tests lifecycle
@ -113,14 +113,14 @@ jobs:
- uses: actions/setup-python@v2
with:
python-version: '3.9'
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- id: cache-pipenv
uses: actions/cache@v2.1.6
with:
path: ~/.local/share/virtualenvs
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- name: run migrations
run: pipenv run python -m lifecycle.migrate
@ -128,8 +128,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: actions/setup-python@v2
with:
python-version: '3.9'
@ -138,26 +136,21 @@ jobs:
# Copy current, latest config to local
cp authentik/lib/default.yml local.env.yml
git checkout $(git describe --abbrev=0 --match 'version/*')
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- id: cache-pipenv
uses: actions/cache@v2.1.6
with:
path: ~/.local/share/virtualenvs
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- name: run migrations to stable
run: pipenv run python -m lifecycle.migrate
- name: prepare variables
id: ev
run: |
python ./scripts/gh_do_set_branch.py
- name: checkout current code
run: |
set -x
git fetch
git checkout ${{ steps.ev.outputs.branchName }}
git checkout $GITHUB_REF
pipenv sync --dev
- name: migrate to latest
run: pipenv run python -m lifecycle.migrate
@ -168,14 +161,14 @@ jobs:
- uses: actions/setup-python@v2
with:
python-version: '3.9'
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- id: cache-pipenv
uses: actions/cache@v2.1.6
with:
path: ~/.local/share/virtualenvs
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- uses: testspace-com/setup-testspace@v1
with:
@ -197,14 +190,14 @@ jobs:
- uses: actions/setup-python@v2
with:
python-version: '3.9'
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- id: cache-pipenv
uses: actions/cache@v2.1.6
with:
path: ~/.local/share/virtualenvs
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
run: scripts/ci_prepare.sh
- uses: testspace-com/setup-testspace@v1
with:
@ -236,14 +229,14 @@ jobs:
- uses: testspace-com/setup-testspace@v1
with:
domain: ${{github.repository_owner}}
# - id: cache-pipenv
# uses: actions/cache@v2.1.6
# with:
# path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- id: cache-pipenv
uses: actions/cache@v2.1.6
with:
path: ~/.local/share/virtualenvs
key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
- name: prepare
# env:
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
env:
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
run: |
scripts/ci_prepare.sh
docker-compose -f tests/e2e/ci.docker-compose.yml up -d
@ -304,6 +297,6 @@ jobs:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
beryju.org/authentik/server:gh-${{ steps.ev.outputs.branchName }}
beryju.org/authentik/server:gh-${{ steps.ev.outputs.branchName }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}
beryju.org/authentik/server:gh-${{ steps.ev.outputs.branchName }}-${{ steps.ev.outputs.timestamp }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}

View File

@ -18,6 +18,9 @@ jobs:
- uses: actions/setup-go@v2
with:
go-version: '^1.16.3'
- name: Generate API
run: |
make gen-outpost
- name: Run linter
run: |
# Create folder structure for go embeds
@ -41,6 +44,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: prepare variables
@ -65,5 +70,6 @@ jobs:
beryju.org/authentik/outpost-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchName }}-${{ steps.ev.outputs.timestamp }}
beryju.org/authentik/outpost-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}

View File

@ -61,7 +61,7 @@ jobs:
npm install
- name: Generate API
run: make gen-web
- name: lit-analyse
- name: prettier
run: |
cd web
npm run lit-analyse

View File

@ -33,14 +33,14 @@ jobs:
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik:2021.9.5,
beryju/authentik:2021.8.5,
beryju/authentik:latest,
ghcr.io/goauthentik/server:2021.9.5,
ghcr.io/goauthentik/server:2021.8.5,
ghcr.io/goauthentik/server:latest
platforms: linux/amd64,linux/arm64
context: .
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.9.5', 'rc') }}
if: ${{ github.event_name == 'release' && !contains('2021.8.5', 'rc') }}
run: |
docker pull beryju/authentik:latest
docker tag beryju/authentik:latest beryju/authentik:stable
@ -75,14 +75,14 @@ jobs:
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik-proxy:2021.9.5,
beryju/authentik-proxy:2021.8.5,
beryju/authentik-proxy:latest,
ghcr.io/goauthentik/proxy:2021.9.5,
ghcr.io/goauthentik/proxy:2021.8.5,
ghcr.io/goauthentik/proxy:latest
file: proxy.Dockerfile
platforms: linux/amd64,linux/arm64
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.9.5', 'rc') }}
if: ${{ github.event_name == 'release' && !contains('2021.8.5', 'rc') }}
run: |
docker pull beryju/authentik-proxy:latest
docker tag beryju/authentik-proxy:latest beryju/authentik-proxy:stable
@ -117,14 +117,14 @@ jobs:
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik-ldap:2021.9.5,
beryju/authentik-ldap:2021.8.5,
beryju/authentik-ldap:latest,
ghcr.io/goauthentik/ldap:2021.9.5,
ghcr.io/goauthentik/ldap:2021.8.5,
ghcr.io/goauthentik/ldap:latest
file: ldap.Dockerfile
platforms: linux/amd64,linux/arm64
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.9.5', 'rc') }}
if: ${{ github.event_name == 'release' && !contains('2021.8.5', 'rc') }}
run: |
docker pull beryju/authentik-ldap:latest
docker tag beryju/authentik-ldap:latest beryju/authentik-ldap:stable
@ -175,7 +175,7 @@ jobs:
SENTRY_PROJECT: authentik
SENTRY_URL: https://sentry.beryju.org
with:
version: authentik@2021.9.5
version: authentik@2021.8.5
environment: beryjuorg-prod
sourcemaps: './web/dist'
url_prefix: '~/static/dist'

View File

@ -27,7 +27,7 @@ jobs:
docker-compose run -u root server test
- name: Extract version number
id: get_version
uses: actions/github-script@v5
uses: actions/github-script@v4.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |

View File

@ -117,7 +117,7 @@ This section guides you through submitting a bug report for authentik. Following
Whenever authentik encounters an error, it will be logged as an Event with the type `system_exception`. This event type has a button to directly open a pre-filled GitHub issue form.
This form will have the full stack trace of the error that occurred and shouldn't contain any sensitive data.
This form will have the full stack trace of the error that ocurred and shouldn't contain any sensitive data.
### Suggesting Enhancements

View File

@ -1,5 +1,5 @@
# Stage 1: Lock python dependencies
FROM docker.io/python:3.9-slim-buster as locker
FROM python:3.9-slim-buster as locker
COPY ./Pipfile /app/
COPY ./Pipfile.lock /app/
@ -11,23 +11,38 @@ RUN pip install pipenv && \
pipenv lock -r --dev-only > requirements-dev.txt
# Stage 2: Build website
FROM docker.io/node as website-builder
FROM node as website-builder
COPY ./website /static/
ENV NODE_ENV=production
RUN cd /static && npm i && npm run build-docs-only
# Stage 3: Build webui
FROM docker.io/node as web-builder
# Stage 3: Generate API Client
FROM openapitools/openapi-generator-cli as go-api-builder
COPY ./schema.yml /local/schema.yml
RUN docker-entrypoint.sh generate \
--git-host goauthentik.io \
--git-repo-id outpost \
--git-user-id api \
-i /local/schema.yml \
-g go \
-o /local/api \
--additional-properties=packageName=api,enumClassPrefix=true,useOneOfDiscriminatorLookup=true && \
rm -f /local/api/go.mod /local/api/go.sum
# Stage 4: Build webui
FROM node as web-builder
COPY ./web /static/
ENV NODE_ENV=production
RUN cd /static && npm i && npm run build
# Stage 4: Build go proxy
FROM docker.io/golang:1.17.1 AS builder
# Stage 5: Build go proxy
FROM golang:1.17.0 AS builder
WORKDIR /work
@ -37,6 +52,7 @@ COPY --from=web-builder /static/dist/ /work/web/dist/
COPY --from=web-builder /static/authentik/ /work/web/authentik/
COPY --from=website-builder /static/help/ /work/website/help/
COPY --from=go-api-builder /local/api api
COPY ./cmd /work/cmd
COPY ./web/static.go /work/web/static.go
COPY ./website/static.go /work/website/static.go
@ -46,8 +62,8 @@ COPY ./go.sum /work/go.sum
RUN go build -o /work/authentik ./cmd/server/main.go
# Stage 5: Run
FROM docker.io/python:3.9-slim-buster
# Stage 6: Run
FROM python:3.9-slim-buster
WORKDIR /
COPY --from=locker /app/requirements.txt /
@ -81,7 +97,7 @@ COPY --from=builder /work/authentik /authentik-proxy
USER authentik
ENV TMPDIR /dev/shm/
ENV PYTHONUNBUFFERED 1
ENV PYTHONUBUFFERED 1
ENV prometheus_multiproc_dir /dev/shm/
ENV PATH "/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/lifecycle"
ENTRYPOINT [ "/lifecycle/ak" ]

View File

@ -20,7 +20,6 @@ test:
lint-fix:
isort authentik tests lifecycle
black authentik tests lifecycle
codespell -I .github/codespell-words.txt -S 'web/src/locales/**' -w authentik internal cmd web/src website/src
lint:
pyright authentik tests lifecycle
@ -63,10 +62,10 @@ gen-outpost:
--additional-properties=packageName=api,enumClassPrefix=true,useOneOfDiscriminatorLookup=true,disallowAdditionalPropertiesIfNotPresent=false
rm -f api/go.mod api/go.sum
gen: gen-build gen-clean gen-web
gen: gen-build gen-clean gen-web gen-outpost
migrate:
python -m lifecycle.migrate
run:
WORKERS=1 go run -v cmd/server/main.go
go run -v cmd/server/main.go

View File

@ -48,7 +48,9 @@ duo-client = "*"
ua-parser = "*"
deepmerge = "*"
colorama = "*"
codespell = "*"
[requires]
python_version = "3.9"
[dev-packages]
bandit = "*"

Pipfile.lock (generated, 399 changed lines)
View File

@ -1,10 +1,12 @@
{
"_meta": {
"hash": {
"sha256": "babb6061c555f8f239f00210b2a0356763bdaaca2f3d704cf3444891b84db84d"
"sha256": "f0befa9b3dacc1c3363b9442fa7a43f6be2c46a8fcb80a994230d288a384e54d"
},
"pipfile-spec": 6,
"requires": {},
"requires": {
"python_version": "3.9"
},
"sources": [
{
"name": "pypi",
@ -120,27 +122,27 @@
},
"boto3": {
"hashes": [
"sha256:7b45b224442c479de4bc6e6e9cb0557b642fc7a77edc8702e393ccaa2e0aa128",
"sha256:c388da7dc1a596755f39de990a72e05cee558d098e81de63de55bd9598cc5134"
"sha256:4df1085f5c24504a1b1a6584947f27b67c26eda123f29d3cecce9b2fd683e09b",
"sha256:a7fccb61d95230322dd812629455df14167307c569077fa89d297eae73605ffb"
],
"index": "pypi",
"version": "==1.18.48"
"version": "==1.18.36"
},
"botocore": {
"hashes": [
"sha256:17a10dd33334e7e3aaa4e12f66317284f96bb53267e20bc877a187c442681772",
"sha256:2089f9fa36a59d8c02435c49d58ccc7b3ceb9c0c054ea4f71631c3c3a1c5245e"
"sha256:5b9a7d30e44b8a0a2bbbde62ae01bf6c349017e836985a0248552b00bbce7fae",
"sha256:e3e522fbe0bad1197aa7182451dc05f650310e77cf0a77749f6a5e82794c53de"
],
"markers": "python_version >= '3.6'",
"version": "==1.21.51"
"version": "==1.21.36"
},
"cachetools": {
"hashes": [
"sha256:0a3d3556c2c3befdbba2f93b78792c199c66201c999e97947ea0b7437758246b",
"sha256:6a6fa6802188ab7e77bab2db001d676e854499552b0037d999d5b9f211db5250"
"sha256:2cc0b89715337ab6dbba85b5b50effe2b0c74e035d83ee8ed637cf52f12ae001",
"sha256:61b5ed1e22a0924aed1d23b478f37e8d52549ff8a961de2909c69bf950020cff"
],
"markers": "python_version ~= '3.5'",
"version": "==4.2.3"
"version": "==4.2.2"
},
"cbor2": {
"hashes": [
@ -252,11 +254,11 @@
},
"charset-normalizer": {
"hashes": [
"sha256:5d209c0a931f215cee683b6445e2d77677e7e75e159f78def0db09d68fafcaa6",
"sha256:5ec46d183433dcbd0ab716f2d7f29d8dee50505b3fdb40c6b985c7c4f5a3591f"
"sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b",
"sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"
],
"markers": "python_version >= '3'",
"version": "==2.0.6"
"version": "==2.0.4"
},
"click": {
"hashes": [
@ -268,11 +270,9 @@
},
"click-didyoumean": {
"hashes": [
"sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667",
"sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"
"sha256:112229485c9704ff51362fe34b2d4f0b12fc71cc20f6d2b3afabed4b8bfa6aeb"
],
"markers": "python_version < '4' and python_full_version >= '3.6.2'",
"version": "==0.3.0"
"version": "==0.0.3"
},
"click-plugins": {
"hashes": [
@ -288,14 +288,6 @@
],
"version": "==0.2.0"
},
"codespell": {
"hashes": [
"sha256:19d3fe5644fef3425777e66f225a8c82d39059dcfe9edb3349a8a2cf48383ee5",
"sha256:b864c7d917316316ac24272ee992d7937c3519be4569209c5b60035ac5d569b5"
],
"index": "pypi",
"version": "==2.1.0"
},
"colorama": {
"hashes": [
"sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b",
@ -313,28 +305,25 @@
},
"cryptography": {
"hashes": [
"sha256:07bb7fbfb5de0980590ddfc7f13081520def06dc9ed214000ad4372fb4e3c7f6",
"sha256:18d90f4711bf63e2fb21e8c8e51ed8189438e6b35a6d996201ebd98a26abbbe6",
"sha256:1ed82abf16df40a60942a8c211251ae72858b25b7421ce2497c2eb7a1cee817c",
"sha256:22a38e96118a4ce3b97509443feace1d1011d0571fae81fc3ad35f25ba3ea999",
"sha256:2d69645f535f4b2c722cfb07a8eab916265545b3475fdb34e0be2f4ee8b0b15e",
"sha256:4a2d0e0acc20ede0f06ef7aa58546eee96d2592c00f450c9acb89c5879b61992",
"sha256:54b2605e5475944e2213258e0ab8696f4f357a31371e538ef21e8d61c843c28d",
"sha256:7075b304cd567694dc692ffc9747f3e9cb393cc4aa4fb7b9f3abd6f5c4e43588",
"sha256:7b7ceeff114c31f285528ba8b390d3e9cfa2da17b56f11d366769a807f17cbaa",
"sha256:7eba2cebca600a7806b893cb1d541a6e910afa87e97acf2021a22b32da1df52d",
"sha256:928185a6d1ccdb816e883f56ebe92e975a262d31cc536429041921f8cb5a62fd",
"sha256:9933f28f70d0517686bd7de36166dda42094eac49415459d9bdf5e7df3e0086d",
"sha256:a688ebcd08250eab5bb5bca318cc05a8c66de5e4171a65ca51db6bd753ff8953",
"sha256:abb5a361d2585bb95012a19ed9b2c8f412c5d723a9836418fab7aaa0243e67d2",
"sha256:c10c797ac89c746e488d2ee92bd4abd593615694ee17b2500578b63cad6b93a8",
"sha256:ced40344e811d6abba00295ced98c01aecf0c2de39481792d87af4fa58b7b4d6",
"sha256:d57e0cdc1b44b6cdf8af1d01807db06886f10177469312fbde8f44ccbb284bc9",
"sha256:d99915d6ab265c22873f1b4d6ea5ef462ef797b4140be4c9d8b179915e0985c6",
"sha256:eb80e8a1f91e4b7ef8b33041591e6d89b2b8e122d787e87eeb2b08da71bb16ad",
"sha256:ebeddd119f526bcf323a89f853afb12e225902a24d29b55fe18dd6fcb2838a76"
"sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e",
"sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b",
"sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7",
"sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085",
"sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc",
"sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a",
"sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498",
"sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9",
"sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c",
"sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7",
"sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb",
"sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14",
"sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af",
"sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e",
"sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5",
"sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06",
"sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"
],
"version": "==35.0.0"
"version": "==3.4.8"
},
"dacite": {
"hashes": [
@ -382,11 +371,11 @@
},
"django-filter": {
"hashes": [
"sha256:632a251fa8f1aadb4b8cceff932bb52fe2f826dd7dfe7f3eac40e5c463d6836e",
"sha256:f4a6737a30104c98d2e2a5fb93043f36dd7978e0c7ddc92f5998e85433ea5063"
"sha256:84e9d5bb93f237e451db814ed422a3a625751cbc9968b484ecc74964a8696b06",
"sha256:e00d32cebdb3d54273c48f4f878f898dced8d5dfaad009438fe61ebdf535ace1"
],
"index": "pypi",
"version": "==21.1"
"version": "==2.4.0"
},
"django-guardian": {
"hashes": [
@ -406,11 +395,11 @@
},
"django-otp": {
"hashes": [
"sha256:0c03a471db9e876f3671314bc9a65bd56a5c3c108ee0562c473701310bba4a77",
"sha256:4c90cdaed683d736b0efafc034a3c6b410e1be2a53c24da287165b1f371d8776"
"sha256:01b5888f0bde5125e139433aacb947e52d5c406fa56c9db43c3e8d75b5c323c4",
"sha256:0d56dd2a7fbb6ee6e54557e036ca64add0bd3596f471794bad673b7637d5e935"
],
"index": "pypi",
"version": "==1.1.1"
"version": "==1.0.6"
},
"django-prometheus": {
"hashes": [
@ -462,11 +451,11 @@
},
"drf-spectacular": {
"hashes": [
"sha256:65df818226477cdfa629947ea52bc0cc13eb40550b192eeccec64a6b782651fd",
"sha256:f71205da3645d770545abeaf48e8a15afd6ee9a76e57c03df4592e51be1059bf"
"sha256:47ef6ec8ff48ac8aede6ec12450a55fee381cf84de969ef1724dcde5a93de6b8",
"sha256:d746b936cb4cddec380ea95bf91de6a6721777dfc42e0eea53b83c61a625e94e"
],
"index": "pypi",
"version": "==0.19.0"
"version": "==0.18.2"
},
"duo-client": {
"hashes": [
@ -493,19 +482,19 @@
},
"geoip2": {
"hashes": [
"sha256:f150bed3190d543712a17467208388d31bd8ddb49b2226fba53db8aaedb8ba89",
"sha256:f9172cdfb2a5f9225ace5e30dd7426413ad28798a5f474cd1538780686bd6a87"
"sha256:906a1dbf15a179a1af3522970e8420ab15bb3e0afc526942cc179e12146d9c1d",
"sha256:b97b44031fdc463e84eb1316b4f19edd978cb1d78703465fcb1e36dc5a822ba6"
],
"index": "pypi",
"version": "==4.4.0"
"version": "==4.2.0"
},
"google-auth": {
"hashes": [
"sha256:2a92b485afed5292946b324e91fcbe03db277ee4cb64c998c6cfa66d4af01dee",
"sha256:6dc8173abd50f25b6e62fc5b42802c96fc7cd9deb9bfeeb10a79f5606225cdf4"
"sha256:104475dc4d57bbae49017aea16fffbb763204fa2d6a70f1f3cc79962c1a383a4",
"sha256:cde472372e030e1e0bc64dac00fb53e6c095d7ab641f4281e2c995e85e205d8b"
],
"markers": "python_version >= '3.6'",
"version": "==2.2.1"
"version": "==2.0.2"
},
"gunicorn": {
"hashes": [
@ -629,10 +618,10 @@
},
"jsonschema": {
"hashes": [
"sha256:bc51325b929171791c42ebc1c70b9713eb134d3bb8ebd5474c8b659b15be6d86",
"sha256:c773028c649441ab980015b5b622f4cd5134cf563daaf0235ca4b73cc3734f20"
"sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163",
"sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"
],
"version": "==4.0.0"
"version": "==3.2.0"
},
"kombu": {
"hashes": [
@ -717,10 +706,10 @@
},
"maxminddb": {
"hashes": [
"sha256:e37707ec4fab115804670e0fb7aedb4b57075a8b6f80052bdc648d3c005184e5"
"sha256:47e86a084dd814fac88c99ea34ba3278a74bc9de5a25f4b815b608798747c7dc"
],
"markers": "python_version >= '3.6'",
"version": "==2.2.0"
"version": "==2.0.3"
},
"msgpack": {
"hashes": [
@ -911,39 +900,39 @@
},
"pycryptodome": {
"hashes": [
"sha256:04e14c732c3693d2830839feed5129286ce47ffa8bfe90e4ae042c773e51c677",
"sha256:11d3164fb49fdee000fde05baecce103c0c698168ef1a18d9c7429dd66f0f5bb",
"sha256:217dcc0c92503f7dd4b3d3b7d974331a4419f97f555c99a845c3b366fed7056b",
"sha256:24c1b7705d19d8ae3e7255431efd2e526006855df62620118dd7b5374c6372f6",
"sha256:309529d2526f3fb47102aeef376b3459110a6af7efb162e860b32e3a17a46f06",
"sha256:3a153658d97258ca20bf18f7fe31c09cc7c558b6f8974a6ec74e19f6c634bd64",
"sha256:3f9fb499e267039262569d08658132c9cd8b136bf1d8c56b72f70ed05551e526",
"sha256:3faa6ebd35c61718f3f8862569c1f38450c24f3ededb213e1a64806f02f584bc",
"sha256:40083b0d7f277452c7f2dd4841801f058cc12a74c219ee4110d65774c6a58bef",
"sha256:49e54f2245befb0193848c8c8031d8d1358ed4af5a1ae8d0a3ba669a5cdd3a72",
"sha256:4e8fc4c48365ce8a542fe48bf1360da05bb2851df12f64fc94d751705e7cdbe7",
"sha256:54d4e4d45f349d8c4e2f31c2734637ff62a844af391b833f789da88e43a8f338",
"sha256:66301e4c42dee43ee2da256625d3fe81ef98cc9924c2bd535008cc3ad8ded77b",
"sha256:6b45fcace5a5d9c57ba87cf804b161adc62aa826295ce7f7acbcbdc0df74ed37",
"sha256:7efec2418e9746ec48e264eea431f8e422d931f71c57b1c96ee202b117f58fa9",
"sha256:851e6d4930b160417235955322db44adbdb19589918670d63f4acd5d92959ac0",
"sha256:8e82524e7c354033508891405574d12e612cc4fdd3b55d2c238fc1a3e300b606",
"sha256:8ec154ec445412df31acf0096e7f715e30e167c8f2318b8f5b1ab7c28f4c82f7",
"sha256:91ba4215a1f37d0f371fe43bc88c5ff49c274849f3868321c889313787de7672",
"sha256:97e7df67a4da2e3f60612bbfd6c3f243a63a15d8f4797dd275e1d7b44a65cb12",
"sha256:9a2312440057bf29b9582f72f14d79692044e63bfbc4b4bbea8559355f44f3dd",
"sha256:a7471646d8cd1a58bb696d667dcb3853e5c9b341b68dcf3c3cc0893d0f98ca5f",
"sha256:ac3012c36633564b2b5539bb7c6d9175f31d2ce74844e9abe654c428f02d0fd8",
"sha256:b1daf251395af7336ddde6a0015ba5e632c18fe646ba930ef87402537358e3b4",
"sha256:b217b4525e60e1af552d62bec01b4685095436d4de5ecde0f05d75b2f95ba6d4",
"sha256:c61ea053bd5d4c12a063d7e704fbe1c45abb5d2510dab55bd95d166ba661604f",
"sha256:c6469d1453f5864e3321a172b0aa671b938d753cbf2376b99fa2ab8841539bb8",
"sha256:cefe6b267b8e5c3c72e11adec35a9c7285b62e8ea141b63e87055e9a9e5f2f8c",
"sha256:d713dc0910e5ded07852a05e9b75f1dd9d3a31895eebee0668f612779b2a748c",
"sha256:db15fa07d2a4c00beeb5e9acdfdbc1c79f9ccfbdc1a8f36c82c4aa44951b33c9"
"sha256:09c1555a3fa450e7eaca41ea11cd00afe7c91fef52353488e65663777d8524e0",
"sha256:12222a5edc9ca4a29de15fbd5339099c4c26c56e13c2ceddf0b920794f26165d",
"sha256:1723ebee5561628ce96748501cdaa7afaa67329d753933296321f0be55358dce",
"sha256:1c5e1ca507de2ad93474be5cfe2bfa76b7cf039a1a32fc196f40935944871a06",
"sha256:2603c98ae04aac675fefcf71a6c87dc4bb74a75e9071ae3923bbc91a59f08d35",
"sha256:2dea65df54349cdfa43d6b2e8edb83f5f8d6861e5cf7b1fbc3e34c5694c85e27",
"sha256:31c1df17b3dc5f39600a4057d7db53ac372f492c955b9b75dd439f5d8b460129",
"sha256:38661348ecb71476037f1e1f553159b80d256c00f6c0b00502acac891f7116d9",
"sha256:3e2e3a06580c5f190df843cdb90ea28d61099cf4924334d5297a995de68e4673",
"sha256:3f840c49d38986f6e17dbc0673d37947c88bc9d2d9dba1c01b979b36f8447db1",
"sha256:501ab36aae360e31d0ec370cf5ce8ace6cb4112060d099b993bc02b36ac83fb6",
"sha256:60386d1d4cfaad299803b45a5bc2089696eaf6cdd56f9fc17479a6f89595cfc8",
"sha256:6260e24d41149268122dd39d4ebd5941e9d107f49463f7e071fd397e29923b0c",
"sha256:6bbf7fee7b7948b29d7e71fcacf48bac0c57fb41332007061a933f2d996f9713",
"sha256:6d2df5223b12437e644ce0a3be7809471ffa71de44ccd28b02180401982594a6",
"sha256:758949ca62690b1540dfb24ad773c6da9cd0e425189e83e39c038bbd52b8e438",
"sha256:77997519d8eb8a4adcd9a47b9cec18f9b323e296986528186c0e9a7a15d6a07e",
"sha256:7fd519b89585abf57bf47d90166903ec7b43af4fe23c92273ea09e6336af5c07",
"sha256:98213ac2b18dc1969a47bc65a79a8fca02a414249d0c8635abb081c7f38c91b6",
"sha256:99b2f3fc51d308286071d0953f92055504a6ffe829a832a9fc7a04318a7683dd",
"sha256:9b6f711b25e01931f1c61ce0115245a23cdc8b80bf8539ac0363bdcf27d649b6",
"sha256:a3105a0eb63eacf98c2ecb0eb4aa03f77f40fbac2bdde22020bb8a536b226bb8",
"sha256:a8eb8b6ea09ec1c2535bf39914377bc8abcab2c7d30fa9225eb4fe412024e427",
"sha256:a92d5c414e8ee1249e850789052608f582416e82422502dc0ac8c577808a9067",
"sha256:d3d6958d53ad307df5e8469cc44474a75393a434addf20ecd451f38a72fe29b8",
"sha256:e0a4d5933a88a2c98bbe19c0c722f5483dc628d7a38338ac2cb64a7dbd34064b",
"sha256:e3bf558c6aeb49afa9f0c06cee7fb5947ee5a1ff3bd794b653d39926b49077fa",
"sha256:e61e363d9a5d7916f3a4ce984a929514c0df3daf3b1b2eb5e6edbb131ee771cf",
"sha256:f977cdf725b20f6b8229b0c87acb98c7717e742ef9f46b113985303ae12a99da",
"sha256:fc7489a50323a0df02378bc2fff86eb69d94cc5639914346c736be981c6a02e7"
],
"index": "pypi",
"version": "==3.10.4"
"version": "==3.10.1"
},
"pyjwt": {
"hashes": [
@ -955,10 +944,10 @@
},
"pyopenssl": {
"hashes": [
"sha256:5e2d8c5e46d0d865ae933bef5230090bdaf5506281e9eec60fa250ee80600cb3",
"sha256:8935bd4920ab9abfebb07c41a4f58296407ed77f04bd1a92914044b848ba1ed6"
"sha256:4c231c759543ba02560fcd2480c48dcec4dae34c9da7d3747c508227e0624b51",
"sha256:818ae18e06922c066f777a33f1fca45786d85edfe71cd043de6379337a7f274b"
],
"version": "==21.0.0"
"version": "==20.0.1"
},
"pyparsing": {
"hashes": [
@ -1095,11 +1084,11 @@
},
"sentry-sdk": {
"hashes": [
"sha256:b9844751e40710e84a457c5bc29b21c383ccb2b63d76eeaad72f7f1c808c8828",
"sha256:c091cc7115ff25fe3a0e410dbecd7a996f81a3f6137d2272daef32d6c3cfa6dc"
"sha256:ebe99144fa9618d4b0e7617e7929b75acd905d258c3c779edcd34c0adfffe26c",
"sha256:f33d34c886d0ba24c75ea8885a8b3a172358853c7cbde05979fc99c29ef7bc52"
],
"index": "pypi",
"version": "==1.4.3"
"version": "==1.3.1"
},
"service-identity": {
"hashes": [
@ -1119,11 +1108,11 @@
},
"sqlparse": {
"hashes": [
"sha256:0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae",
"sha256:48719e356bb8b42991bdbb1e8b83223757b93789c00910a616a071910ca4a64d"
"sha256:017cde379adbd6a1f15a61873f43e8274179378e95ef3fede90b5aa64d304ed0",
"sha256:0f91fd2e829c44362cbcfab3e9ae12e22badaa8a29ad5ff599f9ec109f0454e8"
],
"markers": "python_version >= '3.5'",
"version": "==0.4.2"
"version": "==0.4.1"
},
"structlog": {
"hashes": [
@ -1189,11 +1178,11 @@
"secure"
],
"hashes": [
"sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece",
"sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"
"sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4",
"sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"
],
"index": "pypi",
"version": "==1.26.7"
"version": "==1.26.6"
},
"uvicorn": {
"extras": [
@ -1267,33 +1256,41 @@
},
"websockets": {
"hashes": [
"sha256:01db0ecd1a0ca6702d02a5ed40413e18b7d22f94afb3bbe0d323bac86c42c1c8",
"sha256:085bb8a6e780d30eaa1ba48ac7f3a6707f925edea787cfb761ce5a39e77ac09b",
"sha256:1ac35426fe3e7d3d0fac3d63c8965c76ed67a8fd713937be072bf0ce22808539",
"sha256:1f6b814cff6aadc4288297cb3a248614829c6e4ff5556593c44a115e9dd49939",
"sha256:2a43072e434c041a99f2e1eb9b692df0232a38c37c61d00e9f24db79474329e4",
"sha256:5b2600e01c7ca6f840c42c747ffbe0254f319594ed108db847eb3d75f4aacb80",
"sha256:62160772314920397f9d219147f958b33fa27a12c662d4455c9ccbba9a07e474",
"sha256:706e200fc7f03bed99ad0574cd1ea8b0951477dd18cc978ccb190683c69dba76",
"sha256:71358c7816e2762f3e4af3adf0040f268e219f5a38cb3487a9d0fc2e554fef6a",
"sha256:7d2e12e4f901f1bc062dfdf91831712c4106ed18a9a4cdb65e2e5f502124ca37",
"sha256:7f79f02c7f9a8320aff7d3321cd1c7e3a7dbc15d922ac996cca827301ee75238",
"sha256:82b17524b1ce6ae7f7dd93e4d18e9b9474071e28b65dbf1dfe9b5767778db379",
"sha256:82bd921885231f4a30d9bc550552495b3fc36b1235add6d374e7c65c3babd805",
"sha256:8bbf8660c3f833ddc8b1afab90213f2e672a9ddac6eecb3cde968e6b2807c1c7",
"sha256:9a4d889162bd48588e80950e07fa5e039eee9deb76a58092e8c3ece96d7ef537",
"sha256:b4ade7569b6fd17912452f9c3757d96f8e4044016b6d22b3b8391e641ca50456",
"sha256:b8176deb6be540a46695960a765a77c28ac8b2e3ef2ec95d50a4f5df901edb1c",
"sha256:c4fc9a1d242317892590abe5b61a9127f1a61740477bfb121743f290b8054002",
"sha256:c5880442f5fc268f1ef6d37b2c152c114deccca73f48e3a8c48004d2f16f4567",
"sha256:cd8c6f2ec24aedace251017bc7a414525171d4e6578f914acab9349362def4da",
"sha256:d67646ddd17a86117ae21c27005d83c1895c0cef5d7be548b7549646372f868a",
"sha256:e42a1f1e03437b017af341e9bbfdc09252cd48ef32a8c3c3ead769eab3b17368",
"sha256:eb282127e9c136f860c6068a4fba5756eb25e755baffb5940b6f1eae071928b2",
"sha256:fe83b3ec9ef34063d86dfe1029160a85f24a5a94271036e5714a57acfdd089a1",
"sha256:ff59c6bdb87b31f7e2d596f09353d5a38c8c8ff571b0e2238e8ee2d55ad68465"
"sha256:0dd4eb8e0bbf365d6f652711ce21b8fd2b596f873d32aabb0fbb53ec604418cc",
"sha256:1d0971cc7251aeff955aa742ec541ee8aaea4bb2ebf0245748fbec62f744a37e",
"sha256:1d6b4fddb12ab9adf87b843cd4316c4bd602db8d5efd2fb83147f0458fe85135",
"sha256:230a3506df6b5f446fed2398e58dcaafdff12d67fe1397dff196411a9e820d02",
"sha256:276d2339ebf0df4f45df453923ebd2270b87900eda5dfd4a6b0cfa15f82111c3",
"sha256:2cf04601633a4ec176b9cc3d3e73789c037641001dbfaf7c411f89cd3e04fcaf",
"sha256:3ddff38894c7857c476feb3538dd847514379d6dc844961dc99f04b0384b1b1b",
"sha256:48c222feb3ced18f3dc61168ca18952a22fb88e5eb8902d2bf1b50faefdc34a2",
"sha256:51d04df04ed9d08077d10ccbe21e6805791b78eac49d16d30a1f1fe2e44ba0af",
"sha256:597c28f3aa7a09e8c070a86b03107094ee5cdafcc0d55f2f2eac92faac8dc67d",
"sha256:5c8f0d82ea2468282e08b0cf5307f3ad022290ed50c45d5cb7767957ca782880",
"sha256:7189e51955f9268b2bdd6cc537e0faa06f8fffda7fb386e5922c6391de51b077",
"sha256:7df3596838b2a0c07c6f6d67752c53859a54993d4f062689fdf547cb56d0f84f",
"sha256:826ccf85d4514609219725ba4a7abd569228c2c9f1968e8be05be366f68291ec",
"sha256:836d14eb53b500fd92bd5db2fc5894f7c72b634f9c2a28f546f75967503d8e25",
"sha256:85db8090ba94e22d964498a47fdd933b8875a1add6ebc514c7ac8703eb97bbf0",
"sha256:85e701a6c316b7067f1e8675c638036a796fe5116783a4c932e7eb8e305a3ffe",
"sha256:900589e19200be76dd7cbaa95e9771605b5ce3f62512d039fb3bc5da9014912a",
"sha256:9147868bb0cc01e6846606cd65cbf9c58598f187b96d14dd1ca17338b08793bb",
"sha256:9e7fdc775fe7403dbd8bc883ba59576a6232eac96dacb56512daacf7af5d618d",
"sha256:ab5ee15d3462198c794c49ccd31773d8a2b8c17d622aa184f669d2b98c2f0857",
"sha256:ad893d889bc700a5835e0a95a3e4f2c39e91577ab232a3dc03c262a0f8fc4b5c",
"sha256:b2e71c4670ebe1067fa8632f0d081e47254ee2d3d409de54168b43b0ba9147e0",
"sha256:b43b13e5622c5a53ab12f3272e6f42f1ce37cd5b6684b2676cb365403295cd40",
"sha256:b4ad84b156cf50529b8ac5cc1638c2cf8680490e3fccb6121316c8c02620a2e4",
"sha256:be5fd35e99970518547edc906efab29afd392319f020c3c58b0e1a158e16ed20",
"sha256:caa68c95bc1776d3521f81eeb4d5b9438be92514ec2a79fececda814099c8314",
"sha256:d144b350045c53c8ff09aa1cfa955012dd32f00c7e0862c199edcabb1a8b32da",
"sha256:d2c2d9b24d3c65b5a02cac12cbb4e4194e590314519ed49db2f67ef561c3cf58",
"sha256:e9e5fd6dbdf95d99bc03732ded1fc8ef22ebbc05999ac7e0c7bf57fe6e4e5ae2",
"sha256:ebf459a1c069f9866d8569439c06193c586e72c9330db1390af7c6a0a32c4afd",
"sha256:f31722f1c033c198aa4a39a01905951c00bd1c74f922e8afc1b1c62adbcdd56a",
"sha256:f68c352a68e5fdf1e97288d5cec9296664c590c25932a8476224124aaf90dbcd"
],
"version": "==10.0"
"version": "==9.1"
},
"xmlsec": {
"hashes": [
@ -1423,11 +1420,11 @@
},
"astroid": {
"hashes": [
"sha256:dcc06f6165f415220013801642bd6c9808a02967070919c4b746c6864c205471",
"sha256:fe81f80c0b35264acb5653302ffbd935d394f1775c5e4487df745bf9c2442708"
"sha256:3b680ce0419b8a771aba6190139a3998d14b413852506d99aff8dc2bf65ee67c",
"sha256:dc1e8b28427d6bbef6b8842b18765ab58f558c42bb80540bd7648c98412af25e"
],
"markers": "python_version ~= '3.6'",
"version": "==2.8.0"
"version": "==2.7.3"
},
"attrs": {
"hashes": [
@ -1470,11 +1467,11 @@
},
"charset-normalizer": {
"hashes": [
"sha256:5d209c0a931f215cee683b6445e2d77677e7e75e159f78def0db09d68fafcaa6",
"sha256:5ec46d183433dcbd0ab716f2d7f29d8dee50505b3fdb40c6b985c7c4f5a3591f"
"sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b",
"sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"
],
"markers": "python_version >= '3'",
"version": "==2.0.6"
"version": "==2.0.4"
},
"click": {
"hashes": [
@ -1560,11 +1557,11 @@
},
"gitpython": {
"hashes": [
"sha256:dc0a7f2f697657acc8d7f89033e8b1ea94dd90356b2983bca89dc8d2ab3cc647",
"sha256:df83fdf5e684fef7c6ee2c02fc68a5ceb7e7e759d08b694088d0cacb4eba59e5"
"sha256:b838a895977b45ab6f0cc926a9045c8d1c44e2b653c1fcc39fe91f42c6e8f05b",
"sha256:fce760879cd2aebd2991b3542876dc5c4a909b30c9d69dfc488e504a8db37ee8"
],
"markers": "python_version >= '3.7'",
"version": "==3.1.24"
"markers": "python_version >= '3.6'",
"version": "==3.1.18"
},
"idna": {
"hashes": [
@ -1655,11 +1652,11 @@
},
"platformdirs": {
"hashes": [
"sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2",
"sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"
"sha256:15b056538719b1c94bdaccb29e5f81879c7f7f0f4a153f46086d155dffcd4f0f",
"sha256:8003ac87717ae2c7ee1ea5a84a1a61e87f3fbd16eb5aadba194ea30a9019f648"
],
"markers": "python_version >= '3.6'",
"version": "==2.4.0"
"version": "==2.3.0"
},
"pluggy": {
"hashes": [
@ -1679,11 +1676,11 @@
},
"pylint": {
"hashes": [
"sha256:0f358e221c45cbd4dad2a1e4b883e75d28acdcccd29d40c76eb72b307269b126",
"sha256:2c9843fff1a88ca0ad98a256806c82c5a8f86086e7ccbdb93297d86c3f90c436"
"sha256:6758cce3ddbab60c52b57dcc07f0c5d779e5daf0cf50f6faacbef1d3ea62d2a1",
"sha256:e178e96b6ba171f8ef51fbce9ca30931e6acbea4a155074d80cc081596c9e852"
],
"index": "pypi",
"version": "==2.11.1"
"version": "==2.10.2"
},
"pylint-django": {
"hashes": [
@ -1761,49 +1758,49 @@
},
"regex": {
"hashes": [
"sha256:0628ed7d6334e8f896f882a5c1240de8c4d9b0dd7c7fb8e9f4692f5684b7d656",
"sha256:09eb62654030f39f3ba46bc6726bea464069c29d00a9709e28c9ee9623a8da4a",
"sha256:0bba1f6df4eafe79db2ecf38835c2626dbd47911e0516f6962c806f83e7a99ae",
"sha256:10a7a9cbe30bd90b7d9a1b4749ef20e13a3528e4215a2852be35784b6bd070f0",
"sha256:17310b181902e0bb42b29c700e2c2346b8d81f26e900b1328f642e225c88bce1",
"sha256:1e8d1898d4fb817120a5f684363b30108d7b0b46c7261264b100d14ec90a70e7",
"sha256:2054dea683f1bda3a804fcfdb0c1c74821acb968093d0be16233873190d459e3",
"sha256:29385c4dbb3f8b3a55ce13de6a97a3d21bd00de66acd7cdfc0b49cb2f08c906c",
"sha256:295bc8a13554a25ad31e44c4bedabd3c3e28bba027e4feeb9bb157647a2344a7",
"sha256:2cdb3789736f91d0b3333ac54d12a7e4f9efbc98f53cb905d3496259a893a8b3",
"sha256:3baf3eaa41044d4ced2463fd5d23bf7bd4b03d68739c6c99a59ce1f95599a673",
"sha256:4e61100200fa6ab7c99b61476f9f9653962ae71b931391d0264acfb4d9527d9c",
"sha256:6266fde576e12357b25096351aac2b4b880b0066263e7bc7a9a1b4307991bb0e",
"sha256:650c4f1fc4273f4e783e1d8e8b51a3e2311c2488ba0fcae6425b1e2c248a189d",
"sha256:658e3477676009083422042c4bac2bdad77b696e932a3de001c42cc046f8eda2",
"sha256:6adc1bd68f81968c9d249aab8c09cdc2cbe384bf2d2cb7f190f56875000cdc72",
"sha256:6c4d83d21d23dd854ffbc8154cf293f4e43ba630aa9bd2539c899343d7f59da3",
"sha256:6f74b6d8f59f3cfb8237e25c532b11f794b96f5c89a6f4a25857d85f84fbef11",
"sha256:7783d89bd5413d183a38761fbc68279b984b9afcfbb39fa89d91f63763fbfb90",
"sha256:7e3536f305f42ad6d31fc86636c54c7dafce8d634e56fef790fbacb59d499dd5",
"sha256:821e10b73e0898544807a0692a276e539e5bafe0a055506a6882814b6a02c3ec",
"sha256:835962f432bce92dc9bf22903d46c50003c8d11b1dc64084c8fae63bca98564a",
"sha256:85c61bee5957e2d7be390392feac7e1d7abd3a49cbaed0c8cee1541b784c8561",
"sha256:86f9931eb92e521809d4b64ec8514f18faa8e11e97d6c2d1afa1bcf6c20a8eab",
"sha256:8a5c2250c0a74428fd5507ae8853706fdde0f23bfb62ee1ec9418eeacf216078",
"sha256:8aec4b4da165c4a64ea80443c16e49e3b15df0f56c124ac5f2f8708a65a0eddc",
"sha256:8c268e78d175798cd71d29114b0a1f1391c7d011995267d3b62319ec1a4ecaa1",
"sha256:8d80087320632457aefc73f686f66139801959bf5b066b4419b92be85be3543c",
"sha256:95e89a8558c8c48626dcffdf9c8abac26b7c251d352688e7ab9baf351e1c7da6",
"sha256:9c371dd326289d85906c27ec2bc1dcdedd9d0be12b543d16e37bad35754bde48",
"sha256:9c7cb25adba814d5f419733fe565f3289d6fa629ab9e0b78f6dff5fa94ab0456",
"sha256:a731552729ee8ae9c546fb1c651c97bf5f759018fdd40d0e9b4d129e1e3a44c8",
"sha256:aea4006b73b555fc5bdb650a8b92cf486d678afa168cf9b38402bb60bf0f9c18",
"sha256:b0e3f59d3c772f2c3baaef2db425e6fc4149d35a052d874bb95ccfca10a1b9f4",
"sha256:b15dc34273aefe522df25096d5d087abc626e388a28a28ac75a4404bb7668736",
"sha256:c000635fd78400a558bd7a3c2981bb2a430005ebaa909d31e6e300719739a949",
"sha256:c31f35a984caffb75f00a86852951a337540b44e4a22171354fb760cefa09346",
"sha256:c50a6379763c733562b1fee877372234d271e5c78cd13ade5f25978aa06744db",
"sha256:c94722bf403b8da744b7d0bb87e1f2529383003ceec92e754f768ef9323f69ad",
"sha256:dcbbc9cfa147d55a577d285fd479b43103188855074552708df7acc31a476dd9",
"sha256:fb9f5844db480e2ef9fce3a72e71122dd010ab7b2920f777966ba25f7eb63819"
"sha256:04f6b9749e335bb0d2f68c707f23bb1773c3fb6ecd10edf0f04df12a8920d468",
"sha256:08d74bfaa4c7731b8dac0a992c63673a2782758f7cfad34cf9c1b9184f911354",
"sha256:0fc1f8f06977c2d4f5e3d3f0d4a08089be783973fc6b6e278bde01f0544ff308",
"sha256:121f4b3185feaade3f85f70294aef3f777199e9b5c0c0245c774ae884b110a2d",
"sha256:1413b5022ed6ac0d504ba425ef02549a57d0f4276de58e3ab7e82437892704fc",
"sha256:1743345e30917e8c574f273f51679c294effba6ad372db1967852f12c76759d8",
"sha256:28fc475f560d8f67cc8767b94db4c9440210f6958495aeae70fac8faec631797",
"sha256:31a99a4796bf5aefc8351e98507b09e1b09115574f7c9dbb9cf2111f7220d2e2",
"sha256:328a1fad67445550b982caa2a2a850da5989fd6595e858f02d04636e7f8b0b13",
"sha256:473858730ef6d6ff7f7d5f19452184cd0caa062a20047f6d6f3e135a4648865d",
"sha256:4cde065ab33bcaab774d84096fae266d9301d1a2f5519d7bd58fc55274afbf7a",
"sha256:5f6a808044faae658f546dd5f525e921de9fa409de7a5570865467f03a626fc0",
"sha256:610b690b406653c84b7cb6091facb3033500ee81089867ee7d59e675f9ca2b73",
"sha256:66256b6391c057305e5ae9209941ef63c33a476b73772ca967d4a2df70520ec1",
"sha256:6eebf512aa90751d5ef6a7c2ac9d60113f32e86e5687326a50d7686e309f66ed",
"sha256:79aef6b5cd41feff359acaf98e040844613ff5298d0d19c455b3d9ae0bc8c35a",
"sha256:808ee5834e06f57978da3e003ad9d6292de69d2bf6263662a1a8ae30788e080b",
"sha256:8e44769068d33e0ea6ccdf4b84d80c5afffe5207aa4d1881a629cf0ef3ec398f",
"sha256:999ad08220467b6ad4bd3dd34e65329dd5d0df9b31e47106105e407954965256",
"sha256:9b006628fe43aa69259ec04ca258d88ed19b64791693df59c422b607b6ece8bb",
"sha256:9d05ad5367c90814099000442b2125535e9d77581855b9bee8780f1b41f2b1a2",
"sha256:a577a21de2ef8059b58f79ff76a4da81c45a75fe0bfb09bc8b7bb4293fa18983",
"sha256:a617593aeacc7a691cc4af4a4410031654f2909053bd8c8e7db837f179a630eb",
"sha256:abb48494d88e8a82601af905143e0de838c776c1241d92021e9256d5515b3645",
"sha256:ac88856a8cbccfc14f1b2d0b829af354cc1743cb375e7f04251ae73b2af6adf8",
"sha256:b4c220a1fe0d2c622493b0a1fd48f8f991998fb447d3cd368033a4b86cf1127a",
"sha256:b844fb09bd9936ed158ff9df0ab601e2045b316b17aa8b931857365ea8586906",
"sha256:bdc178caebd0f338d57ae445ef8e9b737ddf8fbc3ea187603f65aec5b041248f",
"sha256:c206587c83e795d417ed3adc8453a791f6d36b67c81416676cad053b4104152c",
"sha256:c61dcc1cf9fd165127a2853e2c31eb4fb961a4f26b394ac9fe5669c7a6592892",
"sha256:c7cb4c512d2d3b0870e00fbbac2f291d4b4bf2634d59a31176a87afe2777c6f0",
"sha256:d4a332404baa6665b54e5d283b4262f41f2103c255897084ec8f5487ce7b9e8e",
"sha256:d5111d4c843d80202e62b4fdbb4920db1dcee4f9366d6b03294f45ed7b18b42e",
"sha256:e1e8406b895aba6caa63d9fd1b6b1700d7e4825f78ccb1e5260551d168db38ed",
"sha256:e8690ed94481f219a7a967c118abaf71ccc440f69acd583cab721b90eeedb77c",
"sha256:ed283ab3a01d8b53de3a05bfdf4473ae24e43caee7dcb5584e86f3f3e5ab4374",
"sha256:ed4b50355b066796dacdd1cf538f2ce57275d001838f9b132fab80b75e8c84dd",
"sha256:ee329d0387b5b41a5dddbb6243a21cb7896587a651bebb957e2d2bb8b63c0791",
"sha256:f3bf1bc02bc421047bfec3343729c4bbbea42605bcfd6d6bfe2c07ade8b12d2a",
"sha256:f585cbbeecb35f35609edccb95efd95a3e35824cd7752b586503f7e6087303f1",
"sha256:f60667673ff9c249709160529ab39667d1ae9fd38634e006bec95611f632e759"
],
"version": "==2021.9.24"
"version": "==2021.8.28"
},
"requests": {
"hashes": [
@ -1861,24 +1858,16 @@
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.10.2"
},
"typing-extensions": {
"hashes": [
"sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e",
"sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7",
"sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"
],
"version": "==3.10.0.2"
},
"urllib3": {
"extras": [
"secure"
],
"hashes": [
"sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece",
"sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"
"sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4",
"sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"
],
"index": "pypi",
"version": "==1.26.7"
"version": "==1.26.6"
},
"wrapt": {
"hashes": [

View File

@ -1,3 +1,3 @@
"""authentik"""
__version__ = "2021.9.5"
__version__ = "2021.8.5"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@ -84,7 +84,7 @@ class SystemSerializer(PassiveSerializer):
return now()
def get_embedded_outpost_host(self, request: Request) -> str:
"""Get the FQDN configured on the embedded outpost"""
"""Get the FQDN configured on the embeddded outpost"""
outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
if not outposts.exists():
return ""

View File

@ -6,14 +6,12 @@ from django.core.cache import cache
from django.core.validators import URLValidator
from packaging.version import parse
from prometheus_client import Info
from requests import RequestException
from requests import RequestException, get
from structlog.stdlib import get_logger
from authentik import ENV_GIT_HASH_KEY, __version__
from authentik.events.models import Event, EventAction
from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
from authentik.lib.config import CONFIG
from authentik.lib.utils.http import get_http_session
from authentik.root.celery import CELERY_APP
LOGGER = get_logger()
@ -38,17 +36,12 @@ def _set_prom_info():
@CELERY_APP.task(bind=True, base=MonitoredTask)
def update_latest_version(self: MonitoredTask):
"""Update latest version info"""
if CONFIG.y_bool("disable_update_check"):
cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
self.set_status(TaskResult(TaskResultStatus.WARNING, messages=["Version check disabled."]))
return
try:
response = get_http_session().get(
"https://version.goauthentik.io/version.json",
)
response = get("https://api.github.com/repos/goauthentik/authentik/releases/latest")
response.raise_for_status()
data = response.json()
upstream_version = data.get("stable", {}).get("version")
tag_name = data.get("tag_name")
upstream_version = tag_name.split("/")[1]
cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT)
self.set_status(
TaskResult(TaskResultStatus.SUCCESSFUL, ["Successfully updated latest Version"])
@ -65,7 +58,7 @@ def update_latest_version(self: MonitoredTask):
).exists():
return
event_dict = {"new_version": upstream_version}
if match := re.search(URL_FINDER, data.get("stable", {}).get("changelog", "")):
if match := re.search(URL_FINDER, data.get("body", "")):
event_dict["message"] = f"Changelog: {match.group()}"
Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
except (RequestException, IndexError) as exc:
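For context, this hunk switches where the update check fetches release data: one side reads the `stable.version` field from https://version.goauthentik.io/version.json, the other parses the `tag_name` of the latest GitHub release (tags shaped like `version/2021.8.5`). A minimal sketch of the two parsing paths, assuming only the response shapes visible in the hunk:

# Sketch only; response shapes are taken from the hunk above, not from live API calls.
def version_from_goauthentik(data: dict) -> str:
    # version.goauthentik.io side: {"stable": {"version": "2021.8.5", "changelog": "..."}}
    return data.get("stable", {}).get("version", "")

def version_from_github_release(data: dict) -> str:
    # GitHub releases side: {"tag_name": "version/2021.8.5", "body": "..."}
    # The task wraps this in a try/except that also catches IndexError for malformed tags.
    return data.get("tag_name", "").split("/")[1]

assert version_from_goauthentik({"stable": {"version": "2021.8.5"}}) == "2021.8.5"
assert version_from_github_release({"tag_name": "version/2021.8.5"}) == "2021.8.5"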

View File

@ -1,58 +1,81 @@
"""test admin tasks"""
import json
from dataclasses import dataclass
from unittest.mock import Mock, patch
from django.core.cache import cache
from django.test import TestCase
from requests_mock import Mocker
from requests.exceptions import RequestException
from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version
from authentik.events.models import Event, EventAction
RESPONSE_VALID = {
"$schema": "https://version.goauthentik.io/schema.json",
"stable": {
"version": "99999999.9999999",
"changelog": "See https://goauthentik.io/test",
"reason": "bugfix",
},
}
@dataclass
class MockResponse:
"""Mock class to emulate the methods of requests's Response we need"""
status_code: int
response: str
def json(self) -> dict:
"""Get json parsed response"""
return json.loads(self.response)
def raise_for_status(self):
"""raise RequestException if status code is 400 or more"""
if self.status_code >= 400:
raise RequestException
REQUEST_MOCK_VALID = Mock(
return_value=MockResponse(
200,
"""{
"tag_name": "version/99999999.9999999",
"body": "https://goauthentik.io/test"
}""",
)
)
REQUEST_MOCK_INVALID = Mock(return_value=MockResponse(400, "{}"))
class TestAdminTasks(TestCase):
"""test admin tasks"""
@patch("authentik.admin.tasks.get", REQUEST_MOCK_VALID)
def test_version_valid_response(self):
"""Test Update checker with valid response"""
with Mocker() as mocker:
mocker.get("https://version.goauthentik.io/version.json", json=RESPONSE_VALID)
update_latest_version.delay().get()
self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999")
self.assertTrue(
update_latest_version.delay().get()
self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999")
self.assertTrue(
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999",
context__message="Changelog: https://goauthentik.io/test",
).exists()
)
# test that a consecutive check doesn't create a duplicate event
update_latest_version.delay().get()
self.assertEqual(
len(
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999",
context__message="Changelog: https://goauthentik.io/test",
).exists()
)
# test that a consecutive check doesn't create a duplicate event
update_latest_version.delay().get()
self.assertEqual(
len(
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999",
context__message="Changelog: https://goauthentik.io/test",
)
),
1,
)
)
),
1,
)
@patch("authentik.admin.tasks.get", REQUEST_MOCK_INVALID)
def test_version_error(self):
"""Test Update checker with invalid response"""
with Mocker() as mocker:
mocker.get("https://version.goauthentik.io/version.json", status_code=400)
update_latest_version.delay().get()
self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
self.assertFalse(
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE, context__new_version="0.0.0"
).exists()
)
update_latest_version.delay().get()
self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
self.assertFalse(
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE, context__new_version="0.0.0"
).exists()
)
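Because the +/- markers and indentation are stripped in the rendering above, the two versions of this test interleave. As a readability aid, this is roughly how the Mocker-based side of `test_version_valid_response` reads with indentation restored; RESPONSE_VALID, VERSION_CACHE_KEY and the task come from the same test module shown in the hunk:

from django.core.cache import cache
from django.test import TestCase
from requests_mock import Mocker

from authentik.admin.tasks import VERSION_CACHE_KEY, update_latest_version

# Payload mirroring the RESPONSE_VALID constant defined at the top of the hunk
RESPONSE_VALID = {
    "$schema": "https://version.goauthentik.io/schema.json",
    "stable": {"version": "99999999.9999999", "changelog": "See https://goauthentik.io/test"},
}

class TestAdminTasks(TestCase):
    """test admin tasks"""

    def test_version_valid_response(self):
        """Test update checker with a valid response"""
        with Mocker() as mocker:
            # Intercept the version check and return the canned payload
            mocker.get("https://version.goauthentik.io/version.json", json=RESPONSE_VALID)
            update_latest_version.delay().get()
            self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999")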

View File

@ -40,6 +40,7 @@ def bearer_auth(raw_header: bytes) -> Optional[User]:
raise AuthenticationFailed("Malformed header")
tokens = Token.filter_not_expired(key=password, intent=TokenIntents.INTENT_API)
if not tokens.exists():
LOGGER.info("Authenticating via secret_key")
user = token_secret_key(password)
if not user:
raise AuthenticationFailed("Token invalid/expired")
@ -57,7 +58,6 @@ def token_secret_key(value: str) -> Optional[User]:
outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
if not outposts:
return None
LOGGER.info("Authenticating via secret_key")
outpost = outposts.first()
return outpost.user

View File

@ -33,12 +33,3 @@ class OwnerPermissions(BasePermission):
if owner != request.user:
return False
return True
class OwnerSuperuserPermissions(OwnerPermissions):
"""Similar to OwnerPermissions, except always allow access for superusers"""
def has_object_permission(self, request: Request, view, obj: Model) -> bool:
if request.user.is_superuser:
return True
return super().has_object_permission(request, view, obj)

View File

@ -5,9 +5,6 @@ from typing import Callable, Optional
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
LOGGER = get_logger()
def permission_required(perm: Optional[str] = None, other_perms: Optional[list[str]] = None):
@ -21,12 +18,10 @@ def permission_required(perm: Optional[str] = None, other_perms: Optional[list[s
if perm:
obj = self.get_object()
if not request.user.has_perm(perm, obj):
LOGGER.debug("denying access for object", user=request.user, perm=perm, obj=obj)
return self.permission_denied(request)
if other_perms:
for other_perm in other_perms:
if not request.user.has_perm(other_perm):
LOGGER.debug("denying access for other", user=request.user, perm=perm)
return self.permission_denied(request)
return func(self, request, *args, **kwargs)

View File

@ -11,7 +11,7 @@ from drf_spectacular.types import OpenApiTypes
def build_standard_type(obj, **kwargs):
"""Build a basic type with optional add owns."""
"""Build a basic type with optional add ons."""
schema = build_basic_type(obj)
schema.update(kwargs)
return schema
@ -31,7 +31,7 @@ VALIDATION_ERROR = build_object_type(
"non_field_errors": build_array_type(build_standard_type(OpenApiTypes.STR)),
"code": build_standard_type(OpenApiTypes.STR),
},
required=[],
required=["detail"],
additionalProperties={},
)

View File

@ -1,19 +0,0 @@
"""API tasks"""
from authentik.lib.utils.http import get_http_session
from authentik.root.celery import CELERY_APP
SENTRY_SESSION = get_http_session()
@CELERY_APP.task()
def sentry_proxy(payload: str):
"""Relay data to sentry"""
SENTRY_SESSION.post(
"https://sentry.beryju.org/api/8/envelope/",
data=payload,
headers={
"Content-Type": "application/octet-stream",
},
timeout=10,
)

View File

@ -63,7 +63,7 @@ class ConfigView(APIView):
@extend_schema(responses={200: ConfigSerializer(many=False)})
def get(self, request: Request) -> Response:
"""Retrieve public configuration options"""
"""Retrive public configuration options"""
config = ConfigSerializer(
{
"error_reporting_enabled": CONFIG.y("error_reporting.enabled"),

View File

@ -4,19 +4,17 @@ from json import loads
from django.conf import settings
from django.http.request import HttpRequest
from django.http.response import HttpResponse
from requests import post
from requests.exceptions import RequestException
from rest_framework.authentication import SessionAuthentication
from rest_framework.parsers import BaseParser
from rest_framework.permissions import AllowAny
from rest_framework.request import Request
from rest_framework.throttling import AnonRateThrottle
from rest_framework.views import APIView
from structlog.stdlib import get_logger
from authentik.api.tasks import sentry_proxy
from authentik.lib.config import CONFIG
LOGGER = get_logger()
class PlainTextParser(BaseParser):
"""Plain text parser."""
@ -48,18 +46,21 @@ class SentryTunnelView(APIView):
"""Sentry tunnel, to prevent ad blockers from blocking sentry"""
# Only allow usage of this endpoint when error reporting is enabled
if not CONFIG.y_bool("error_reporting.enabled", False):
LOGGER.debug("error reporting disabled")
return HttpResponse(status=400)
# Body is 2 json objects separated by \n
full_body = request.body
lines = full_body.splitlines()
if len(lines) < 1:
return HttpResponse(status=400)
header = loads(lines[0])
header = loads(full_body.splitlines()[0])
# Check that the DSN is what we expect
dsn = header.get("dsn", "")
if dsn != settings.SENTRY_DSN:
LOGGER.debug("Invalid dsn", have=dsn, expected=settings.SENTRY_DSN)
return HttpResponse(status=400)
sentry_proxy.delay(full_body.decode())
return HttpResponse(status=204)
response = post(
"https://sentry.beryju.org/api/8/envelope/",
data=full_body,
headers={"Content-Type": "application/octet-stream"},
)
try:
response.raise_for_status()
except RequestException:
return HttpResponse(status=500)
return HttpResponse(status=response.status_code)
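For context, this hunk toggles how the tunnel relays the browser's Sentry envelope: one side queues the raw body to a Celery task (`sentry_proxy.delay`) and returns 204 immediately, the other POSTs it synchronously to the upstream envelope endpoint and passes the status code back. A minimal sketch of the synchronous relay, using only the URL and headers visible in the hunk:

from requests import post
from requests.exceptions import RequestException

SENTRY_ENVELOPE_URL = "https://sentry.beryju.org/api/8/envelope/"  # from the hunk above

def relay_envelope(raw_body: bytes) -> int:
    """Forward a Sentry envelope upstream and return the status code for the client."""
    response = post(
        SENTRY_ENVELOPE_URL,
        data=raw_body,
        headers={"Content-Type": "application/octet-stream"},
        timeout=10,
    )
    try:
        response.raise_for_status()
    except RequestException:
        return 500
    return response.status_code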

View File

@ -24,7 +24,6 @@ from authentik.core.api.users import UserViewSet
from authentik.crypto.api import CertificateKeyPairViewSet
from authentik.events.api.event import EventViewSet
from authentik.events.api.notification import NotificationViewSet
from authentik.events.api.notification_mapping import NotificationWebhookMappingViewSet
from authentik.events.api.notification_rule import NotificationRuleViewSet
from authentik.events.api.notification_transport import NotificationTransportViewSet
from authentik.flows.api.bindings import FlowStageBindingViewSet
@ -99,7 +98,6 @@ from authentik.stages.user_write.api import UserWriteStageViewSet
from authentik.tenants.api import TenantViewSet
router = routers.DefaultRouter()
router.include_format_suffixes = False
router.register("admin/system_tasks", TaskViewSet, basename="admin_system_tasks")
router.register("admin/apps", AppsViewSet, basename="apps")
@ -161,7 +159,6 @@ router.register("propertymappings/all", PropertyMappingViewSet)
router.register("propertymappings/ldap", LDAPPropertyMappingViewSet)
router.register("propertymappings/saml", SAMLPropertyMappingViewSet)
router.register("propertymappings/scope", ScopeMappingViewSet)
router.register("propertymappings/notification", NotificationWebhookMappingViewSet)
router.register("authenticators/duo", DuoDeviceViewSet)
router.register("authenticators/static", StaticDeviceViewSet)

View File

@ -11,7 +11,6 @@ from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import GenericViewSet
from ua_parser import user_agent_parser
from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.core.models import AuthenticatedSession
from authentik.events.geo import GEOIP_READER, GeoIPDict
@ -103,8 +102,11 @@ class AuthenticatedSessionViewSet(
search_fields = ["user__username", "last_ip", "last_user_agent"]
filterset_fields = ["user__username", "last_ip", "last_user_agent"]
ordering = ["user__username"]
permission_classes = [OwnerSuperuserPermissions]
filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
filter_backends = [
DjangoFilterBackend,
OrderingFilter,
SearchFilter,
]
def get_queryset(self):
user = self.request.user if self.request else get_anonymous_user()

View File

@ -2,7 +2,7 @@
from django.db.models.query import QuerySet
from django_filters.filters import ModelMultipleChoiceFilter
from django_filters.filterset import FilterSet
from rest_framework.fields import CharField, JSONField
from rest_framework.fields import BooleanField, CharField, JSONField
from rest_framework.serializers import ListSerializer, ModelSerializer
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter
@ -15,6 +15,7 @@ from authentik.core.models import Group, User
class GroupMemberSerializer(ModelSerializer):
"""Stripped down user serializer to show relevant users for groups"""
is_superuser = BooleanField(read_only=True)
avatar = CharField(read_only=True)
attributes = JSONField(validators=[is_dict], required=False)
uid = CharField(read_only=True)
@ -28,6 +29,7 @@ class GroupMemberSerializer(ModelSerializer):
"name",
"is_active",
"last_login",
"is_superuser",
"email",
"avatar",
"attributes",

View File

@ -95,9 +95,7 @@ class SourceViewSet(
@action(detail=False, pagination_class=None, filter_backends=[])
def user_settings(self, request: Request) -> Response:
"""Get all sources the user can configure"""
_all_sources: Iterable[Source] = (
Source.objects.filter(enabled=True).select_subclasses().order_by("name")
)
_all_sources: Iterable[Source] = Source.objects.filter(enabled=True).select_subclasses()
matching_sources: list[UserSettingSerializer] = []
for source in _all_sources:
user_settings = source.ui_user_settings

View File

@ -2,19 +2,15 @@
from typing import Any
from django.http.response import Http404
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import OpenApiResponse, extend_schema
from guardian.shortcuts import get_anonymous_user
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet
from authentik.api.authorization import OwnerSuperuserPermissions
from authentik.api.decorators import permission_required
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.users import UserSerializer
@ -83,23 +79,13 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
"expires",
"expiring",
]
ordering = ["identifier", "expires"]
permission_classes = [OwnerSuperuserPermissions]
filter_backends = [DjangoFilterBackend, OrderingFilter, SearchFilter]
def get_queryset(self):
user = self.request.user if self.request else get_anonymous_user()
if user.is_superuser:
return super().get_queryset()
return super().get_queryset().filter(user=user.pk)
ordering = ["expires"]
def perform_create(self, serializer: TokenSerializer):
if not self.request.user.is_superuser:
return serializer.save(
user=self.request.user,
expiring=self.request.user.attributes.get(USER_ATTRIBUTE_TOKEN_EXPIRING, True),
)
return super().perform_create(serializer)
serializer.save(
user=self.request.user,
expiring=self.request.user.attributes.get(USER_ATTRIBUTE_TOKEN_EXPIRING, True),
)
@permission_required("authentik_core.view_token_key")
@extend_schema(

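Note: the removed queryset and permission code is the usual DRF pattern for scoping objects to their owner. A rough sketch of that pattern with placeholder names, not authentik's actual classes:

    from rest_framework.viewsets import ModelViewSet

    class OwnedObjectViewSet(ModelViewSet):
        """Placeholder viewset showing the owner-scoping idiom."""

        def get_queryset(self):
            qs = super().get_queryset()
            if self.request.user.is_superuser:
                return qs  # superusers may see and manage every object
            return qs.filter(user=self.request.user.pk)

        def perform_create(self, serializer):
            if self.request.user.is_superuser:
                return super().perform_create(serializer)
            # everyone else always creates objects bound to themselves
            serializer.save(user=self.request.user)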
View File

@ -1,5 +1,4 @@
"""User API Views"""
from datetime import timedelta
from json import loads
from typing import Optional
@ -8,8 +7,6 @@ from django.db.transaction import atomic
from django.db.utils import IntegrityError
from django.urls import reverse_lazy
from django.utils.http import urlencode
from django.utils.text import slugify
from django.utils.timezone import now
from django.utils.translation import gettext as _
from django_filters.filters import BooleanFilter, CharFilter, ModelMultipleChoiceFilter
from django_filters.filterset import FilterSet
@ -274,10 +271,9 @@ class UserViewSet(UsedByMixin, ModelViewSet):
)
group.users.add(user)
token = Token.objects.create(
identifier=slugify(f"service-account-{username}-password"),
identifier=f"service-account-{username}-password",
intent=TokenIntents.INTENT_APP_PASSWORD,
user=user,
expires=now() + timedelta(days=360),
)
return Response({"username": user.username, "token": token.key})
except (IntegrityError) as exc:
@ -308,7 +304,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
"""Allow users to change information on their own profile"""
data = UserSelfSerializer(instance=User.objects.get(pk=request.user.pk), data=request.data)
if not data.is_valid():
return Response(data.errors, status=400)
return Response(data.errors)
new_user = data.save()
# If we're impersonating, we need to update that user object
# since it caches the full object
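Note: the dropped lines slugified the token identifier and gave app-password tokens a 360-day expiry. A sketch of what those helpers produce, assuming a configured Django project:

    from datetime import timedelta
    from django.utils.text import slugify
    from django.utils.timezone import now

    username = "My Service Account"
    identifier = slugify(f"service-account-{username}-password")
    # -> "service-account-my-service-account-password", always a valid slug
    expires = now() + timedelta(days=360)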

View File

@ -26,7 +26,7 @@ class Migration(migrations.Migration):
),
(
"username_link",
"Link to a user with identical username. Can have security implications when a username is used with another source.",
"Link to a user with identical username address. Can have security implications when a username is used with another source.",
),
(
"username_deny",

View File

@ -283,7 +283,7 @@ class SourceUserMatchingModes(models.TextChoices):
)
USERNAME_LINK = "username_link", _(
(
"Link to a user with identical username. Can have security implications "
"Link to a user with identical username address. Can have security implications "
"when a username is used with another source."
)
)

View File

@ -184,7 +184,7 @@ class SourceFlowManager:
# Ensure redirect is carried through when user was trying to
# authorize application
final_redirect = self.request.session.get(SESSION_KEY_GET, {}).get(
NEXT_ARG_NAME, "authentik_core:if-user"
NEXT_ARG_NAME, "authentik_core:if-admin"
)
kwargs.update(
{
@ -243,9 +243,9 @@ class SourceFlowManager:
return self.handle_auth_user(connection)
return redirect(
reverse(
"authentik_core:if-user",
"authentik_core:if-admin",
)
+ f"#/settings;page-{self.source.slug}"
+ f"#/user;page-{self.source.slug}"
)
def handle_enroll(

View File

@ -28,7 +28,3 @@ class PostUserEnrollmentStage(StageView):
source=connection.source,
).from_http(self.request)
return self.executor.stage_ok()
def post(self, request: HttpRequest) -> HttpResponse:
"""Wrapper for post requests"""
return self.get(request)

View File

@ -8,15 +8,16 @@
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
<title>{% block title %}{% trans title|default:tenant.branding_title %}{% endblock %}</title>
<link rel="shortcut icon" type="image/png" href="{% static 'dist/assets/icons/icon.png' %}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly-base.css' %}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/page.css' %}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/empty-state.css' %}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/spinner.css' %}">
<link rel="shortcut icon" type="image/png" href="{% static 'dist/assets/icons/icon.png' %}?v={{ ak_version }}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly-base.css' %}?v={{ ak_version }}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/page.css' %}?v={{ ak_version }}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/empty-state.css' %}?v={{ ak_version }}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/spinner.css' %}?v={{ ak_version }}">
{% block head_before %}
{% endblock %}
<link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
<script src="{% static 'dist/poly.js' %}" type="module"></script>
<link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}?v={{ ak_version }}">
<script src="{% static 'dist/poly.js' %}?v={{ ak_version }}" type="module"></script>
<script>window["polymerSkipLoadingFontRoboto"] = true;</script>
{% block head %}
{% endblock %}
</head>
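Note: the ?v={{ ak_version }} suffix is a cache-busting query string, so browsers re-fetch the bundles after an upgrade instead of serving stale cached assets. A hypothetical context processor that could supply such a variable; the actual wiring in authentik may differ:

    from authentik import __version__

    def version_context(request):
        """Expose the running version to templates as `ak_version`."""
        return {"ak_version": __version__}

    # settings.py sketch: register the dotted path of this function under
    # TEMPLATES[0]["OPTIONS"]["context_processors"]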

View File

@ -4,7 +4,7 @@
{% load i18n %}
{% block head %}
<script src="{% static 'dist/AdminInterface.js' %}" type="module"></script>
<script src="{% static 'dist/AdminInterface.js' %}?v={{ ak_version }}" type="module"></script>
{% endblock %}
{% block body %}

View File

@ -21,7 +21,7 @@ You've logged out of {{ application }}.
{% endblocktrans %}
</p>
<a id="ak-back-home" href="{% url 'authentik_core:root-redirect' %}" class="pf-c-button pf-m-primary">{% trans 'Go back to overview' %}</a>
<a id="ak-back-home" href="{% url 'authentik_core:if-admin' %}" class="pf-c-button pf-m-primary">{% trans 'Go back to overview' %}</a>
<a id="logout" href="{% url 'authentik_flows:default-invalidation' %}" class="pf-c-button pf-m-secondary">{% trans 'Log out of authentik' %}</a>

View File

@ -4,14 +4,13 @@
{% load i18n %}
{% block head_before %}
{{ block.super }}
{% if flow.compatibility_mode %}
<script>ShadyDOM = { force: !navigator.webdriver };</script>
{% endif %}
{% endblock %}
{% block head %}
<script src="{% static 'dist/FlowInterface.js' %}" type="module"></script>
<script src="{% static 'dist/FlowInterface.js' %}?v={{ ak_version }}" type="module"></script>
<style>
.pf-c-background-image::before {
--ak-flow-background: url("{{ flow.background_url }}");

View File

@ -1,28 +0,0 @@
{% extends "base/skeleton.html" %}
{% load static %}
{% load i18n %}
{% block head %}
<script src="{% static 'dist/UserInterface.js' %}" type="module"></script>
{% endblock %}
{% block body %}
<ak-message-container></ak-message-container>
<ak-interface-user>
<section class="ak-static-page pf-c-page__main-section pf-m-no-padding-mobile pf-m-xl">
<div class="pf-c-empty-state" style="height: 100vh;">
<div class="pf-c-empty-state__content">
<span class="pf-c-spinner pf-m-xl pf-c-empty-state__icon" role="progressbar" aria-valuetext="{% trans 'Loading...' %}">
<span class="pf-c-spinner__clipper"></span>
<span class="pf-c-spinner__lead-ball"></span>
<span class="pf-c-spinner__tail-ball"></span>
</span>
<h1 class="pf-c-title pf-m-lg">
{% trans "Loading..." %}
</h1>
</div>
</div>
</section>
</ak-interface-user>
{% endblock %}

View File

@ -4,7 +4,7 @@
{% load i18n %}
{% block head_before %}
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}?v={{ ak_version }}">
{% endblock %}
{% block head %}

View File

@ -58,4 +58,4 @@ class TestImpersonation(TestCase):
self.client.force_login(self.other_user)
response = self.client.get(reverse("authentik_core:impersonate-end"))
self.assertRedirects(response, reverse("authentik_core:if-user"))
self.assertRedirects(response, reverse("authentik_core:if-admin"))

View File

@ -1,6 +1,4 @@
"""Test token API"""
from json import loads
from django.urls.base import reverse
from django.utils.timezone import now
from guardian.shortcuts import get_anonymous_user
@ -15,8 +13,7 @@ class TestTokenAPI(APITestCase):
def setUp(self) -> None:
super().setUp()
self.user = User.objects.create(username="testuser")
self.admin = User.objects.get(username="akadmin")
self.user = User.objects.get(username="akadmin")
self.client.force_login(self.user)
def test_token_create(self):
@ -58,29 +55,3 @@ class TestTokenAPI(APITestCase):
clean_expired_models.delay().get()
token.refresh_from_db()
self.assertNotEqual(key, token.key)
def test_list(self):
"""Test Token List (Test normal authentication)"""
token_should: Token = Token.objects.create(
identifier="test", expiring=False, user=self.user
)
Token.objects.create(identifier="test-2", expiring=False, user=get_anonymous_user())
response = self.client.get(reverse(("authentik_api:token-list")))
body = loads(response.content)
self.assertEqual(len(body["results"]), 1)
self.assertEqual(body["results"][0]["identifier"], token_should.identifier)
def test_list_admin(self):
"""Test Token List (Test with admin auth)"""
self.client.force_login(self.admin)
token_should: Token = Token.objects.create(
identifier="test", expiring=False, user=self.user
)
token_should_not: Token = Token.objects.create(
identifier="test-2", expiring=False, user=get_anonymous_user()
)
response = self.client.get(reverse(("authentik_api:token-list")))
body = loads(response.content)
self.assertEqual(len(body["results"]), 2)
self.assertEqual(body["results"][0]["identifier"], token_should.identifier)
self.assertEqual(body["results"][1]["identifier"], token_should_not.identifier)

View File

@ -12,7 +12,7 @@ from authentik.core.views.session import EndSessionView
urlpatterns = [
path(
"",
login_required(RedirectView.as_view(pattern_name="authentik_core:if-user")),
login_required(RedirectView.as_view(pattern_name="authentik_core:if-admin")),
name="root-redirect",
),
# Impersonation
@ -32,11 +32,6 @@ urlpatterns = [
ensure_csrf_cookie(TemplateView.as_view(template_name="if/admin.html")),
name="if-admin",
),
path(
"if/user/",
ensure_csrf_cookie(TemplateView.as_view(template_name="if/user.html")),
name="if-user",
),
path(
"if/flow/<slug:flow_slug>/",
ensure_csrf_cookie(FlowInterfaceView.as_view()),

View File

@ -28,7 +28,7 @@ class ImpersonateInitView(View):
Event.new(EventAction.IMPERSONATION_STARTED).from_http(request, user_to_be)
return redirect("authentik_core:if-user")
return redirect("authentik_core:if-admin")
class ImpersonateEndView(View):
@ -41,7 +41,7 @@ class ImpersonateEndView(View):
or SESSION_IMPERSONATE_ORIGINAL_USER not in request.session
):
LOGGER.debug("Can't end impersonation", user=request.user)
return redirect("authentik_core:if-user")
return redirect("authentik_core:if-admin")
original_user = request.session[SESSION_IMPERSONATE_ORIGINAL_USER]

View File

@ -78,7 +78,9 @@ class CertificateKeyPair(CreatedUpdatedModel):
@property
def kid(self):
"""Get Key ID used for JWKS"""
return md5(self.key_data.encode("utf-8")).hexdigest() if self.key_data else "" # nosec
return "{0}".format(
md5(self.key_data.encode("utf-8")).hexdigest() if self.key_data else "" # nosec
)
def __str__(self) -> str:
return f"Certificate-Key Pair {self.name}"

View File

@ -1,28 +0,0 @@
"""NotificationWebhookMapping API Views"""
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.used_by import UsedByMixin
from authentik.events.models import NotificationWebhookMapping
class NotificationWebhookMappingSerializer(ModelSerializer):
"""NotificationWebhookMapping Serializer"""
class Meta:
model = NotificationWebhookMapping
fields = [
"pk",
"name",
"expression",
]
class NotificationWebhookMappingViewSet(UsedByMixin, ModelViewSet):
"""NotificationWebhookMapping Viewset"""
queryset = NotificationWebhookMapping.objects.all()
serializer_class = NotificationWebhookMappingSerializer
filterset_fields = ["name"]
ordering = ["name"]

View File

@ -1,10 +1,7 @@
"""NotificationTransport API Views"""
from typing import Any
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, ListField, SerializerMethodField
from rest_framework.request import Request
from rest_framework.response import Response
@ -32,14 +29,6 @@ class NotificationTransportSerializer(ModelSerializer):
"""Return selected mode with a UI Label"""
return TransportMode(instance.mode).label
def validate(self, attrs: dict[Any, str]) -> dict[Any, str]:
"""Ensure the required fields are set."""
mode = attrs.get("mode")
if mode in [TransportMode.WEBHOOK, TransportMode.WEBHOOK_SLACK]:
if "webhook_url" not in attrs or attrs.get("webhook_url", "") == "":
raise ValidationError("Webhook URL may not be empty.")
return attrs
class Meta:
model = NotificationTransport
@ -49,7 +38,6 @@ class NotificationTransportSerializer(ModelSerializer):
"mode",
"mode_verbose",
"webhook_url",
"webhook_mapping",
"send_once",
]

View File

@ -1,46 +0,0 @@
# Generated by Django 3.2.6 on 2021-09-11 22:17
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0028_alter_token_intent"),
("authentik_events", "0017_alter_event_action"),
]
operations = [
migrations.CreateModel(
name="NotificationWebhookMapping",
fields=[
(
"propertymapping_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="authentik_core.propertymapping",
),
),
],
options={
"verbose_name": "Notification Webhook Mapping",
"verbose_name_plural": "Notification Webhook Mappings",
},
bases=("authentik_core.propertymapping",),
),
migrations.AddField(
model_name="notificationtransport",
name="webhook_mapping",
field=models.ForeignKey(
default=None,
null=True,
on_delete=django.db.models.deletion.SET_DEFAULT,
to="authentik_events.notificationwebhookmapping",
),
),
]

View File

@ -1,19 +0,0 @@
# Generated by Django 3.2.7 on 2021-10-04 15:31
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_events", "0018_auto_20210911_2217"),
]
operations = [
migrations.AlterField(
model_name="notificationtransport",
name="webhook_url",
field=models.TextField(blank=True, validators=[django.core.validators.URLValidator()]),
),
]

View File

@ -2,26 +2,24 @@
from datetime import timedelta
from inspect import getmodule, stack
from smtplib import SMTPException
from typing import TYPE_CHECKING, Optional, Type, Union
from typing import Optional, Union
from uuid import uuid4
from django.conf import settings
from django.core.validators import URLValidator
from django.db import models
from django.http import HttpRequest
from django.http.request import QueryDict
from django.utils.timezone import now
from django.utils.translation import gettext as _
from requests import RequestException
from requests import RequestException, post
from structlog.stdlib import get_logger
from authentik import __version__
from authentik.core.middleware import SESSION_IMPERSONATE_ORIGINAL_USER, SESSION_IMPERSONATE_USER
from authentik.core.models import ExpiringModel, Group, PropertyMapping, User
from authentik.core.models import ExpiringModel, Group, User
from authentik.events.geo import GEOIP_READER
from authentik.events.utils import cleanse_dict, get_user, model_to_dict, sanitize_dict
from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.http import get_client_ip, get_http_session
from authentik.lib.utils.http import get_client_ip
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.models import PolicyBindingModel
from authentik.stages.email.utils import TemplateEmailMessage
@ -29,8 +27,6 @@ from authentik.tenants.models import Tenant
from authentik.tenants.utils import DEFAULT_TENANT
LOGGER = get_logger("authentik.events")
if TYPE_CHECKING:
from rest_framework.serializers import Serializer
def default_event_duration():
@ -141,9 +137,8 @@ class Event(ExpiringModel):
`user` arguments optionally overrides user from requests."""
if request:
self.context["http_request"] = {
"path": request.path,
"path": request.get_full_path(),
"method": request.method,
"args": QueryDict(request.META.get("QUERY_STRING", "")),
}
if hasattr(request, "tenant"):
tenant: Tenant = request.tenant
@ -224,10 +219,7 @@ class NotificationTransport(models.Model):
name = models.TextField(unique=True)
mode = models.TextField(choices=TransportMode.choices)
webhook_url = models.TextField(blank=True, validators=[URLValidator()])
webhook_mapping = models.ForeignKey(
"NotificationWebhookMapping", on_delete=models.SET_DEFAULT, null=True, default=None
)
webhook_url = models.TextField(blank=True)
send_once = models.BooleanField(
default=False,
help_text=_(
@ -247,22 +239,15 @@ class NotificationTransport(models.Model):
def send_webhook(self, notification: "Notification") -> list[str]:
"""Send notification to generic webhook"""
default_body = {
"body": notification.body,
"severity": notification.severity,
"user_email": notification.user.email,
"user_username": notification.user.username,
}
if self.webhook_mapping:
default_body = self.webhook_mapping.evaluate(
user=notification.user,
request=None,
notification=notification,
)
try:
response = get_http_session().post(
response = post(
self.webhook_url,
json=default_body,
json={
"body": notification.body,
"severity": notification.severity,
"user_email": notification.user.email,
"user_username": notification.user.username,
},
)
response.raise_for_status()
except RequestException as exc:
@ -312,7 +297,7 @@ class NotificationTransport(models.Model):
if notification.event:
body["attachments"][0]["title"] = notification.event.action
try:
response = get_http_session().post(self.webhook_url, json=body)
response = post(self.webhook_url, json=body)
response.raise_for_status()
except RequestException as exc:
text = exc.response.text if exc.response else str(exc)
@ -429,25 +414,3 @@ class NotificationRule(PolicyBindingModel):
verbose_name = _("Notification Rule")
verbose_name_plural = _("Notification Rules")
class NotificationWebhookMapping(PropertyMapping):
"""Modify the schema and layout of the webhook being sent"""
@property
def component(self) -> str:
return "ak-property-mapping-notification-form"
@property
def serializer(self) -> Type["Serializer"]:
from authentik.events.api.notification_mapping import NotificationWebhookMappingSerializer
return NotificationWebhookMappingSerializer
def __str__(self):
return f"Notification Webhook Mapping {self.name}"
class Meta:
verbose_name = _("Notification Webhook Mapping")
verbose_name_plural = _("Notification Webhook Mappings")
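Note: send_webhook above only swaps the payload when a webhook mapping is configured; otherwise the default body is posted unchanged. A condensed sketch of that dispatch, with stand-in objects rather than the real models:

    def build_webhook_body(notification, mapping=None) -> dict:
        body = {
            "body": notification.body,
            "severity": notification.severity,
            "user_email": notification.user.email,
            "user_username": notification.user.username,
        }
        if mapping is not None:
            # a mapping is an expression that returns the payload to send,
            # letting admins reshape the JSON for the receiving service
            body = mapping.evaluate(user=notification.user, request=None, notification=notification)
        return body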

View File

@ -3,6 +3,7 @@ from dataclasses import dataclass, field
from datetime import datetime
from enum import Enum
from timeit import default_timer
from traceback import format_tb
from typing import Any, Optional
from celery import Task
@ -41,7 +42,8 @@ class TaskResult:
def with_error(self, exc: Exception) -> "TaskResult":
"""Since errors might not always be pickle-able, set the traceback"""
self.messages.extend(exception_to_string(exc).splitlines())
self.messages.extend(format_tb(exc.__traceback__))
self.messages.append(str(exc))
return self

View File

@ -4,13 +4,7 @@ from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.core.models import User
from authentik.events.models import (
Event,
EventAction,
Notification,
NotificationSeverity,
TransportMode,
)
from authentik.events.models import Event, EventAction, Notification, NotificationSeverity
class TestEventsAPI(APITestCase):
@ -47,23 +41,3 @@ class TestEventsAPI(APITestCase):
)
notification.refresh_from_db()
self.assertTrue(notification.seen)
def test_transport(self):
"""Test transport API"""
response = self.client.post(
reverse("authentik_api:notificationtransport-list"),
data={
"name": "foo-with",
"mode": TransportMode.WEBHOOK,
"webhook_url": "http://foo.com",
},
)
self.assertEqual(response.status_code, 201)
response = self.client.post(
reverse("authentik_api:notificationtransport-list"),
data={
"name": "foo-without",
"mode": TransportMode.WEBHOOK,
},
)
self.assertEqual(response.status_code, 400)

View File

@ -77,7 +77,7 @@ def sanitize_dict(source: dict[Any, Any]) -> dict[Any, Any]:
final_dict = {}
for key, value in source.items():
if is_dataclass(value):
# Because asdict calls `copy.deepcopy(obj)` on everything that's not tuple/dict,
# Because asdict calls `copy.deepcopy(obj)` on everything thats not tuple/dict,
# and deepcopy doesn't work with HttpRequests (neither django nor rest_framework).
# Currently, the only dataclass that actually holds an http request is a PolicyRequest
if isinstance(value, PolicyRequest):

View File

@ -86,7 +86,7 @@ class StageViewSet(
@action(detail=False, pagination_class=None, filter_backends=[])
def user_settings(self, request: Request) -> Response:
"""Get all stages the user can configure"""
_all_stages: Iterable[Stage] = Stage.objects.all().select_subclasses().order_by("name")
_all_stages: Iterable[Stage] = Stage.objects.all().select_subclasses()
matching_stages: list[dict] = []
for stage in _all_stages:
user_settings = stage.ui_user_settings

View File

@ -57,11 +57,11 @@ class FlowPlan:
markers: list[StageMarker] = field(default_factory=list)
def append_stage(self, stage: Stage, marker: Optional[StageMarker] = None):
"""Append `stage` to all stages, optionally with stage marker"""
"""Append `stage` to all stages, optionall with stage marker"""
return self.append(FlowStageBinding(stage=stage), marker)
def append(self, binding: FlowStageBinding, marker: Optional[StageMarker] = None):
"""Append `stage` to all stages, optionally with stage marker"""
"""Append `stage` to all stages, optionall with stage marker"""
self.bindings.append(binding)
self.markers.append(marker or StageMarker())

View File

@ -1,31 +0,0 @@
"""stage view tests"""
from typing import Callable, Type
from django.test import RequestFactory, TestCase
from authentik.flows.stage import StageView
from authentik.flows.views import FlowExecutorView
from authentik.lib.utils.reflection import all_subclasses
class TestViews(TestCase):
"""Generic model properties tests"""
def setUp(self) -> None:
self.factory = RequestFactory()
self.exec = FlowExecutorView(request=self.factory.get("/"))
def view_tester_factory(view_class: Type[StageView]) -> Callable:
"""Test a form"""
def tester(self: TestViews):
model_class = view_class(self.exec)
self.assertIsNotNone(model_class.post)
self.assertIsNotNone(model_class.get)
return tester
for view in all_subclasses(StageView):
setattr(TestViews, f"test_view_{view.__name__}", view_tester_factory(view))

View File

@ -438,7 +438,7 @@ class TestFlowExecutor(APITestCase):
# third request, this should trigger the re-evaluate
# A get request will evaluate the policies and this will return stage 4
# but it won't save it, hence we can't check the plan
# but it won't save it, hence we cant' check the plan
response = self.client.get(exec_url)
self.assertEqual(response.status_code, 200)
self.assertJSONEqual(

View File

@ -11,7 +11,7 @@ from authentik.lib.sentry import SentryIgnoredException
def get_attrs(obj: SerializerModel) -> dict[str, Any]:
"""Get object's attributes via their serializer, and convert it to a normal dict"""
"""Get object's attributes via their serializer, and covert it to a normal dict"""
data = dict(obj.serializer(obj).data)
to_remove = (
"policies",

View File

@ -14,7 +14,12 @@ from django.utils.decorators import method_decorator
from django.views.decorators.clickjacking import xframe_options_sameorigin
from django.views.generic import View
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, PolymorphicProxySerializer, extend_schema
from drf_spectacular.utils import (
OpenApiParameter,
OpenApiResponse,
PolymorphicProxySerializer,
extend_schema,
)
from rest_framework.permissions import AllowAny
from rest_framework.views import APIView
from sentry_sdk import capture_exception
@ -126,12 +131,12 @@ class FlowExecutorView(APIView):
# pylint: disable=unused-argument, too-many-return-statements
def dispatch(self, request: HttpRequest, flow_slug: str) -> HttpResponse:
# Early check if there's an active Plan for the current session
# Early check if theres an active Plan for the current session
if SESSION_KEY_PLAN in self.request.session:
self.plan = self.request.session[SESSION_KEY_PLAN]
if self.plan.flow_pk != self.flow.pk.hex:
self._logger.warning(
"f(exec): Found existing plan for other flow, deleting plan",
"f(exec): Found existing plan for other flow, deleteing plan",
)
# Existing plan is deleted from session and instance
self.plan = None
@ -208,6 +213,9 @@ class FlowExecutorView(APIView):
serializers=challenge_types(),
resource_type_field_name="component",
),
404: OpenApiResponse(
description="No Token found"
), # This error can be raised by the email stage
},
request=OpenApiTypes.NONE,
parameters=[
@ -433,7 +441,7 @@ class ToDefaultFlow(View):
plan: FlowPlan = self.request.session[SESSION_KEY_PLAN]
if plan.flow_pk != flow.pk.hex:
LOGGER.warning(
"f(def): Found existing plan for other flow, deleting plan",
"f(def): Found existing plan for other flow, deleteing plan",
flow_slug=flow.slug,
)
del self.request.session[SESSION_KEY_PLAN]

View File

@ -9,9 +9,7 @@ postgresql:
web:
listen: 0.0.0.0:9000
listen_tls: 0.0.0.0:9443
listen_metrics: 0.0.0.0:9300
load_local_files: false
outpost_port_offset: 0
redis:
host: localhost
@ -56,7 +54,6 @@ outposts:
# %(build_hash)s: Build hash if you're running a beta version
docker_image_base: "ghcr.io/goauthentik/%(type)s:%(version)s"
disable_update_check: false
avatars: env://AUTHENTIK_AUTHENTIK__AVATARS?gravatar
geoip: "./GeoLite2-City.mmdb"

View File

@ -4,13 +4,13 @@ from textwrap import indent
from typing import Any, Iterable, Optional
from django.core.exceptions import FieldError
from requests import Session
from rest_framework.serializers import ValidationError
from sentry_sdk.hub import Hub
from sentry_sdk.tracing import Span
from structlog.stdlib import get_logger
from authentik.core.models import User
from authentik.lib.utils.http import get_http_session
LOGGER = get_logger()
@ -35,7 +35,7 @@ class BaseEvaluator:
"ak_is_group_member": BaseEvaluator.expr_is_group_member,
"ak_user_by": BaseEvaluator.expr_user_by,
"ak_logger": get_logger(),
"requests": get_http_session(),
"requests": Session(),
}
self._context = {}
self._filename = "BaseEvalautor"

View File

@ -93,7 +93,6 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
if "exc_info" in hint:
_, exc_value, _ = hint["exc_info"]
if isinstance(exc_value, ignored_classes):
LOGGER.debug("dropping exception", exception=exc_value)
return None
if "logger" in event:
if event["logger"] in [

View File

@ -32,7 +32,7 @@ class TestConfig(TestCase):
config = ConfigLoader()
environ["foo"] = "bar"
self.assertEqual(config.parse_uri("env://foo"), "bar")
self.assertEqual(config.parse_uri("env://foo?bar"), "bar")
self.assertEqual(config.parse_uri("env://fo?bar"), "bar")
def test_uri_file(self):
"""Test URI parsing (file load)"""

View File

@ -27,7 +27,7 @@ class TestHTTP(TestCase):
token = Token.objects.create(
identifier="test", user=self.user, intent=TokenIntents.INTENT_API
)
# Invalid, non-existent token
# Invalid, non-existant token
request = self.factory.get(
"/",
**{
@ -36,7 +36,7 @@ class TestHTTP(TestCase):
},
)
self.assertEqual(get_client_ip(request), "127.0.0.1")
# Invalid, user doesn't have permissions
# Invalid, user doesn't have permisions
request = self.factory.get(
"/",
**{

View File

@ -1,13 +1,9 @@
"""http helpers"""
from os import environ
from typing import Any, Optional
from django.http import HttpRequest
from requests.sessions import Session
from structlog.stdlib import get_logger
from authentik import ENV_GIT_HASH_KEY, __version__
OUTPOST_REMOTE_IP_HEADER = "HTTP_X_AUTHENTIK_REMOTE_IP"
OUTPOST_TOKEN_HEADER = "HTTP_X_AUTHENTIK_OUTPOST_TOKEN" # nosec
DEFAULT_IP = "255.255.255.255"
@ -64,16 +60,3 @@ def get_client_ip(request: Optional[HttpRequest]) -> str:
if override:
return override
return _get_client_ip_from_meta(request.META)
def authentik_user_agent() -> str:
"""Get a common user agent"""
build = environ.get(ENV_GIT_HASH_KEY, "tagged")
return f"authentik@{__version__} (build={build})"
def get_http_session() -> Session:
"""Get a requests session with common headers"""
session = Session()
session.headers["User-Agent"] = authentik_user_agent()
return session

View File

@ -1,6 +1,5 @@
"""authentik lib reflection utilities"""
from importlib import import_module
from typing import Union
from django.conf import settings
@ -20,12 +19,12 @@ def all_subclasses(cls, sort=True):
return classes
def class_to_path(cls: type) -> str:
def class_to_path(cls):
"""Turn Class (Class or instance) into module path"""
return f"{cls.__module__}.{cls.__name__}"
def path_to_class(path: Union[str, None]) -> Union[type, None]:
def path_to_class(path):
"""Import module and return class"""
if not path:
return None

View File

@ -104,7 +104,7 @@ class OutpostConsumer(AuthJsonConsumer):
expected=self.outpost.config.kubernetes_replicas,
).inc()
LOGGER.debug(
"added outpost instance to cache",
"added outpost instace to cache",
outpost=self.outpost,
instance_uuid=self.last_uid,
)

View File

@ -38,7 +38,6 @@ class DockerController(BaseController):
"AUTHENTIK_HOST": self.outpost.config.authentik_host.lower(),
"AUTHENTIK_INSECURE": str(self.outpost.config.authentik_host_insecure).lower(),
"AUTHENTIK_TOKEN": self.outpost.token.key,
"AUTHENTIK_HOST_BROWSER": self.outpost.config.authentik_host_browser,
}
def _comp_env(self, container: Container) -> bool:
@ -99,16 +98,15 @@ class DockerController(BaseController):
"image": image_name,
"name": container_name,
"detach": True,
"ports": {
f"{port.inner_port or port.port}/{port.protocol.lower()}": port.port
for port in self.deployment_ports
},
"environment": self._get_env(),
"labels": self._get_labels(),
"restart_policy": {"Name": "unless-stopped"},
"network": self.outpost.config.docker_network,
}
if self.outpost.config.docker_map_ports:
container_args["ports"] = {
f"{port.inner_port or port.port}/{port.protocol.lower()}": str(port.port)
for port in self.deployment_ports
}
if settings.TEST:
del container_args["ports"]
del container_args["network"]
@ -166,9 +164,11 @@ class DockerController(BaseController):
self.down()
return self.up(depth + 1)
# Check that container is healthy
if container.status == "running" and container.attrs.get("State", {}).get(
"Health", {}
).get("Status", "") not in ["healthy", "starting"]:
if (
container.status == "running"
and container.attrs.get("State", {}).get("Health", {}).get("Status", "")
!= "healthy"
):
# At this point we know the config is correct, but the container isn't healthy,
# so we just restart it with the same config
if has_been_created:
@ -217,7 +217,6 @@ class DockerController(BaseController):
"AUTHENTIK_HOST": self.outpost.config.authentik_host,
"AUTHENTIK_INSECURE": str(self.outpost.config.authentik_host_insecure),
"AUTHENTIK_TOKEN": self.outpost.token.key,
"AUTHENTIK_HOST_BROWSER": self.outpost.config.authentik_host_browser,
},
"labels": self._get_labels(),
}
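Note: with docker_map_ports enabled, the controller publishes the deployment ports on the host; disabling it leaves the container reachable only over the configured docker network. A rough docker-py sketch of the same arguments; image name and values are examples only:

    import docker

    client = docker.from_env()
    container = client.containers.create(
        "ghcr.io/goauthentik/proxy:2021.8.5",  # example image
        name="ak-outpost-example",
        detach=True,
        ports={"9000/tcp": 9000, "9443/tcp": 9443},  # omitted when docker_map_ports is false
        environment={"AUTHENTIK_HOST": "https://authentik.example.org"},
        restart_policy={"Name": "unless-stopped"},
    )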

View File

@ -10,7 +10,7 @@ from structlog.stdlib import get_logger
from urllib3.exceptions import HTTPError
from authentik import __version__
from authentik.outposts.controllers.k8s.triggers import NeedsRecreate, NeedsUpdate
from authentik.lib.sentry import SentryIgnoredException
from authentik.outposts.managed import MANAGED_OUTPOST
if TYPE_CHECKING:
@ -20,6 +20,18 @@ if TYPE_CHECKING:
T = TypeVar("T", V1Pod, V1Deployment)
class ReconcileTrigger(SentryIgnoredException):
"""Base trigger raised by child classes to notify us"""
class NeedsRecreate(ReconcileTrigger):
"""Exception to trigger a complete recreate of the Kubernetes Object"""
class NeedsUpdate(ReconcileTrigger):
"""Exception to trigger an update to the Kubernetes Object"""
class KubernetesObjectReconciler(Generic[T]):
"""Base Kubernetes Reconciler, handles the basic logic."""
@ -97,7 +109,7 @@ class KubernetesObjectReconciler(Generic[T]):
except (OpenApiException, HTTPError) as exc:
# pylint: disable=no-member
if isinstance(exc, ApiException) and exc.status == 404:
self.logger.debug("Failed to get current, assuming non-existent")
self.logger.debug("Failed to get current, assuming non-existant")
return
self.logger.debug("Other unhandled error", exc=exc)
raise exc
@ -117,7 +129,7 @@ class KubernetesObjectReconciler(Generic[T]):
raise NotImplementedError
def retrieve(self) -> T:
"""API Wrapper to retrieve object"""
"""API Wrapper to retrive object"""
raise NotImplementedError
def delete(self, reference: T):

View File

@ -17,9 +17,7 @@ from kubernetes.client import (
)
from authentik.outposts.controllers.base import FIELD_MANAGER
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
from authentik.outposts.controllers.k8s.triggers import NeedsUpdate
from authentik.outposts.controllers.k8s.utils import compare_ports
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler, NeedsUpdate
from authentik.outposts.models import Outpost
if TYPE_CHECKING:
@ -37,10 +35,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
self.outpost = self.controller.outpost
def reconcile(self, current: V1Deployment, reference: V1Deployment):
compare_ports(
current.spec.template.spec.containers[0].ports,
reference.spec.template.spec.containers[0].ports,
)
super().reconcile(current, reference)
if current.spec.replicas != reference.spec.replicas:
raise NeedsUpdate()
if (
@ -48,7 +43,6 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
!= reference.spec.template.spec.containers[0].image
):
raise NeedsUpdate()
super().reconcile(current, reference)
def get_pod_meta(self) -> dict[str, str]:
"""Get common object metadata"""
@ -95,15 +89,6 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
)
),
),
V1EnvVar(
name="AUTHENTIK_HOST_BROWSER",
value_from=V1EnvVarSource(
secret_key_ref=V1SecretKeySelector(
name=self.name,
key="authentik_host_browser",
)
),
),
V1EnvVar(
name="AUTHENTIK_TOKEN",
value_from=V1EnvVarSource(

View File

@ -5,8 +5,7 @@ from typing import TYPE_CHECKING
from kubernetes.client import CoreV1Api, V1Secret
from authentik.outposts.controllers.base import FIELD_MANAGER
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
from authentik.outposts.controllers.k8s.triggers import NeedsUpdate
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler, NeedsUpdate
if TYPE_CHECKING:
from authentik.outposts.controllers.kubernetes import KubernetesController
@ -27,7 +26,7 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
def reconcile(self, current: V1Secret, reference: V1Secret):
super().reconcile(current, reference)
for key in reference.data.keys():
if key not in current.data or current.data[key] != reference.data[key]:
if current.data[key] != reference.data[key]:
raise NeedsUpdate()
def get_reference_object(self) -> V1Secret:
@ -41,9 +40,6 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
str(self.controller.outpost.config.authentik_host_insecure)
),
"token": b64string(self.controller.outpost.token.key),
"authentik_host_browser": b64string(
self.controller.outpost.config.authentik_host_browser
),
},
)

View File

@ -3,10 +3,9 @@ from typing import TYPE_CHECKING
from kubernetes.client import CoreV1Api, V1Service, V1ServicePort, V1ServiceSpec
from authentik.outposts.controllers.base import FIELD_MANAGER
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
from authentik.outposts.controllers.base import FIELD_MANAGER, DeploymentPort
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler, NeedsUpdate
from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler
from authentik.outposts.controllers.k8s.utils import compare_ports
if TYPE_CHECKING:
from authentik.outposts.controllers.kubernetes import KubernetesController
@ -20,15 +19,46 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
self.api = CoreV1Api(controller.client)
def reconcile(self, current: V1Service, reference: V1Service):
compare_ports(current.spec.ports, reference.spec.ports)
# run the base reconcile last, as that will probably raise NeedsUpdate
# after an authentik update. However the ports might have also changed during
# the update, so this causes the service to be re-created with higher
# priority than being updated.
super().reconcile(current, reference)
if len(current.spec.ports) != len(reference.spec.ports):
raise NeedsUpdate()
for port in reference.spec.ports:
if port not in current.spec.ports:
raise NeedsUpdate()
def get_embedded_reference_object(self) -> V1Service:
"""Get Service for embedded outpost"""
selector_labels = {
"app.kubernetes.io/name": "authentik",
"app.kubernetes.io/component": "server",
}
meta = self.get_object_meta(name=self.name)
ports = []
for port in [
DeploymentPort(9000, "http", "tcp"),
DeploymentPort(9443, "https", "tcp"),
]:
ports.append(
V1ServicePort(
name=port.name,
port=port.port,
protocol=port.protocol.upper(),
target_port=port.inner_port or port.port,
)
)
return V1Service(
metadata=meta,
spec=V1ServiceSpec(
ports=ports,
selector=selector_labels,
type=self.controller.outpost.config.kubernetes_service_type,
),
)
def get_reference_object(self) -> V1Service:
"""Get deployment object for outpost"""
if self.is_embedded:
return self.get_embedded_reference_object()
meta = self.get_object_meta(name=self.name)
ports = []
for port in self.controller.deployment_ports:
@ -40,13 +70,7 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
target_port=port.inner_port or port.port,
)
)
if self.is_embedded:
selector_labels = {
"app.kubernetes.io/name": "authentik",
"app.kubernetes.io/component": "server",
}
else:
selector_labels = DeploymentReconciler(self.controller).get_pod_meta()
selector_labels = DeploymentReconciler(self.controller).get_pod_meta()
return V1Service(
metadata=meta,
spec=V1ServiceSpec(

View File

@ -1,150 +0,0 @@
"""Kubernetes Prometheus ServiceMonitor Reconciler"""
from dataclasses import asdict, dataclass, field
from typing import TYPE_CHECKING
from dacite import from_dict
from kubernetes.client import ApiextensionsV1Api, CustomObjectsApi
from authentik.outposts.controllers.base import FIELD_MANAGER
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
if TYPE_CHECKING:
from authentik.outposts.controllers.kubernetes import KubernetesController
@dataclass
class PrometheusServiceMonitorSpecEndpoint:
"""Prometheus ServiceMonitor endpoint spec"""
port: str
path: str = field(default="/metrics")
@dataclass
class PrometheusServiceMonitorSpecSelector:
"""Prometheus ServiceMonitor selector spec"""
# pylint: disable=invalid-name
matchLabels: dict
@dataclass
class PrometheusServiceMonitorSpec:
"""Prometheus ServiceMonitor spec"""
endpoints: list[PrometheusServiceMonitorSpecEndpoint]
# pylint: disable=invalid-name
selector: PrometheusServiceMonitorSpecSelector
@dataclass
class PrometheusServiceMonitorMetadata:
"""Prometheus ServiceMonitor metadata"""
name: str
namespace: str
labels: dict = field(default_factory=dict)
@dataclass
class PrometheusServiceMonitor:
"""Prometheus ServiceMonitor"""
# pylint: disable=invalid-name
apiVersion: str
kind: str
metadata: PrometheusServiceMonitorMetadata
spec: PrometheusServiceMonitorSpec
CRD_NAME = "servicemonitors.monitoring.coreos.com"
CRD_GROUP = "monitoring.coreos.com"
CRD_VERSION = "v1"
CRD_PLURAL = "servicemonitors"
class PrometheusServiceMonitorReconciler(KubernetesObjectReconciler[PrometheusServiceMonitor]):
"""Kubernetes Prometheus ServiceMonitor Reconciler"""
def __init__(self, controller: "KubernetesController") -> None:
super().__init__(controller)
self.api_ex = ApiextensionsV1Api(controller.client)
self.api = CustomObjectsApi(controller.client)
@property
def noop(self) -> bool:
return (not self._crd_exists()) or (self.is_embedded)
def _crd_exists(self) -> bool:
"""Check if the Prometheus ServiceMonitor exists"""
return bool(
len(
self.api_ex.list_custom_resource_definition(
field_selector=f"metadata.name={CRD_NAME}"
).items
)
)
def get_reference_object(self) -> PrometheusServiceMonitor:
"""Get service monitor object for outpost"""
return PrometheusServiceMonitor(
apiVersion=f"{CRD_GROUP}/{CRD_VERSION}",
kind="ServiceMonitor",
metadata=PrometheusServiceMonitorMetadata(
name=self.name,
namespace=self.namespace,
labels=self.get_object_meta().labels,
),
spec=PrometheusServiceMonitorSpec(
endpoints=[
PrometheusServiceMonitorSpecEndpoint(
port="http-metrics",
)
],
selector=PrometheusServiceMonitorSpecSelector(
matchLabels=self.get_object_meta(name=self.name).labels,
),
),
)
def create(self, reference: PrometheusServiceMonitor):
return self.api.create_namespaced_custom_object(
group=CRD_GROUP,
version=CRD_VERSION,
plural=CRD_PLURAL,
namespace=self.namespace,
body=asdict(reference),
field_manager=FIELD_MANAGER,
)
def delete(self, reference: PrometheusServiceMonitor):
return self.api.delete_namespaced_custom_object(
group=CRD_GROUP,
version=CRD_VERSION,
namespace=self.namespace,
plural=CRD_PLURAL,
name=self.name,
)
def retrieve(self) -> PrometheusServiceMonitor:
return from_dict(
PrometheusServiceMonitor,
self.api.get_namespaced_custom_object(
group=CRD_GROUP,
version=CRD_VERSION,
namespace=self.namespace,
plural=CRD_PLURAL,
name=self.name,
),
)
def update(self, current: PrometheusServiceMonitor, reference: PrometheusServiceMonitor):
return self.api.patch_namespaced_custom_object(
group=CRD_GROUP,
version=CRD_VERSION,
namespace=self.namespace,
plural=CRD_PLURAL,
name=self.name,
body=asdict(reference),
field_manager=FIELD_MANAGER,
)

View File

@ -1,14 +0,0 @@
"""exceptions used by the kubernetes reconciler to trigger updates"""
from authentik.lib.sentry import SentryIgnoredException
class ReconcileTrigger(SentryIgnoredException):
"""Base trigger raised by child classes to notify us"""
class NeedsRecreate(ReconcileTrigger):
"""Exception to trigger a complete recreate of the Kubernetes Object"""
class NeedsUpdate(ReconcileTrigger):
"""Exception to trigger an update to the Kubernetes Object"""

View File

@ -1,11 +1,8 @@
"""k8s utils"""
from pathlib import Path
from kubernetes.client.models.v1_container_port import V1ContainerPort
from kubernetes.config.incluster_config import SERVICE_TOKEN_FILENAME
from authentik.outposts.controllers.k8s.triggers import NeedsRecreate
def get_namespace() -> str:
"""Get the namespace if we're running in a pod, otherwise default to default"""
@ -14,12 +11,3 @@ def get_namespace() -> str:
with open(path, "r", encoding="utf8") as _namespace_file:
return _namespace_file.read()
return "default"
def compare_ports(current: list[V1ContainerPort], reference: list[V1ContainerPort]):
"""Compare ports of a list"""
if len(current) != len(reference):
raise NeedsRecreate()
for port in reference:
if port not in current:
raise NeedsRecreate()

View File

@ -13,7 +13,6 @@ from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler
from authentik.outposts.controllers.k8s.secret import SecretReconciler
from authentik.outposts.controllers.k8s.service import ServiceReconciler
from authentik.outposts.controllers.k8s.service_monitor import PrometheusServiceMonitorReconciler
from authentik.outposts.models import KubernetesServiceConnection, Outpost, ServiceConnectionInvalid
@ -33,9 +32,8 @@ class KubernetesController(BaseController):
"secret": SecretReconciler,
"deployment": DeploymentReconciler,
"service": ServiceReconciler,
"prometheus servicemonitor": PrometheusServiceMonitorReconciler,
}
self.reconcile_order = ["secret", "deployment", "service", "prometheus servicemonitor"]
self.reconcile_order = ["secret", "deployment", "service"]
def up(self):
try:

View File

@ -30,7 +30,7 @@ class DockerInlineTLS:
return str(path)
def write(self) -> TLSConfig:
"""Create TLSConfig with Certificate Key pairs"""
"""Create TLSConfig with Certificate Keypairs"""
# So yes, this is quite ugly. But sadly, there is no clean way to pass
# docker-py (which is using requests (which is using urllib3)) a certificate
# for verification or authentication as string.

View File

@ -64,7 +64,6 @@ class OutpostConfig:
authentik_host: str = ""
authentik_host_insecure: bool = False
authentik_host_browser: str = ""
log_level: str = CONFIG.y("log_level")
error_reporting_enabled: bool = CONFIG.y_bool("error_reporting.enabled")
@ -72,7 +71,6 @@ class OutpostConfig:
object_naming_template: str = field(default="ak-outpost-%(name)s")
docker_network: Optional[str] = field(default=None)
docker_map_ports: bool = field(default=True)
kubernetes_replicas: int = field(default=1)
kubernetes_namespace: str = field(default_factory=get_namespace)
@ -341,8 +339,19 @@ class Outpost(ManagedModel):
"""Username for service user"""
return f"ak-outpost-{self.uuid.hex}"
def build_user_permissions(self, user: User):
"""Create per-object and global permissions for outpost service-account"""
@property
def user(self) -> User:
"""Get/create user with access to all required objects"""
users = User.objects.filter(username=self.user_identifier)
if not users.exists():
user: User = User.objects.create(username=self.user_identifier)
user.set_unusable_password()
user.save()
else:
user = users.first()
user.attributes[USER_ATTRIBUTE_SA] = True
user.attributes[USER_ATTRIBUTE_CAN_OVERRIDE_IP] = True
user.save()
# To ensure the user only has the correct permissions, we delete all of them and re-add
# the ones the user needs
with transaction.atomic():
@ -386,23 +395,6 @@ class Outpost(ManagedModel):
"Updated service account's permissions",
perms=UserObjectPermission.objects.filter(user=user),
)
@property
def user(self) -> User:
"""Get/create user with access to all required objects"""
users = User.objects.filter(username=self.user_identifier)
should_create_user = not users.exists()
if should_create_user:
user: User = User.objects.create(username=self.user_identifier)
user.set_unusable_password()
user.save()
else:
user = users.first()
user.attributes[USER_ATTRIBUTE_SA] = True
user.attributes[USER_ATTRIBUTE_CAN_OVERRIDE_IP] = True
user.save()
if should_create_user:
self.build_user_permissions(user)
return user
@property

View File

@ -100,7 +100,7 @@ def outpost_controller(
if from_cache:
outpost: Outpost = cache.get(CACHE_KEY_OUTPOST_DOWN % outpost_pk)
else:
outpost: Outpost = Outpost.objects.filter(pk=outpost_pk).first()
outpost: Outpost = Outpost.objects.get(pk=outpost_pk)
if not outpost:
return
self.set_uid(slugify(outpost.name))
@ -126,7 +126,6 @@ def outpost_token_ensurer(self: MonitoredTask):
all_outposts = Outpost.objects.all()
for outpost in all_outposts:
_ = outpost.token
outpost.build_user_permissions(outpost.user)
self.set_status(
TaskResult(
TaskResultStatus.SUCCESSFUL,
@ -149,7 +148,10 @@ def outpost_post_save(model_class: str, model_pk: Any):
return
if isinstance(instance, Outpost):
LOGGER.debug("Trigger reconcile for outpost", instance=instance)
LOGGER.debug("Ensuring token and permissions for outpost", instance=instance)
_ = instance.token
_ = instance.user
LOGGER.debug("Trigger reconcile for outpost")
outpost_controller.delay(instance.pk)
if isinstance(instance, (OutpostModel, Outpost)):
@ -182,7 +184,7 @@ def outpost_post_save(model_class: str, model_pk: Any):
def outpost_send_update(model_instace: Model):
"""Send outpost update to all registered outposts, regardless to which authentik
"""Send outpost update to all registered outposts, irregardless to which authentik
instance they are connected"""
channel_layer = get_channel_layer()
if isinstance(model_instace, OutpostModel):
@ -197,7 +199,7 @@ def _outpost_single_update(outpost: Outpost, layer=None):
# Ensure token again, because this function is called when anything related to an
# OutpostModel is saved, so we can be sure permissions are right
_ = outpost.token
outpost.build_user_permissions(outpost.user)
_ = outpost.user
if not layer: # pragma: no cover
layer = get_channel_layer()
for state in OutpostState.for_outpost(outpost):
@ -209,7 +211,7 @@ def _outpost_single_update(outpost: Outpost, layer=None):
@CELERY_APP.task()
def outpost_local_connection():
"""Checks the local environment and create Service connections."""
# Explicitly check against token filename, as that's
# Explicitly check against token filename, as thats
# only present when the integration is enabled
if Path(SERVICE_TOKEN_FILENAME).exists():
LOGGER.debug("Detected in-cluster Kubernetes Config")

View File

@ -81,11 +81,11 @@ class PolicyEngine:
.iterator()
)
def _check_policy_type(self, binding: PolicyBinding):
def _check_policy_type(self, policy: Policy):
"""Check policy type, make sure it's not the root class as that has no logic implemented"""
# pyright: reportGeneralTypeIssues=false
if binding.policy is not None and binding.policy.__class__ == Policy:
raise TypeError(f"Policy '{binding.policy}' is root type")
if policy.__class__ == Policy:
raise TypeError(f"Policy '{policy}' is root type")
def build(self) -> "PolicyEngine":
"""Build wrapper which monitors performance"""
@ -102,7 +102,7 @@ class PolicyEngine:
for binding in self._iter_bindings():
self.__expected_result_count += 1
self._check_policy_type(binding)
self._check_policy_type(binding.policy)
key = cache_key(binding, self.request)
cached_policy = cache.get(key, None)
if cached_policy and self.use_cache:

View File

@ -3,10 +3,8 @@ from ipaddress import ip_address, ip_network
from typing import TYPE_CHECKING, Optional
from django.http import HttpRequest
from django_otp import devices_for_user
from structlog.stdlib import get_logger
from authentik.core.models import User
from authentik.flows.planner import PLAN_CONTEXT_SSO
from authentik.lib.expression.evaluator import BaseEvaluator
from authentik.lib.utils.http import get_client_ip
@ -30,7 +28,6 @@ class PolicyEvaluator(BaseEvaluator):
self._messages = []
self._context["ak_logger"] = get_logger(policy_name)
self._context["ak_message"] = self.expr_func_message
self._context["ak_user_has_authenticator"] = self.expr_func_user_has_authenticator
self._context["ip_address"] = ip_address
self._context["ip_network"] = ip_network
self._filename = policy_name or "PolicyEvaluator"
@ -39,19 +36,6 @@ class PolicyEvaluator(BaseEvaluator):
"""Wrapper to append to messages list, which is returned with PolicyResult"""
self._messages.append(message)
def expr_func_user_has_authenticator(
self, user: User, device_type: Optional[str] = None
) -> bool:
"""Check if a user has any authenticator devices, optionally matching *device_type*"""
user_devices = devices_for_user(user)
if device_type:
for device in user_devices:
device_class = device.__class__.__name__.lower().replace("device", "")
if device_class == device_type:
return True
return False
return len(user_devices) > 0
def set_policy_request(self, request: PolicyRequest):
"""Update context based on policy request (if http request is given, update that too)"""
# update website/docs/expressions/_objects.md

View File

@ -3,10 +3,10 @@ from hashlib import sha1
from django.db import models
from django.utils.translation import gettext as _
from requests import get
from rest_framework.serializers import BaseSerializer
from structlog.stdlib import get_logger
from authentik.lib.utils.http import get_http_session
from authentik.policies.models import Policy, PolicyResult
from authentik.policies.types import PolicyRequest
@ -49,7 +49,7 @@ class HaveIBeenPwendPolicy(Policy):
pw_hash = sha1(password.encode("utf-8")).hexdigest() # nosec
url = f"https://api.pwnedpasswords.com/range/{pw_hash[:5]}"
result = get_http_session().get(url).text
result = get(url).text
final_count = 0
for line in result.split("\r\n"):
full_hash, count = line.split(":")

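Note: the range endpoint implements k-anonymity: only the first five hex characters of the SHA-1 hash leave the server, and the response lists matching suffixes with breach counts. A stand-alone sketch using requests directly, whereas the code above routes the call through authentik's shared session:

    from hashlib import sha1
    from requests import get

    def pwned_count(password: str) -> int:
        digest = sha1(password.encode("utf-8")).hexdigest().upper()  # nosec
        prefix, suffix = digest[:5], digest[5:]
        response = get(f"https://api.pwnedpasswords.com/range/{prefix}")
        for line in response.text.splitlines():
            candidate, count = line.split(":")
            if candidate == suffix:
                return int(count)
        return 0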
View File

@ -46,7 +46,7 @@ def cache_key(binding: PolicyBinding, request: PolicyRequest) -> str:
class PolicyProcess(PROCESS_CLASS):
"""Evaluate a single policy within a separate process"""
"""Evaluate a single policy within a seprate process"""
connection: Connection
binding: PolicyBinding

View File

@ -34,7 +34,7 @@ def update_score(request: HttpRequest, username: str, amount: int):
@receiver(user_login_failed)
# pylint: disable=unused-argument
def handle_failed_login(sender, request, credentials, **_):
"""Lower Score for failed login attempts"""
"""Lower Score for failed loging attempts"""
if "username" in credentials:
update_score(request, credentials.get("username"), -1)

View File

@ -14,7 +14,7 @@ from authentik.policies.types import PolicyRequest
def clear_policy_cache():
"""Ensure no policy-related keys are still cached"""
"""Ensure no policy-related keys are stil cached"""
keys = cache.keys("policy_*")
cache.delete(keys)

View File

@ -1,11 +1,11 @@
"""LDAP Provider Docker Controller"""
"""LDAP Provider Docker Contoller"""
from authentik.outposts.controllers.base import DeploymentPort
from authentik.outposts.controllers.docker import DockerController
from authentik.outposts.models import DockerServiceConnection, Outpost
class LDAPDockerController(DockerController):
"""LDAP Provider Docker Controller"""
"""LDAP Provider Docker Contoller"""
def __init__(self, outpost: Outpost, connection: DockerServiceConnection):
super().__init__(outpost, connection)

View File

@ -1,16 +1,15 @@
"""LDAP Provider Kubernetes Controller"""
"""LDAP Provider Kubernetes Contoller"""
from authentik.outposts.controllers.base import DeploymentPort
from authentik.outposts.controllers.kubernetes import KubernetesController
from authentik.outposts.models import KubernetesServiceConnection, Outpost
class LDAPKubernetesController(KubernetesController):
"""LDAP Provider Kubernetes Controller"""
"""LDAP Provider Kubernetes Contoller"""
def __init__(self, outpost: Outpost, connection: KubernetesServiceConnection):
super().__init__(outpost, connection)
self.deployment_ports = [
DeploymentPort(389, "ldap", "tcp", 3389),
DeploymentPort(636, "ldaps", "tcp", 6636),
DeploymentPort(9300, "http-metrics", "tcp", 9300),
]

View File

@ -1,8 +1,6 @@
"""OAuth2Provider API Views"""
from django_filters.filters import AllValuesMultipleFilter
from django_filters.filterset import FilterSet
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema_field
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.propertymappings import PropertyMappingSerializer
@ -25,7 +23,7 @@ class ScopeMappingSerializer(PropertyMappingSerializer):
class ScopeMappingFilter(FilterSet):
"""Filter for ScopeMapping"""
managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))
managed = AllValuesMultipleFilter(field_name="managed")
class Meta:
model = ScopeMapping

View File

@ -151,13 +151,12 @@ class AuthorizeError(OAuth2Error):
# http://openid.net/specs/openid-connect-core-1_0.html#ImplicitAuthError
hash_or_question = "#" if self.grant_type == GrantTypes.IMPLICIT else "?"
uri = (
f"{self.redirect_uri}{hash_or_question}error="
f"{self.error}&error_description={description}"
uri = "{0}{1}error={2}&error_description={3}".format(
self.redirect_uri, hash_or_question, self.error, description
)
# Add state if present.
uri = uri + (f"&state={self.state}" if self.state else "")
uri = uri + ("&state={0}".format(self.state) if self.state else "")
return uri
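As a worked example of what either formulation yields (all values hypothetical), an implicit-grant error redirect is assembled like this:

# Hypothetical values for illustration only.
redirect_uri = "https://app.example.com/callback"
hash_or_question = "#"  # implicit grants use the URL fragment, others use "?"
error, description, state = "access_denied", "User+denied+consent", "abc123"

uri = f"{redirect_uri}{hash_or_question}error={error}&error_description={description}"
uri = uri + (f"&state={state}" if state else "")
# -> https://app.example.com/callback#error=access_denied&error_description=User+denied+consent&state=abc123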

View File

@ -1,24 +0,0 @@
# Generated by Django 3.2.6 on 2021-09-08 15:12
from django.db import migrations, models
import authentik.lib.utils.time
class Migration(migrations.Migration):
dependencies = [
("authentik_providers_oauth2", "0016_alter_authorizationcode_nonce"),
]
operations = [
migrations.AlterField(
model_name="oauth2provider",
name="token_validity",
field=models.TextField(
default="days=30",
help_text="Tokens not valid on or after current time + this value (Format: hours=1;minutes=2;seconds=3).",
validators=[authentik.lib.utils.time.timedelta_string_validator],
),
),
]

View File

@ -182,7 +182,7 @@ class OAuth2Provider(Provider):
),
)
token_validity = models.TextField(
default="days=30",
default="minutes=10",
validators=[timedelta_string_validator],
help_text=_(
(
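Because the expires_in values in the tests below follow directly from this field, here is a standalone sketch of how a "days=30" style string maps to seconds. This is a re-implementation of the format described in the help text, not the actual timedelta_from_string from authentik.lib.utils.time:

from datetime import timedelta


def timedelta_from_string_sketch(expr: str) -> timedelta:
    """Parse strings like 'hours=1;minutes=2;seconds=3' into a timedelta."""
    kwargs = {}
    for part in expr.split(";"):
        key, _, value = part.partition("=")
        kwargs[key.strip()] = float(value)
    return timedelta(**kwargs)


# days=30 corresponds to 2592000 seconds and minutes=10 to 600, matching the
# two expires_in values that differ in the token tests below.
assert timedelta_from_string_sketch("days=30").total_seconds() == 2592000
assert timedelta_from_string_sketch("minutes=10").total_seconds() == 600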

View File

@ -247,7 +247,7 @@ class TestAuthorize(OAuthTestCase):
"to": (
f"http://localhost#access_token={token.access_token}"
f"&id_token={provider.encode(token.id_token.to_dict())}&token_type=bearer"
f"&expires_in=60&state={state}"
f"&expires_in=600&state={state}"
),
},
)

View File

@ -141,7 +141,7 @@ class TestToken(OAuthTestCase):
"access_token": new_token.access_token,
"refresh_token": new_token.refresh_token,
"token_type": "bearer",
"expires_in": 2592000,
"expires_in": 600,
"id_token": provider.encode(
new_token.id_token.to_dict(),
),
@ -190,7 +190,7 @@ class TestToken(OAuthTestCase):
"access_token": new_token.access_token,
"refresh_token": new_token.refresh_token,
"token_type": "bearer",
"expires_in": 2592000,
"expires_in": 600,
"id_token": provider.encode(
new_token.id_token.to_dict(),
),
@ -236,7 +236,7 @@ class TestToken(OAuthTestCase):
"access_token": new_token.access_token,
"refresh_token": new_token.refresh_token,
"token_type": "bearer",
"expires_in": 2592000,
"expires_in": 600,
"id_token": provider.encode(
new_token.id_token.to_dict(),
),

View File

@ -153,7 +153,7 @@ def protected_resource_view(scopes: list[str]):
if not set(scopes).issubset(set(token.scope)):
LOGGER.warning(
"Scope mismatch.",
"Scope missmatch.",
required=set(scopes),
token_has=set(token.scope),
)
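The check above is plain set arithmetic; a minimal illustration with hypothetical scope values:

# Hypothetical scopes for illustration.
required = {"openid", "profile"}
token_has = {"openid", "email"}

if not required.issubset(token_has):
    # The resource view rejects the request and logs the mismatch.
    print("Scope mismatch", "required:", required, "token has:", token_has)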

View File

@ -238,10 +238,6 @@ class OAuthFulfillmentStage(StageView):
parsed = urlparse(uri)
return HttpResponseRedirectScheme(uri, allowed_schemes=[parsed.scheme])
def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""Wrapper when this stage gets hit with a post request"""
return self.get(request, *args, **kwargs)
# pylint: disable=unused-argument
def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""final Stage of an OAuth2 Flow"""
@ -371,7 +367,7 @@ class OAuthFulfillmentStage(StageView):
query_fragment["token_type"] = "bearer"
query_fragment["expires_in"] = int(
timedelta_from_string(self.provider.access_code_validity).total_seconds()
timedelta_from_string(self.provider.token_validity).total_seconds()
)
query_fragment["state"] = self.params.state if self.params.state else ""

View File

@ -33,7 +33,7 @@ class UserInfoView(View):
for scope in ScopeMapping.objects.filter(scope_name__in=scopes).order_by("scope_name"):
if scope.description != "":
scope_descriptions.append({"id": scope.scope_name, "name": scope.description})
# GitHub Compatibility Scopes are handled differently, since they required custom paths
# GitHub Compatibility Scopes are handeled differently, since they required custom paths
# Hence they don't exist as Scope objects
github_scope_map = {
SCOPE_GITHUB_USER: ("GitHub Compatibility: Access your User Information"),

View File

@ -1,7 +1,7 @@
"""ProxyProvider API Views"""
from typing import Any, Optional
from typing import Any
from drf_spectacular.utils import extend_schema_field
from drf_spectacular.utils import extend_schema_field, extend_schema_serializer
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, ListField, SerializerMethodField
from rest_framework.serializers import ModelSerializer
@ -10,7 +10,6 @@ from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet
from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer
from authentik.lib.utils.time import timedelta_from_string
from authentik.providers.oauth2.views.provider import ProviderInfoView
from authentik.providers.proxy.models import ProxyMode, ProxyProvider
@ -73,7 +72,6 @@ class ProxyProviderSerializer(ProviderSerializer):
"mode",
"redirect_uris",
"cookie_domain",
"token_validity",
]
@ -103,20 +101,11 @@ class ProxyProviderViewSet(UsedByMixin, ModelViewSet):
ordering = ["name"]
@extend_schema_serializer(deprecate_fields=["forward_auth_mode"])
class ProxyOutpostConfigSerializer(ModelSerializer):
"""Proxy provider serializer for outposts"""
oidc_configuration = SerializerMethodField()
token_validity = SerializerMethodField()
@extend_schema_field(OpenIDConnectConfigurationSerializer)
def get_oidc_configuration(self, obj: ProxyProvider):
"""Embed OpenID Connect provider information"""
return ProviderInfoView(request=self.context["request"]._request).get_info(obj)
def get_token_validity(self, obj: ProxyProvider) -> Optional[float]:
"""Get token validity as second count"""
return timedelta_from_string(obj.token_validity).total_seconds()
class Meta:
@ -138,9 +127,13 @@ class ProxyOutpostConfigSerializer(ModelSerializer):
"basic_auth_user_attribute",
"mode",
"cookie_domain",
"token_validity",
]
@extend_schema_field(OpenIDConnectConfigurationSerializer)
def get_oidc_configuration(self, obj: ProxyProvider):
"""Embed OpenID Connect provider information"""
return ProviderInfoView(request=self.context["request"]._request).get_info(obj)
class ProxyOutpostConfigViewSet(ReadOnlyModelViewSet):
"""ProxyProvider Viewset"""

View File

@ -1,4 +1,4 @@
"""Proxy Provider Docker Controller"""
"""Proxy Provider Docker Contoller"""
from urllib.parse import urlparse
from authentik.outposts.controllers.base import DeploymentPort
@ -8,13 +8,13 @@ from authentik.providers.proxy.models import ProxyProvider
class ProxyDockerController(DockerController):
"""Proxy Provider Docker Controller"""
"""Proxy Provider Docker Contoller"""
def __init__(self, outpost: Outpost, connection: DockerServiceConnection):
super().__init__(outpost, connection)
self.deployment_ports = [
DeploymentPort(9000, "http", "tcp"),
DeploymentPort(9443, "https", "tcp"),
DeploymentPort(4180, "http", "tcp"),
DeploymentPort(4443, "https", "tcp"),
]
def _get_labels(self) -> dict[str, str]:
@ -29,11 +29,6 @@ class ProxyDockerController(DockerController):
labels[f"traefik.http.routers.{traefik_name}-router.rule"] = f"Host({','.join(hosts)})"
labels[f"traefik.http.routers.{traefik_name}-router.tls"] = "true"
labels[f"traefik.http.routers.{traefik_name}-router.service"] = f"{traefik_name}-service"
labels[
f"traefik.http.services.{traefik_name}-service.loadbalancer.healthcheck.path"
] = "/akprox/ping"
labels[
f"traefik.http.services.{traefik_name}-service.loadbalancer.healthcheck.port"
] = "9300"
labels[f"traefik.http.services.{traefik_name}-service.loadbalancer.server.port"] = "9000"
labels[f"traefik.http.services.{traefik_name}-service.loadbalancer.healthcheck.path"] = "/"
labels[f"traefik.http.services.{traefik_name}-service.loadbalancer.server.port"] = "4180"
return labels
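To make the label wiring above concrete, here is roughly the dictionary produced for a hypothetical outpost named proxy-outpost serving app.example.com; the healthcheck entries are the ones present on only one side of this diff:

traefik_name = "proxy-outpost"   # hypothetical outpost name
hosts = ["app.example.com"]      # hypothetical external host(s)

labels = {
    f"traefik.http.routers.{traefik_name}-router.rule": f"Host({','.join(hosts)})",
    f"traefik.http.routers.{traefik_name}-router.tls": "true",
    f"traefik.http.routers.{traefik_name}-router.service": f"{traefik_name}-service",
    f"traefik.http.services.{traefik_name}-service.loadbalancer.healthcheck.path": "/akprox/ping",
    f"traefik.http.services.{traefik_name}-service.loadbalancer.healthcheck.port": "9300",
    f"traefik.http.services.{traefik_name}-service.loadbalancer.server.port": "9000",
}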

View File

@ -14,8 +14,11 @@ from kubernetes.client import (
from kubernetes.client.models.networking_v1beta1_ingress_rule import NetworkingV1beta1IngressRule
from authentik.outposts.controllers.base import FIELD_MANAGER
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
from authentik.outposts.controllers.k8s.triggers import NeedsRecreate, NeedsUpdate
from authentik.outposts.controllers.k8s.base import (
KubernetesObjectReconciler,
NeedsRecreate,
NeedsUpdate,
)
from authentik.providers.proxy.models import ProxyMode, ProxyProvider
if TYPE_CHECKING:
@ -60,15 +63,8 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
have_hosts_tls = []
if current.spec.tls:
for tls_config in current.spec.tls:
if not tls_config:
continue
if tls_config.hosts:
if tls_config and tls_config.hosts:
have_hosts_tls += tls_config.hosts
if (
tls_config.secret_name
!= self.controller.outpost.config.kubernetes_ingress_secret_name
):
raise NeedsUpdate()
have_hosts_tls.sort()
if have_hosts != expected_hosts:
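The TLS portion of this reconciler reduces to comparing the live Ingress TLS blocks against the expected hosts and secret; a simplified sketch with the Kubernetes objects stubbed out and NeedsUpdate replaced by a boolean return value:

def tls_needs_update(current_tls, expected_hosts, expected_secret) -> bool:
    """Illustrative reduction of the TLS check; current_tls items carry
    .hosts and .secret_name attributes, as on an Ingress spec."""
    have_hosts_tls = []
    for tls_config in current_tls or []:
        if not tls_config:
            continue
        if tls_config.hosts:
            have_hosts_tls += tls_config.hosts
        if tls_config.secret_name != expected_secret:
            return True
    have_hosts_tls.sort()
    return have_hosts_tls != sorted(expected_hosts)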

Some files were not shown because too many files have changed in this diff.