Compare commits

..

1 Commit

Author SHA1 Message Date
aa80babfff release: 2023.4.0 2023-04-14 13:28:57 +03:00
319 changed files with 17002 additions and 28012 deletions

View File

@ -1,5 +1,5 @@
[bumpversion]
current_version = 2023.4.1
current_version = 2023.4.0
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)

View File

@ -6,4 +6,3 @@ dist/**
build/**
build_docs/**
Dockerfile
authentik/enterprise

View File

@ -1,9 +1,10 @@
---
name: Bug report
about: Create a report to help us improve
title: ""
title: ''
labels: bug
assignees: ""
assignees: ''
---
**Describe the bug**
@ -11,7 +12,6 @@ A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
@ -27,9 +27,8 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
**Additional context**
Add any other context about the problem here.

View File

@ -1,9 +1,10 @@
---
name: Feature request
about: Suggest an idea for this project
title: ""
title: ''
labels: enhancement
assignees: ""
assignees: ''
---
**Is your feature request related to a problem? Please describe.**

View File

@ -1,9 +1,10 @@
---
name: Question
about: Ask a question about a feature or specific configuration
title: ""
title: ''
labels: question
assignees: ""
assignees: ''
---
**Describe your question/**
@ -19,9 +20,8 @@ If applicable, add screenshots to help explain your problem.
Output of docker-compose logs or kubectl logs respectively
**Version and Deployment (please complete the following information):**
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
- authentik version: [e.g. 2021.8.5]
- Deployment: [e.g. docker-compose, helm]
**Additional context**
Add any other context about the problem here.

View File

@ -1,5 +1,5 @@
name: "Comment usage instructions on PRs"
description: "Comment usage instructions on PRs"
name: 'Comment usage instructions on PRs'
description: 'Comment usage instructions on PRs'
inputs:
tag:
@ -17,7 +17,7 @@ runs:
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: "github-actions[bot]"
comment-author: 'github-actions[bot]'
body-includes: authentik PR Installation instructions
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v2

View File

@ -1,5 +1,5 @@
name: "Prepare docker environment variables"
description: "Prepare docker environment variables"
name: 'Prepare docker environment variables'
description: 'Prepare docker environment variables'
outputs:
shouldBuild:
@ -51,14 +51,12 @@ runs:
version_family = ".".join(version.split(".")[:-1])
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-")
sha = os.environ["GITHUB_SHA"] if not "${{ github.event.pull_request.head.sha }}" else "${{ github.event.pull_request.head.sha }}"
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print("branchName=%s" % branch_name, file=_output)
print("branchNameContainer=%s" % safe_branch_name, file=_output)
print("timestamp=%s" % int(time()), file=_output)
print("sha=%s" % sha, file=_output)
print("shortHash=%s" % sha[:7], file=_output)
print("sha=%s" % os.environ["GITHUB_SHA"], file=_output)
print("shortHash=%s" % os.environ["GITHUB_SHA"][:7], file=_output)
print("shouldBuild=%s" % should_build, file=_output)
print("version=%s" % version, file=_output)
print("versionFamily=%s" % version_family, file=_output)

View File

@ -1,5 +1,5 @@
name: "Setup authentik testing environment"
description: "Setup authentik testing environment"
name: 'Setup authentik testing environment'
description: 'Setup authentik testing environment'
inputs:
postgresql_tag:
@ -18,13 +18,13 @@ runs:
- name: Setup python and restore poetry
uses: actions/setup-python@v3
with:
python-version: "3.11"
cache: "poetry"
python-version: '3.11'
cache: 'poetry'
- name: Setup node
uses: actions/setup-node@v3
uses: actions/setup-node@v3.1.0
with:
node-version: "20"
cache: "npm"
node-version: '18'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Setup dependencies
shell: bash

View File

@ -1,21 +1,23 @@
version: "3.7"
version: '3.7'
services:
postgresql:
image: docker.io/library/postgres:${PSQL_TAG:-12}
container_name: postgres
image: library/postgres:${PSQL_TAG:-12}
volumes:
- db-data:/var/lib/postgresql/data
- db-data:/var/lib/postgresql/data
environment:
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
POSTGRES_DB: authentik
ports:
- 5432:5432
- 5432:5432
restart: always
redis:
image: docker.io/library/redis
container_name: redis
image: library/redis
ports:
- 6379:6379
- 6379:6379
restart: always
volumes:

.github/dependabot.yml (vendored, 120 changed lines)
View File

@ -1,62 +1,62 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "ci:"
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: npm
directory: "/web"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "web:"
- package-ecosystem: npm
directory: "/website"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "website:"
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: docker
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "ci:"
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: npm
directory: "/web"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "web:"
- package-ecosystem: npm
directory: "/website"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "website:"
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"
- package-ecosystem: docker
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
reviewers:
- "@goauthentik/core"
commit-message:
prefix: "core:"

View File

@ -5,35 +5,15 @@ Please check the [Contributing guidelines](https://github.com/goauthentik/authen
-->
# Details
- **Does this resolve an issue?**
Resolves #
* **Does this resolve an issue?**
Resolves #
## Changes
### New Features
- Adds feature which does x, y, and z.
* Adds feature which does x, y, and z.
### Breaking Changes
* Adds breaking change which causes \<issue\>.
- Adds breaking change which causes \<issue\>.
## Checklist
- [ ] Local tests pass (`ak test authentik/`)
- [ ] The code has been formatted (`make lint-fix`)
If an API change has been made
- [ ] The API schema has been updated (`make gen-build`)
If changes to the frontend have been made
- [ ] The code has been formatted (`make web`)
- [ ] The translation files have been updated (`make i18n-extract`)
If applicable
- [ ] The documentation has been updated
- [ ] The documentation has been formatted (`make website`)
## Additional
Any further notes or comments you want to make.

View File

@ -6,11 +6,11 @@ git:
source_language: en
source_file: web/src/locales/en.po
# path expression to translation files, must contain <lang> placeholder
translation_files_expression: "web/src/locales/<lang>.po"
translation_files_expression: 'web/src/locales/<lang>.po'
- filter_type: file
# all supported i18n types: https://docs.transifex.com/formats
file_format: PO
source_language: en
source_file: locale/en/LC_MESSAGES/django.po
# path expression to translation files, must contain <lang> placeholder
translation_files_expression: "locale/<lang>/LC_MESSAGES/django.po"
translation_files_expression: 'locale/<lang>/LC_MESSAGES/django.po'

View File

@ -23,14 +23,13 @@ jobs:
fail-fast: false
matrix:
job:
- bandit
- black
- codespell
- isort
- pending-migrations
- pylint
- black
- isort
- bandit
- pyright
- ruff
- pending-migrations
- codespell
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@ -61,7 +60,7 @@ jobs:
cp authentik/lib/default.yml local.env.yml
cp -R .github ..
cp -R scripts ..
git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
git checkout $(git describe --abbrev=0 --match 'version/*')
rm -rf .github/ scripts/
mv ../.github ../scripts .
- name: Setup authentik env (ensure stable deps are installed)
@ -187,8 +186,6 @@ jobs:
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
@ -214,7 +211,6 @@ jobs:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
@ -231,8 +227,6 @@ jobs:
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
@ -258,7 +252,6 @@ jobs:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-arm64
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.sha }}-arm64
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}

View File

@ -17,7 +17,7 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
with:
go-version-file: "go.mod"
go-version: "^1.17"
- name: Prepare and generate API
run: |
# Create folder structure for go embeds
@ -30,14 +30,13 @@ jobs:
uses: golangci/golangci-lint-action@v3
with:
args: --timeout 5000s
skip-pkg-cache: true
test-unittest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
with:
go-version-file: "go.mod"
go-version: "^1.17"
- name: Generate API
run: make gen-client-go
- name: Go unittests
@ -61,11 +60,11 @@ jobs:
- proxy
- ldap
- radius
arch:
- "linux/amd64"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
@ -95,7 +94,7 @@ jobs:
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: linux/amd64,linux/arm64
platforms: ${{ matrix.arch }}
context: .
build-binary:
timeout-minutes: 120
@ -113,14 +112,12 @@ jobs:
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-go@v4
with:
go-version-file: "go.mod"
go-version: "^1.17"
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
node-version: "18"
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Generate API
@ -136,3 +133,7 @@ jobs:
export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }}
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- uses: actions/upload-artifact@v3
with:
name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
path: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}

View File

@ -17,8 +17,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '18'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@ -33,8 +33,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '18'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@ -49,8 +49,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '18'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
@ -65,8 +65,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '18'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: |
@ -97,8 +97,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '18'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci

View File

@ -17,8 +17,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '18'
cache: 'npm'
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
@ -31,8 +31,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '18'
cache: 'npm'
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci
@ -52,8 +52,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '18'
cache: 'npm'
cache-dependency-path: website/package-lock.json
- working-directory: website/
run: npm ci

View File

@ -2,11 +2,12 @@ name: "CodeQL"
on:
push:
branches: [main, "*", next, version*]
branches: [ main, '*', next, version* ]
pull_request:
branches: [main]
# The branches below must be a subset of the branches above
branches: [ main ]
schedule:
- cron: "30 6 * * 5"
- cron: '30 6 * * 5'
jobs:
analyze:
@ -20,17 +21,40 @@ jobs:
strategy:
fail-fast: false
matrix:
language: ["go", "javascript", "python"]
language: [ 'go', 'javascript', 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more:
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
- name: Autobuild
uses: github/codeql-action/autobuild@v2
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2

View File

@ -2,7 +2,7 @@ name: ghcr-retention
on:
schedule:
- cron: "0 0 * * *" # every day at midnight
- cron: '0 0 * * *' # every day at midnight
workflow_dispatch:
jobs:

View File

@ -57,7 +57,7 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
with:
go-version-file: "go.mod"
go-version: "^1.17"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
@ -107,11 +107,11 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-go@v4
with:
go-version-file: "go.mod"
go-version: "^1.17"
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
cache: "npm"
node-version: '18'
cache: 'npm'
cache-dependency-path: web/package-lock.json
- name: Build web
working-directory: web/
@ -173,5 +173,5 @@ jobs:
SENTRY_PROJECT: authentik
with:
version: authentik@${{ steps.ev.outputs.version }}
sourcemaps: "./web/dist"
url_prefix: "~/static/dist"
sourcemaps: './web/dist'
url_prefix: '~/static/dist'

View File

@ -3,7 +3,7 @@ name: authentik-on-tag
on:
push:
tags:
- "version/*"
- 'version/*'
jobs:
build:

View File

@ -1,34 +0,0 @@
name: authentik-translation-advice
on:
pull_request:
branches:
- main
paths:
- "!**"
- "locale/**"
- "web/src/locales/**"
jobs:
post-comment:
runs-on: ubuntu-latest
steps:
- name: Find Comment
uses: peter-evans/find-comment@v2
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: "github-actions[bot]"
body-includes: authentik translations instructions
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v2
with:
comment-id: ${{ steps.fc.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
### authentik translations instructions
Thanks for your pull request!
authentik translations are handled using [Transifex](https://explore.transifex.com/authentik/authentik/). Please edit translations over there and they'll be included automatically.

View File

@ -1,9 +1,12 @@
name: authentik-backend-translate-compile
on:
push:
branches: [main]
branches: [ main ]
paths:
- "locale/**"
- '/locale/'
pull_request:
paths:
- '/locale/'
workflow_dispatch:
env:
@ -21,7 +24,7 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run compile
run: poetry run ak compilemessages
run: poetry run ./manage.py compilemessages
- name: Create Pull Request
uses: peter-evans/create-pull-request@v5
id: cpr

View File

@ -1,9 +1,9 @@
name: authentik-web-api-publish
on:
push:
branches: [main]
branches: [ main ]
paths:
- "schema.yml"
- 'schema.yml'
workflow_dispatch:
jobs:
build:
@ -14,8 +14,8 @@ jobs:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
- uses: actions/setup-node@v3.6.0
with:
node-version: "20"
registry-url: "https://registry.npmjs.org"
node-version: '18'
registry-url: 'https://registry.npmjs.org'
- name: Generate API Client
run: make gen-client-ts
- name: Publish package

View File

@ -1,5 +1,5 @@
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/node:20 as website-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as website-builder
COPY ./website /work/website/
COPY ./blueprints /work/blueprints/
@ -10,7 +10,7 @@ WORKDIR /work/website
RUN npm ci && npm run build-docs-only
# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/node:20 as web-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:18 as web-builder
COPY ./web /work/web/
COPY ./website /work/website/
@ -31,7 +31,7 @@ RUN pip install --no-cache-dir poetry && \
poetry export -f requirements.txt --dev --output requirements-dev.txt
# Stage 4: Build go proxy
FROM docker.io/golang:1.20.4-bullseye AS go-builder
FROM docker.io/golang:1.20.3-bullseye AS go-builder
WORKDIR /work
@ -83,7 +83,7 @@ RUN apt-get update && \
# Required for runtime
apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
# Required for bootstrap & healtcheck
apt-get install -y --no-install-recommends runit && \
apt-get install -y --no-install-recommends curl runit && \
pip install --no-cache-dir -r /requirements.txt && \
apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev && \
apt-get autoremove --purge -y && \

View File

@ -1,11 +1,6 @@
Copyright (c) 2023 Jens Langhammer
MIT License
Portions of this software are licensed as follows:
* All content residing under the "website/" directory of this repository is licensed under "Creative Commons: CC BY-SA 4.0 license".
* All content that resides under the "authentik/enterprise/" directory of this repository, if that directory exists, is licensed under the license defined in "authentik/enterprise/LICENSE".
* All client-side JavaScript (when served directly or after being compiled, arranged, augmented, or combined), is licensed under the "MIT Expat" license.
* All third party components incorporated into the authentik are licensed under the original license provided by the owner of the applicable component.
* Content outside of the above mentioned directories or restrictions above is available under the "MIT" license as defined below.
Copyright (c) 2022 Jens Langhammer
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@ -3,7 +3,6 @@ PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.npm_version)
PY_SOURCES = authentik tests scripts lifecycle
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
-I .github/codespell-words.txt \
@ -39,14 +38,13 @@ test:
coverage report
lint-fix:
isort authentik $(PY_SOURCES)
black authentik $(PY_SOURCES)
ruff authentik $(PY_SOURCES)
isort authentik tests scripts lifecycle
black authentik tests scripts lifecycle
codespell -w $(CODESPELL_ARGS)
lint:
pylint $(PY_SOURCES)
bandit -r $(PY_SOURCES) -x node_modules
pylint authentik tests lifecycle
bandit -r authentik tests lifecycle -x node_modules
golangci-lint run -v
migrate:
@ -173,6 +171,7 @@ website-watch:
# These targets are use by GitHub actions to allow usage of matrix
# which makes the YAML File a lot smaller
PY_SOURCES=authentik tests lifecycle
ci--meta-debug:
python -V
node --version
@ -183,9 +182,6 @@ ci-pylint: ci--meta-debug
ci-black: ci--meta-debug
black --check $(PY_SOURCES)
ci-ruff: ci--meta-debug
ruff check $(PY_SOURCES)
ci-codespell: ci--meta-debug
codespell $(CODESPELL_ARGS) -s

View File

@ -2,7 +2,7 @@
from os import environ
from typing import Optional
__version__ = "2023.4.1"
__version__ = "2023.4.0"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@ -29,7 +29,6 @@ class Capabilities(models.TextChoices):
CAN_GEO_IP = "can_geo_ip"
CAN_IMPERSONATE = "can_impersonate"
CAN_DEBUG = "can_debug"
IS_ENTERPRISE = "is_enterprise"
class ErrorReportingConfigSerializer(PassiveSerializer):
@ -71,8 +70,6 @@ class ConfigView(APIView):
caps.append(Capabilities.CAN_IMPERSONATE)
if settings.DEBUG: # pragma: no cover
caps.append(Capabilities.CAN_DEBUG)
if "authentik.enterprise" in settings.INSTALLED_APPS:
caps.append(Capabilities.IS_ENTERPRISE)
return caps
def get_config(self) -> ConfigSerializer:

View File

@ -49,8 +49,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
context = self.instance.context if self.instance else {}
valid, logs = Importer(content, context).validate()
if not valid:
text_logs = "\n".join([x["event"] for x in logs])
raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs}))
raise ValidationError(_("Failed to validate blueprint"), *[x["msg"] for x in logs])
return content
def validate(self, attrs: dict) -> dict:

View File

@ -1,17 +1,12 @@
"""Generate JSON Schema for blueprints"""
from json import dumps
from typing import Any
from json import dumps, loads
from pathlib import Path
from django.core.management.base import BaseCommand, no_translations
from django.db.models import Model
from drf_jsonschema_serializer.convert import field_to_converter
from rest_framework.fields import Field, JSONField, UUIDField
from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.blueprints.v1.meta.registry import registry
from authentik.lib.models import SerializerModel
LOGGER = get_logger()
@ -21,138 +16,21 @@ class Command(BaseCommand):
schema: dict
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.schema = {
"$schema": "http://json-schema.org/draft-07/schema",
"$id": "https://goauthentik.io/blueprints/schema.json",
"type": "object",
"title": "authentik Blueprint schema",
"required": ["version", "entries"],
"properties": {
"version": {
"$id": "#/properties/version",
"type": "integer",
"title": "Blueprint version",
"default": 1,
},
"metadata": {
"$id": "#/properties/metadata",
"type": "object",
"required": ["name"],
"properties": {
"name": {"type": "string"},
"labels": {"type": "object", "additionalProperties": {"type": "string"}},
},
},
"context": {
"$id": "#/properties/context",
"type": "object",
"additionalProperties": True,
},
"entries": {
"type": "array",
"items": {
"oneOf": [],
},
},
},
"$defs": {},
}
@no_translations
def handle(self, *args, **options):
"""Generate JSON Schema for blueprints"""
self.build()
self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default))
path = Path(__file__).parent.joinpath("./schema_template.json")
with open(path, "r", encoding="utf-8") as _template_file:
self.schema = loads(_template_file.read())
self.set_model_allowed()
self.stdout.write(dumps(self.schema, indent=4))
@staticmethod
def json_default(value: Any) -> Any:
"""Helper that handles gettext_lazy strings that JSON doesn't handle"""
return str(value)
def build(self):
"""Build all models into the schema"""
def set_model_allowed(self):
"""Set model enum"""
model_names = []
for model in registry.get_models():
if model._meta.abstract:
continue
if not is_model_allowed(model):
continue
model_instance: Model = model()
if not isinstance(model_instance, SerializerModel):
continue
serializer = model_instance.serializer()
model_path = f"{model._meta.app_label}.{model._meta.model_name}"
self.schema["properties"]["entries"]["items"]["oneOf"].append(
self.template_entry(model_path, serializer)
)
def template_entry(self, model_path: str, serializer: Serializer) -> dict:
"""Template entry for a single model"""
model_schema = self.to_jsonschema(serializer)
model_schema["required"] = []
def_name = f"model_{model_path}"
def_path = f"#/$defs/{def_name}"
self.schema["$defs"][def_name] = model_schema
return {
"type": "object",
"required": ["model", "attrs"],
"properties": {
"model": {"const": model_path},
"id": {"type": "string"},
"state": {
"type": "string",
"enum": ["absent", "present", "created"],
"default": "present",
},
"conditions": {"type": "array", "items": {"type": "boolean"}},
"attrs": {"$ref": def_path},
"identifiers": {"$ref": def_path},
},
}
def field_to_jsonschema(self, field: Field) -> dict:
"""Convert a single field to json schema"""
if isinstance(field, Serializer):
result = self.to_jsonschema(field)
else:
try:
converter = field_to_converter[field]
result = converter.convert(field)
except KeyError:
if isinstance(field, JSONField):
result = {"type": "object", "additionalProperties": True}
elif isinstance(field, UUIDField):
result = {"type": "string", "format": "uuid"}
else:
raise
if field.label:
result["title"] = field.label
if field.help_text:
result["description"] = field.help_text
return self.clean_result(result)
def clean_result(self, result: dict) -> dict:
"""Remove enumNames from result, recursively"""
result.pop("enumNames", None)
for key, value in result.items():
if isinstance(value, dict):
result[key] = self.clean_result(value)
return result
def to_jsonschema(self, serializer: Serializer) -> dict:
"""Convert serializer to json schema"""
properties = {}
required = []
for name, field in serializer.fields.items():
if field.read_only:
continue
sub_schema = self.field_to_jsonschema(field)
if field.required:
required.append(name)
properties[name] = sub_schema
result = {"type": "object", "properties": properties}
if required:
result["required"] = required
return result
model_names.append(f"{model._meta.app_label}.{model._meta.model_name}")
model_names.sort()
self.schema["properties"]["entries"]["items"]["properties"]["model"]["enum"] = model_names

View File

@ -0,0 +1,105 @@
{
"$schema": "http://json-schema.org/draft-07/schema",
"$id": "http://example.com/example.json",
"type": "object",
"title": "authentik Blueprint schema",
"default": {},
"required": [
"version",
"entries"
],
"properties": {
"version": {
"$id": "#/properties/version",
"type": "integer",
"title": "Blueprint version",
"default": 1
},
"metadata": {
"$id": "#/properties/metadata",
"type": "object",
"required": [
"name"
],
"properties": {
"name": {
"type": "string"
},
"labels": {
"type": "object"
}
}
},
"context": {
"$id": "#/properties/context",
"type": "object",
"additionalProperties": true
},
"entries": {
"type": "array",
"items": {
"$id": "#entry",
"type": "object",
"required": [
"model"
],
"properties": {
"model": {
"type": "string",
"enum": [
"placeholder"
]
},
"id": {
"type": "string"
},
"state": {
"type": "string",
"enum": [
"absent",
"present",
"created"
],
"default": "present"
},
"conditions": {
"type": "array",
"items": {
"type": "boolean"
}
},
"attrs": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "Commonly available field, may not exist on all models"
}
},
"default": {},
"additionalProperties": true
},
"identifiers": {
"type": "object",
"default": {},
"properties": {
"pk": {
"description": "Commonly available field, may not exist on all models",
"anyOf": [
{
"type": "number"
},
{
"type": "string",
"format": "uuid"
}
]
}
},
"additionalProperties": true
}
}
}
}
}
}

View File

@ -6,6 +6,7 @@ from pathlib import Path
import django.contrib.postgres.fields
from dacite.core import from_dict
from django.apps.registry import Apps
from django.conf import settings
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from yaml import load
@ -14,7 +15,7 @@ from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_SYSTEM
from authentik.lib.config import CONFIG
def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
def check_blueprint_v1_file(BlueprintInstance: type["BlueprintInstance"], path: Path):
"""Check if blueprint should be imported"""
from authentik.blueprints.models import BlueprintInstanceStatus
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata

View File

@ -1,20 +0,0 @@
# Generated by Django 4.1.7 on 2023-04-28 10:49
from django.db import migrations, models
from authentik.lib.migrations import fallback_names
class Migration(migrations.Migration):
dependencies = [
("authentik_blueprints", "0002_blueprintinstance_content"),
]
operations = [
migrations.RunPython(fallback_names("authentik_blueprints", "blueprintinstance", "name")),
migrations.AlterField(
model_name="blueprintinstance",
name="name",
field=models.TextField(unique=True),
),
]

View File

@ -57,7 +57,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
instance_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
name = models.TextField(unique=True)
name = models.TextField()
metadata = models.JSONField(default=dict)
path = models.TextField(default="", blank=True)
content = models.TextField(default="", blank=True)

View File

@ -67,7 +67,4 @@ class TestBlueprintsV1API(APITestCase):
},
)
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(
res.content.decode(),
{"content": ["Failed to validate blueprint: Invalid blueprint version"]},
)
self.assertJSONEqual(res.content.decode(), {"content": ["Failed to validate blueprint"]})

View File

@ -299,7 +299,7 @@ class Importer:
orig_import = deepcopy(self.__import)
if self.__import.version != 1:
self.logger.warning("Invalid blueprint version")
return False, [{"event": "Invalid blueprint version"}]
return False, []
with (
transaction_rollback(),
capture_logs() as logs,

View File

@ -101,10 +101,7 @@ def blueprints_find():
"""Find blueprints and return valid ones"""
blueprints = []
root = Path(CONFIG.y("blueprints_dir"))
for path in root.rglob("**/*.yaml"):
# Check if any part in the path starts with a dot and assume a hidden file
if any(part for part in path.parts if part.startswith(".")):
continue
for path in root.glob("**/*.yaml"):
LOGGER.debug("found blueprint", path=str(path))
with open(path, "r", encoding="utf-8") as blueprint_file:
try:
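
The hidden-file check in the first variant works on pathlib's parts tuple: if any path component starts with a dot, the blueprint is treated as hidden and skipped. A brief illustration (the paths are made up):

from pathlib import Path

visible = Path("blueprints/default/flow.yaml")
hidden = Path("blueprints/.backup/flow.yaml")

# parts splits a path into its components, e.g. ('blueprints', 'default', 'flow.yaml')
print(any(part.startswith(".") for part in visible.parts))  # False -> kept
print(any(part.startswith(".") for part in hidden.parts))   # True  -> skipped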

View File

@ -93,6 +93,7 @@ class PropertyMappingViewSet(
{
"name": subclass._meta.verbose_name,
"description": subclass.__doc__,
# pyright: reportGeneralTypeIssues=false
"component": subclass().component,
"model_name": subclass._meta.model_name,
}

View File

@ -25,6 +25,7 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
def get_component(self, obj: Provider) -> str: # pragma: no cover
"""Get object component so that we know how to edit the object"""
# pyright: reportGeneralTypeIssues=false
if obj.__class__ == Provider:
return ""
return obj.component

View File

@ -40,6 +40,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
def get_component(self, obj: Source) -> str:
"""Get object component so that we know how to edit the object"""
# pyright: reportGeneralTypeIssues=false
if obj.__class__ == Source:
return ""
return obj.component
@ -138,6 +139,7 @@ class SourceViewSet(
component = subclass.__bases__[0]().component
else:
component = subclass().component
# pyright: reportGeneralTypeIssues=false
data.append(
{
"name": subclass._meta.verbose_name,

View File

@ -56,6 +56,7 @@ class UsedByMixin:
# pylint: disable=too-many-locals
def used_by(self, request: Request, *args, **kwargs) -> Response:
"""Get a list of all objects that use this object"""
# pyright: reportGeneralTypeIssues=false
model: Model = self.get_object()
used_by = []
shadows = []

View File

@ -13,7 +13,6 @@ from authentik.core.views.interface import FlowInterfaceView, InterfaceView
from authentik.core.views.session import EndSessionView
from authentik.root.asgi_middleware import SessionMiddleware
from authentik.root.messages.consumer import MessageConsumer
from authentik.root.middleware import ChannelsLoggingMiddleware
urlpatterns = [
path(
@ -71,10 +70,7 @@ urlpatterns = [
websocket_urlpatterns = [
path(
"ws/client/",
ChannelsLoggingMiddleware(
CookieMiddleware(SessionMiddleware(AuthMiddleware(MessageConsumer.as_asgi())))
),
"ws/client/", CookieMiddleware(SessionMiddleware(AuthMiddleware(MessageConsumer.as_asgi())))
),
]

View File

@ -160,7 +160,6 @@ class CertificateKeyPairSerializer(ModelSerializer):
"managed",
]
extra_kwargs = {
"managed": {"read_only": True},
"key_data": {"write_only": True},
"certificate_data": {"write_only": True},
}

View File

@ -2,6 +2,8 @@
from django.db import migrations
from authentik.lib.generators import generate_id
class Migration(migrations.Migration):
dependencies = [

View File

@ -1,20 +0,0 @@
# Generated by Django 4.1.7 on 2023-04-28 10:49
from django.db import migrations, models
from authentik.lib.migrations import fallback_names
class Migration(migrations.Migration):
dependencies = [
("authentik_crypto", "0003_certificatekeypair_managed"),
]
operations = [
migrations.RunPython(fallback_names("authentik_crypto", "certificatekeypair", "name")),
migrations.AlterField(
model_name="certificatekeypair",
name="name",
field=models.TextField(unique=True),
),
]

View File

@ -26,7 +26,7 @@ class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):
kp_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
name = models.TextField(unique=True)
name = models.TextField()
certificate_data = models.TextField(help_text=_("PEM-encoded Certificate data"))
key_data = models.TextField(
help_text=_(

View File

@ -37,22 +37,20 @@ class TestCrypto(APITestCase):
keypair = create_test_cert()
self.assertTrue(
CertificateKeyPairSerializer(
instance=keypair,
data={
"name": keypair.name,
"certificate_data": keypair.certificate_data,
"key_data": keypair.key_data,
},
}
).is_valid()
)
self.assertFalse(
CertificateKeyPairSerializer(
instance=keypair,
data={
"name": keypair.name,
"certificate_data": "test",
"key_data": "test",
},
}
).is_valid()
)
@ -248,6 +246,7 @@ class TestCrypto(APITestCase):
with open(f"{temp_dir}/foo.bar/privkey.pem", "w+", encoding="utf-8") as _key:
_key.write(builder.private_key)
with CONFIG.patch("cert_discovery_dir", temp_dir):
# pyright: reportGeneralTypeIssues=false
certificate_discovery() # pylint: disable=no-value-for-parameter
keypair: CertificateKeyPair = CertificateKeyPair.objects.filter(
managed=MANAGED_DISCOVERED % "foo"

View File

@ -1,45 +0,0 @@
The authentik Enterprise Edition (EE) license (the “EE License”)
Copyright (c) 2022-present Authentik Security Inc.
With regard to the authentik Software:
This software and associated documentation files (the "Software") may only be
used in production, if you (and any entity that you represent) have agreed to,
and are in compliance with, the Authentik Subscription Terms of Service, available
at https://goauthentik.io/legal/terms (the "EE Terms"), or other
agreement governing the use of the Software, as agreed by you and authentik Security Inc,
and otherwise have a valid authentik Enterprise Edition subscription for the
correct number of user seats. Subject to the foregoing sentence, you are free to
modify this Software and publish patches to the Software. You agree that Authentik
Security Inc. and/or its licensors (as applicable) retain all right, title and interest
in and to all such modifications and/or patches, and all such modifications and/or
patches may only be used, copied, modified, displayed, distributed, or otherwise
exploited with a valid authentik Enterprise Edition subscription for the correct
number of user seats. Notwithstanding the foregoing, you may copy and modify
the Software for development and testing purposes, without requiring a
subscription. You agree that Authentik Security Inc. and/or its
licensors (as applicable) retain all right, title and interest in
and to all such modifications. You are not granted any other rights
beyond what is expressly stated herein. Subject to the
foregoing, it is forbidden to copy, merge, publish, distribute, sublicense,
and/or sell the Software.
This EE License applies only to the part of this Software that is not
distributed as part of authentik Open Source (OSS). Any part of this Software
distributed as part of authentik OSS or is served client-side as an image, font,
cascading stylesheet (CSS), file which produces or is compiled, arranged,
augmented, or combined into client-side JavaScript, in whole or in part, is
copyrighted under the MIT license. The full text of this EE License shall
be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
For all third party components incorporated into the authentik Software, those
components are licensed under the original license provided by the owner of the
applicable component.

View File

@ -1,11 +0,0 @@
"""Enterprise app config"""
from authentik.blueprints.apps import ManagedAppConfig
class AuthentikEnterpriseConfig(ManagedAppConfig):
"""Enterprise app config"""
name = "authentik.enterprise"
label = "authentik_enterprise"
verbose_name = "authentik Enterprise"
default = True

View File

@ -1 +0,0 @@
"""Enterprise additional settings"""

View File

@ -11,6 +11,7 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor
import authentik.events.models
import authentik.lib.models
from authentik.events.models import EventAction, NotificationSeverity, TransportMode
from authentik.lib.migrations import progress_bar

View File

@ -219,13 +219,13 @@ class Event(SerializerModel, ExpiringModel):
self.context["http_request"] = {
"path": request.path,
"method": request.method,
"args": cleanse_dict(QueryDict(request.META.get("QUERY_STRING", ""))),
"args": QueryDict(request.META.get("QUERY_STRING", "")),
}
# Special case for events created during flow execution
# since they keep the http query within a wrapped query
if QS_QUERY in self.context["http_request"]["args"]:
wrapped = self.context["http_request"]["args"][QS_QUERY]
self.context["http_request"]["args"] = cleanse_dict(QueryDict(wrapped))
self.context["http_request"]["args"] = QueryDict(wrapped)
if hasattr(request, "tenant"):
tenant: Tenant = request.tenant
# Because self.created only gets set on save, we can't use it's value here
@ -453,6 +453,7 @@ class NotificationTransport(SerializerModel):
try:
from authentik.stages.email.tasks import send_mail
# pyright: reportGeneralTypeIssues=false
return send_mail(mail.__dict__) # pylint: disable=no-value-for-parameter
except (SMTPException, ConnectionError, OSError) as exc:
raise NotificationTransportError(exc) from exc
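
The first hunk in this file differs in whether the parsed query string is passed through cleanse_dict before being stored on the event; the event tests later in this compare exercise exactly that behaviour, with a token query parameter replaced by a cleansed substitute. A rough stand-in for that masking step, not authentik's actual cleanse_dict, using only the standard library:

from re import IGNORECASE, compile as re_compile
from urllib.parse import parse_qsl

# Pattern similar in spirit to Django's SafeExceptionReporterFilter.hidden_settings
SENSITIVE = re_compile("api|token|key|secret|password|signature", IGNORECASE)
CLEANSED = "********************"

def mask_args(query_string: str) -> dict:
    """Parse a query string and mask the values of sensitive-looking keys."""
    return {
        key: (CLEANSED if SENSITIVE.search(key) else value)
        for key, value in parse_qsl(query_string)
    }

print(mask_args("token=abc123&next=/"))
# {'token': '********************', 'next': '/'}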

View File

@ -57,6 +57,10 @@ def event_trigger_handler(event_uuid: str, trigger_name: str):
LOGGER.debug("e(trigger): attempting to prevent infinite loop", trigger=trigger)
return
if not trigger.group:
LOGGER.debug("e(trigger): trigger has no group", trigger=trigger)
return
LOGGER.debug("e(trigger): checking if trigger applies", trigger=trigger)
try:
user = User.objects.filter(pk=event.user.get("pk")).first() or get_anonymous_user()
@ -73,10 +77,6 @@ def event_trigger_handler(event_uuid: str, trigger_name: str):
if not result.passing:
return
if not trigger.group:
LOGGER.debug("e(trigger): trigger has no group", trigger=trigger)
return
LOGGER.debug("e(trigger): event trigger matched", trigger=trigger)
# Create the notification objects
for transport in trigger.transports.all():

View File

@ -1,25 +1,17 @@
"""event tests"""
from urllib.parse import urlencode
from django.contrib.contenttypes.models import ContentType
from django.test import RequestFactory, TestCase
from django.views.debug import SafeExceptionReporterFilter
from django.test import TestCase
from guardian.shortcuts import get_anonymous_user
from authentik.core.models import Group
from authentik.events.models import Event
from authentik.flows.views.executor import QS_QUERY
from authentik.lib.generators import generate_id
from authentik.policies.dummy.models import DummyPolicy
from authentik.tenants.models import Tenant
class TestEvents(TestCase):
"""Test Event"""
def setUp(self) -> None:
self.factory = RequestFactory()
def test_new_with_model(self):
"""Create a new Event passing a model as kwarg"""
test_model = Group.objects.create(name="test")
@ -48,58 +40,3 @@ class TestEvents(TestCase):
model_content_type = ContentType.objects.get_for_model(temp_model)
self.assertEqual(event.context.get("model").get("app"), model_content_type.app_label)
self.assertEqual(event.context.get("model").get("pk"), temp_model.pk.hex)
def test_from_http_basic(self):
"""Test plain from_http"""
event = Event.new("unittest").from_http(self.factory.get("/"))
self.assertEqual(
event.context, {"http_request": {"args": {}, "method": "GET", "path": "/"}}
)
def test_from_http_clean_querystring(self):
"""Test cleansing query string"""
request = self.factory.get(f"/?token={generate_id()}")
event = Event.new("unittest").from_http(request)
self.assertEqual(
event.context,
{
"http_request": {
"args": {"token": SafeExceptionReporterFilter.cleansed_substitute},
"method": "GET",
"path": "/",
}
},
)
def test_from_http_clean_querystring_flow(self):
"""Test cleansing query string (nested query string like flow executor)"""
nested_qs = {"token": generate_id()}
request = self.factory.get(f"/?{QS_QUERY}={urlencode(nested_qs)}")
event = Event.new("unittest").from_http(request)
self.assertEqual(
event.context,
{
"http_request": {
"args": {"token": SafeExceptionReporterFilter.cleansed_substitute},
"method": "GET",
"path": "/",
}
},
)
def test_from_http_tenant(self):
"""Test from_http tenant"""
# Test tenant
request = self.factory.get("/")
tenant = Tenant(domain="test-tenant")
setattr(request, "tenant", tenant)
event = Event.new("unittest").from_http(request)
self.assertEqual(
event.tenant,
{
"app": "authentik_tenants",
"model_name": "tenant",
"name": "Tenant test-tenant",
"pk": tenant.pk.hex,
},
)

View File

@ -27,6 +27,7 @@ class StageSerializer(ModelSerializer, MetaNameSerializer):
def get_component(self, obj: Stage) -> str:
"""Get object type so that we know how to edit the object"""
# pyright: reportGeneralTypeIssues=false
if obj.__class__ == Stage:
return ""
return obj.component

View File

@ -182,4 +182,5 @@ class HttpChallengeResponse(JsonResponse):
"""Subclass of JsonResponse that uses the `DataclassEncoder`"""
def __init__(self, challenge, **kwargs) -> None:
# pyright: reportGeneralTypeIssues=false
super().__init__(challenge.data, encoder=DataclassEncoder, **kwargs)

View File

@ -177,6 +177,7 @@ class ConfigLoader:
# Walk each component of the path
path_parts = path.split(sep)
for comp in path_parts[:-1]:
# pyright: reportGeneralTypeIssues=false
if comp not in root:
root[comp] = {}
root = root.get(comp, {})

View File

@ -67,7 +67,7 @@ def sentry_init(**sentry_init_kwargs):
ArgvIntegration(),
StdlibIntegration(),
DjangoIntegration(transaction_style="function_name"),
CeleryIntegration(),
CeleryIntegration(monitor_beat_tasks=True),
RedisIntegration(),
ThreadingIntegration(propagate_hub=True),
SocketIntegration(),

View File

@ -31,6 +31,7 @@ class ServiceConnectionSerializer(ModelSerializer, MetaNameSerializer):
def get_component(self, obj: OutpostServiceConnection) -> str:
"""Get object type so that we know how to edit the object"""
# pyright: reportGeneralTypeIssues=false
if obj.__class__ == OutpostServiceConnection:
return ""
return obj.component
@ -76,6 +77,7 @@ class ServiceConnectionViewSet(
data = []
for subclass in all_subclasses(self.queryset.model):
subclass: OutpostServiceConnection
# pyright: reportGeneralTypeIssues=false
data.append(
{
"name": subclass._meta.verbose_name,

View File

@ -13,6 +13,7 @@ from paramiko.ssh_exception import SSHException
from structlog.stdlib import get_logger
from yaml import safe_dump
from authentik import __version__
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.controllers.base import BaseClient, BaseController, ControllerException
from authentik.outposts.docker_ssh import DockerInlineSSH, SSHManagedExternallyException

View File

@ -22,7 +22,7 @@ from kubernetes.client import (
V1SecurityContext,
)
from authentik import get_full_version
from authentik import __version__, get_full_version
from authentik.outposts.controllers.base import FIELD_MANAGER
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
from authentik.outposts.controllers.k8s.triggers import NeedsUpdate

View File

@ -128,7 +128,7 @@ class OutpostServiceConnection(models.Model):
@property
def state_key(self) -> str:
"""Key used to save connection state in cache"""
return f"goauthentik.io/outposts/service_connection_state/{self.pk.hex}"
return f"outpost_service_connection_{self.pk.hex}"
@property
def state(self) -> OutpostServiceConnectionState:
@ -278,7 +278,7 @@ class Outpost(SerializerModel, ManagedModel):
@property
def state_cache_prefix(self) -> str:
"""Key by which the outposts status is saved"""
return f"goauthentik.io/outposts/state/{self.uuid.hex}"
return f"goauthentik.io/outposts/{self.uuid.hex}_state"
@property
def state(self) -> list["OutpostState"]:
@ -433,19 +433,19 @@ class OutpostState:
@staticmethod
def for_outpost(outpost: Outpost) -> list["OutpostState"]:
"""Get all states for an outpost"""
keys = cache.keys(f"{outpost.state_cache_prefix}/*")
keys = cache.keys(f"{outpost.state_cache_prefix}_*")
if not keys:
return []
states = []
for key in keys:
instance_uid = key.replace(f"{outpost.state_cache_prefix}/", "")
instance_uid = key.replace(f"{outpost.state_cache_prefix}_", "")
states.append(OutpostState.for_instance_uid(outpost, instance_uid))
return states
@staticmethod
def for_instance_uid(outpost: Outpost, uid: str) -> "OutpostState":
"""Get state for a single instance"""
key = f"{outpost.state_cache_prefix}/{uid}"
key = f"{outpost.state_cache_prefix}_{uid}"
default_data = {"uid": uid, "channel_ids": []}
data = cache.get(key, default_data)
if isinstance(data, str):
@ -458,10 +458,10 @@ class OutpostState:
def save(self, timeout=OUTPOST_HELLO_INTERVAL):
"""Save current state to cache"""
full_key = f"{self._outpost.state_cache_prefix}/{self.uid}"
full_key = f"{self._outpost.state_cache_prefix}_{self.uid}"
return cache.set(full_key, asdict(self), timeout=timeout)
def delete(self):
"""Manually delete from cache, used on channel disconnect"""
full_key = f"{self._outpost.state_cache_prefix}/{self.uid}"
full_key = f"{self._outpost.state_cache_prefix}_{self.uid}"
cache.delete(full_key)
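
The hunks above only swap the separator inside the outpost state cache keys. The invariant that matters is that save() and for_outpost() agree on prefix plus separator, because for_outpost() recovers each instance uid by stripping that prefix back off the listed keys. A minimal sketch of the round trip, with a plain dict standing in for the cache (names and values are illustrative):

from uuid import uuid4

fake_cache: dict[str, dict] = {}
state_cache_prefix = f"goauthentik.io/outposts/state/{uuid4().hex}"

def save_state(uid: str, data: dict) -> None:
    # Mirrors OutpostState.save(): key is prefix + separator + instance uid
    fake_cache[f"{state_cache_prefix}/{uid}"] = data

def list_state_uids() -> list[str]:
    # Mirrors OutpostState.for_outpost(): list keys by prefix, strip it to get the uid
    return [
        key.replace(f"{state_cache_prefix}/", "")
        for key in fake_cache
        if key.startswith(f"{state_cache_prefix}/")
    ]

save_state("instance-1", {"uid": "instance-1", "channel_ids": []})
print(list_state_uids())  # ['instance-1']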

View File

@ -45,7 +45,7 @@ from authentik.providers.proxy.controllers.kubernetes import ProxyKubernetesCont
from authentik.root.celery import CELERY_APP
LOGGER = get_logger()
CACHE_KEY_OUTPOST_DOWN = "goauthentik.io/outposts/teardown/%s"
CACHE_KEY_OUTPOST_DOWN = "outpost_teardown_%s"
def controller_for_outpost(outpost: Outpost) -> Optional[type[BaseController]]:
@ -148,8 +148,6 @@ def outpost_controller(
except (ControllerException, ServiceConnectionInvalid) as exc:
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
else:
if from_cache:
cache.delete(CACHE_KEY_OUTPOST_DOWN % outpost_pk)
self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, logs))

View File

@ -2,8 +2,7 @@
from django.urls import path
from authentik.outposts.channels import OutpostConsumer
from authentik.root.middleware import ChannelsLoggingMiddleware
websocket_urlpatterns = [
path("ws/outpost/<uuid:pk>/", ChannelsLoggingMiddleware(OutpostConsumer.as_asgi())),
path("ws/outpost/<uuid:pk>/", OutpostConsumer.as_asgi()),
]

View File

@ -40,6 +40,7 @@ class PolicySerializer(ModelSerializer, MetaNameSerializer):
def get_component(self, obj: Policy) -> str: # pragma: no cover
"""Get object component so that we know how to edit the object"""
# pyright: reportGeneralTypeIssues=false
if obj.__class__ == Policy:
return ""
return obj.component
@ -49,6 +50,7 @@ class PolicySerializer(ModelSerializer, MetaNameSerializer):
return obj.bindings.count() + obj.promptstage_set.count()
def to_representation(self, instance: Policy):
# pyright: reportGeneralTypeIssues=false
if instance.__class__ == Policy or not self._resolve_inheritance:
return super().to_representation(instance)
return dict(instance.serializer(instance=instance, resolve_inheritance=False).data)

View File

@ -19,6 +19,7 @@ class AccessDeniedResponse(TemplateResponse):
error_message: Optional[str] = None
policy_result: Optional[PolicyResult] = None
# pyright: reportGeneralTypeIssues=false
def __init__(self, request: HttpRequest, template="policies/denied.html") -> None:
super().__init__(request, template)
self.title = _("Access denied")

View File

@ -74,6 +74,7 @@ class PolicyEngine:
def _check_policy_type(self, binding: PolicyBinding):
"""Check policy type, make sure it's not the root class as that has no logic implemented"""
# pyright: reportGeneralTypeIssues=false
if binding.policy is not None and binding.policy.__class__ == Policy:
raise PolicyEngineException(f"Policy '{binding.policy}' is root type")

View File

@ -15,8 +15,8 @@ class Migration(migrations.Migration):
name="policy_engine_mode",
field=models.TextField(
choices=[
("all", "all, all policies must pass"),
("any", "any, any policy must pass"),
("all", "ALL, all policies must pass"),
("any", "ANY, any policy must pass"),
],
default="all",
),
@ -27,8 +27,8 @@ class Migration(migrations.Migration):
name="policy_engine_mode",
field=models.TextField(
choices=[
("all", "all, all policies must pass"),
("any", "any, any policy must pass"),
("all", "ALL, all policies must pass"),
("any", "ANY, any policy must pass"),
],
default="any",
),

View File

@ -19,8 +19,10 @@ from authentik.policies.types import PolicyRequest, PolicyResult
class PolicyEngineMode(models.TextChoices):
"""Decide how results of multiple policies should be combined."""
MODE_ALL = "all", _("all, all policies must pass") # type: "PolicyEngineMode"
MODE_ANY = "any", _("any, any policy must pass") # type: "PolicyEngineMode"
# pyright: reportGeneralTypeIssues=false
MODE_ALL = "all", _("ALL, all policies must pass") # type: "PolicyEngineMode"
# pyright: reportGeneralTypeIssues=false
MODE_ANY = "any", _("ANY, any policy must pass") # type: "PolicyEngineMode"
class PolicyBindingModel(models.Model):

View File

@ -59,7 +59,6 @@ class TestPasswordPolicyFlow(FlowTestCase):
"label": "PASSWORD_LABEL",
"order": 0,
"placeholder": "PASSWORD_PLACEHOLDER",
"initial_value": "",
"required": True,
"type": "password",
"sub_text": "",

View File

@ -5,9 +5,7 @@ from django.dispatch import receiver
from structlog.stdlib import get_logger
from authentik.core.api.applications import user_app_cache_key
from authentik.core.models import Group, User
from authentik.policies.apps import GAUGE_POLICIES_CACHED
from authentik.policies.models import Policy, PolicyBinding, PolicyBindingModel
from authentik.policies.types import CACHE_PREFIX
from authentik.root.monitoring import monitoring_set
@ -20,14 +18,12 @@ def monitoring_set_policies(sender, **kwargs):
GAUGE_POLICIES_CACHED.set(len(cache.keys(f"{CACHE_PREFIX}_*") or []))
@receiver(post_save, sender=Policy)
@receiver(post_save, sender=PolicyBinding)
@receiver(post_save, sender=PolicyBindingModel)
@receiver(post_save, sender=Group)
@receiver(post_save, sender=User)
@receiver(post_save)
def invalidate_policy_cache(sender, instance, **_):
"""Invalidate Policy cache when policy is updated"""
if sender == Policy:
from authentik.policies.models import Policy, PolicyBinding
if isinstance(instance, Policy):
total = 0
for binding in PolicyBinding.objects.filter(policy=instance):
prefix = f"{CACHE_PREFIX}{binding.policy_binding_uuid.hex}_{binding.policy.pk.hex}*"

View File

@ -1,8 +1,10 @@
# Generated by Django 3.1 on 2020-08-18 15:59
import django.db.models.deletion
from django.apps.registry import Apps
from django.conf import settings
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
import authentik.core.models
import authentik.lib.generators

View File

@ -1,24 +0,0 @@
# Generated by Django 4.1.7 on 2023-05-06 16:18
from django.db import migrations, models
import authentik.providers.oauth2.models
class Migration(migrations.Migration):
dependencies = [
(
"authentik_providers_oauth2",
"0015_accesstoken_auth_time_authorizationcode_auth_time_and_more",
),
]
operations = [
migrations.AlterField(
model_name="refreshtoken",
name="token",
field=models.TextField(
default=authentik.providers.oauth2.models.generate_client_secret
),
),
]

View File

@ -382,7 +382,7 @@ class AccessToken(SerializerModel, ExpiringModel, BaseGrantModel):
class RefreshToken(SerializerModel, ExpiringModel, BaseGrantModel):
"""OAuth2 Refresh Token, opaque"""
token = models.TextField(default=generate_client_secret)
token = models.TextField(default=generate_key)
_id_token = models.TextField(verbose_name=_("ID Token"))
@property

View File

@ -202,4 +202,5 @@ class HttpResponseRedirectScheme(HttpResponseRedirect):
**kwargs: Any,
) -> None:
self.allowed_schemes = allowed_schemes or ["http", "https", "ftp"]
# pyright: reportGeneralTypeIssues=false
super().__init__(redirect_to, *args, **kwargs)

View File

@ -68,7 +68,7 @@ from authentik.stages.consent.stage import (
LOGGER = get_logger()
PLAN_CONTEXT_PARAMS = "goauthentik.io/providers/oauth2/params"
PLAN_CONTEXT_PARAMS = "params"
SESSION_KEY_LAST_LOGIN_UID = "authentik/providers/oauth2/last_login_uid"
ALLOWED_PROMPT_PARAMS = {PROMPT_NONE, PROMPT_CONSENT, PROMPT_LOGIN}

View File

@ -8,7 +8,7 @@ from authentik.flows.stage import ChallengeStageView
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.providers.oauth2.models import DeviceToken
PLAN_CONTEXT_DEVICE = "goauthentik.io/providers/oauth2/device"
PLAN_CONTEXT_DEVICE = "device"
class OAuthDeviceCodeFinishChallenge(Challenge):

View File

@ -29,6 +29,9 @@ from authentik.providers.oauth2.utils import cors_allow
LOGGER = get_logger()
PLAN_CONTEXT_PARAMS = "params"
PLAN_CONTEXT_SCOPES = "scopes"
class ProviderInfoView(View):
"""OpenID-compliant Provider Info"""

View File

@ -2,12 +2,19 @@
from typing import Generic, TypeVar
from pydantic import ValidationError
from pydanticscim.service_provider import (
Bulk,
ChangePassword,
Filter,
Patch,
ServiceProviderConfiguration,
Sort,
)
from requests import RequestException, Session
from structlog.stdlib import get_logger
from authentik.lib.utils.http import get_http_session
from authentik.providers.scim.clients.exceptions import ResourceMissing, SCIMRequestException
from authentik.providers.scim.clients.schema import ServiceProviderConfiguration
from authentik.providers.scim.clients.exceptions import SCIMRequestException
from authentik.providers.scim.models import SCIMProvider
T = TypeVar("T")
@ -15,6 +22,18 @@ T = TypeVar("T")
SchemaType = TypeVar("SchemaType")
def default_service_provider_config() -> ServiceProviderConfiguration:
"""Fallback service provider configuration"""
return ServiceProviderConfiguration(
patch=Patch(supported=False),
bulk=Bulk(supported=False),
filter=Filter(supported=False),
changePassword=ChangePassword(supported=False),
sort=Sort(supported=False),
authenticationSchemes=[],
)
class SCIMClient(Generic[T, SchemaType]):
"""SCIM Client"""
@ -54,8 +73,6 @@ class SCIMClient(Generic[T, SchemaType]):
raise SCIMRequestException(None) from exc
self.logger.debug("scim request", path=path, method=method, **kwargs)
if response.status_code >= 400:
if response.status_code == 404:
raise ResourceMissing(response)
self.logger.warning(
"Failed to send SCIM request", path=path, method=method, response=response.text
)
@ -66,7 +83,7 @@ class SCIMClient(Generic[T, SchemaType]):
def get_service_provider_config(self):
"""Get Service provider config"""
default_config = ServiceProviderConfiguration.default()
default_config = default_service_provider_config()
try:
return ServiceProviderConfiguration.parse_obj(
self._request("GET", "/ServiceProviderConfig")
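A self-contained sketch of the fallback behavior shown in this file: build a ServiceProviderConfiguration with every optional feature disabled, and use it when the remote /ServiceProviderConfig payload cannot be parsed. load_config() is an illustrative helper, and only the ValidationError path is sketched here.

```python
# Sketch of the fallback shown in this hunk, not the full SCIMClient.
from pydantic import ValidationError
from pydanticscim.service_provider import (
    Bulk,
    ChangePassword,
    Filter,
    Patch,
    ServiceProviderConfiguration,
    Sort,
)


def default_service_provider_config() -> ServiceProviderConfiguration:
    """Fallback configuration with every optional SCIM feature disabled"""
    return ServiceProviderConfiguration(
        patch=Patch(supported=False),
        bulk=Bulk(supported=False),
        filter=Filter(supported=False),
        changePassword=ChangePassword(supported=False),
        sort=Sort(supported=False),
        authenticationSchemes=[],
    )


def load_config(raw: dict) -> ServiceProviderConfiguration:
    """Parse a remote ServiceProviderConfig payload, falling back on bad data"""
    try:
        return ServiceProviderConfiguration.parse_obj(raw)
    except ValidationError:
        return default_service_provider_config()
```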

View File

@ -41,8 +41,3 @@ class SCIMRequestException(SentryIgnoredException):
except ValidationError:
pass
return super().__str__()
class ResourceMissing(SCIMRequestException):
"""Error raised when the provider raises a 404, meaning that we
should delete our internal ID and re-create the object"""

View File

@ -2,7 +2,7 @@
from deepmerge import always_merger
from pydantic import ValidationError
from pydanticscim.group import GroupMember
from pydanticscim.responses import PatchOp, PatchOperation
from pydanticscim.responses import PatchOp, PatchOperation, PatchRequest
from authentik.core.exceptions import PropertyMappingExpressionException
from authentik.core.models import Group
@ -10,13 +10,8 @@ from authentik.events.models import Event, EventAction
from authentik.lib.utils.errors import exception_to_string
from authentik.policies.utils import delete_none_keys
from authentik.providers.scim.clients.base import SCIMClient
from authentik.providers.scim.clients.exceptions import (
ResourceMissing,
SCIMRequestException,
StopSync,
)
from authentik.providers.scim.clients.exceptions import StopSync
from authentik.providers.scim.clients.schema import Group as SCIMGroupSchema
from authentik.providers.scim.clients.schema import PatchRequest
from authentik.providers.scim.models import SCIMGroup, SCIMMapping, SCIMUser
@ -28,11 +23,15 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
scim_group = SCIMGroup.objects.filter(provider=self.provider, group=obj).first()
if not scim_group:
return self._create(obj)
try:
return self._update(obj, scim_group)
except ResourceMissing:
scim_group.delete()
return self._create(obj)
scim_group = self.to_scim(obj)
scim_group.id = scim_group.id
return self._request(
"PUT",
f"/Groups/{scim_group.id}",
data=scim_group.json(
exclude_unset=True,
),
)
def delete(self, obj: Group):
"""Delete group"""
@ -82,15 +81,12 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
users = list(obj.users.order_by("id").values_list("id", flat=True))
connections = SCIMUser.objects.filter(provider=self.provider, user__pk__in=users)
members = []
for user in connections:
members.append(
scim_group.members.append(
GroupMember(
value=user.id,
)
)
if members:
scim_group.members = members
return scim_group
def _create(self, group: Group):
@ -105,32 +101,13 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
)
SCIMGroup.objects.create(provider=self.provider, group=group, id=response["id"])
def _update(self, group: Group, connection: SCIMGroup):
"""Update existing group"""
scim_group = self.to_scim(group)
scim_group.id = connection.id
try:
return self._request(
"PUT",
f"/Groups/{scim_group.id}",
data=scim_group.json(
exclude_unset=True,
),
)
except SCIMRequestException:
# Some providers don't support PUT on groups, so, mainly as a fix for the initial
# sync, send PATCH add requests for all the users the group currently has
# TODO: send patch request for group name
users = list(group.users.order_by("id").values_list("id", flat=True))
return self._patch_add_users(group, users)
def _patch(
self,
group_id: str,
*ops: PatchOperation,
):
req = PatchRequest(Operations=ops)
self._request("PATCH", f"/Groups/{group_id}", data=req.json())
self._request("PATCH", f"/Groups/{group_id}", data=req.json(exclude_unset=True))
def update_group(self, group: Group, action: PatchOp, users_set: set[int]):
"""Update a group, either using PUT to replace it or PATCH if supported"""
@ -139,17 +116,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
return self._patch_add_users(group, users_set)
if action == PatchOp.remove:
return self._patch_remove_users(group, users_set)
try:
return self.write(group)
except SCIMRequestException as exc:
if self._config.is_fallback:
# Assume that provider does not support PUT and also doesn't support
# ServiceProviderConfig, so try PATCH as a fallback
if action == PatchOp.add:
return self._patch_add_users(group, users_set)
if action == PatchOp.remove:
return self._patch_remove_users(group, users_set)
raise exc
return self.write(group)
def _patch_add_users(self, group: Group, users_set: set[int]):
"""Add users in users_set to group"""
@ -166,8 +133,6 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
"id", flat=True
)
)
if len(user_ids) < 1:
return
self._patch(
scim_group.id,
PatchOperation(
@ -192,8 +157,6 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
"id", flat=True
)
)
if len(user_ids) < 1:
return
self._patch(
scim_group.id,
PatchOperation(

View File

@ -1,54 +1,17 @@
"""Custom SCIM schemas"""
from typing import Optional
from pydanticscim.group import Group as BaseGroup
from pydanticscim.responses import PatchRequest as BasePatchRequest
from pydanticscim.service_provider import Bulk, ChangePassword, Filter, Patch
from pydanticscim.service_provider import (
ServiceProviderConfiguration as BaseServiceProviderConfiguration,
)
from pydanticscim.service_provider import Sort
from pydanticscim.user import User as BaseUser
from pydanticscim.group import Group as SCIMGroupSchema
from pydanticscim.user import User as SCIMUserSchema
class User(BaseUser):
class User(SCIMUserSchema):
"""Modified User schema with added externalId field"""
externalId: Optional[str] = None
class Group(BaseGroup):
class Group(SCIMGroupSchema):
"""Modified Group schema with added externalId field"""
externalId: Optional[str] = None
class ServiceProviderConfiguration(BaseServiceProviderConfiguration):
"""ServiceProviderConfig with fallback"""
_is_fallback: Optional[bool] = False
@property
def is_fallback(self) -> bool:
"""Check if this service provider config was retrieved from the API endpoint
or a fallback was used"""
return self._is_fallback
@staticmethod
def default() -> "ServiceProviderConfiguration":
"""Get default configuration, which doesn't support any optional features as fallback"""
return ServiceProviderConfiguration(
patch=Patch(supported=False),
bulk=Bulk(supported=False),
filter=Filter(supported=False),
changePassword=ChangePassword(supported=False),
sort=Sort(supported=False),
authenticationSchemes=[],
_is_fallback=True,
)
class PatchRequest(BasePatchRequest):
"""PatchRequest which correctly sets schemas"""
schemas: tuple[str] = ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]
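The schema changes above all follow one pattern, extending the upstream pydanticscim models; a minimal sketch of that pattern, with a small demo showing the pinned schemas value ending up in the serialized payload.

```python
# Sketch of the extension pattern in this file: subclass the pydanticscim
# models to add externalId and to pin the PatchOp message schema.
from typing import Optional

from pydanticscim.responses import PatchRequest as BasePatchRequest
from pydanticscim.user import User as BaseUser


class User(BaseUser):
    """Modified User schema with added externalId field"""

    externalId: Optional[str] = None


class PatchRequest(BasePatchRequest):
    """PatchRequest which correctly sets schemas"""

    schemas: tuple[str] = ("urn:ietf:params:scim:api:messages:2.0:PatchOp",)


if __name__ == "__main__":
    # The serialized body carries the pinned schemas entry
    print(PatchRequest(Operations=[]).json())
```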

View File

@ -8,7 +8,7 @@ from authentik.events.models import Event, EventAction
from authentik.lib.utils.errors import exception_to_string
from authentik.policies.utils import delete_none_keys
from authentik.providers.scim.clients.base import SCIMClient
from authentik.providers.scim.clients.exceptions import ResourceMissing, StopSync
from authentik.providers.scim.clients.exceptions import StopSync
from authentik.providers.scim.clients.schema import User as SCIMUserSchema
from authentik.providers.scim.models import SCIMMapping, SCIMUser
@ -21,11 +21,7 @@ class SCIMUserClient(SCIMClient[User, SCIMUserSchema]):
scim_user = SCIMUser.objects.filter(provider=self.provider, user=obj).first()
if not scim_user:
return self._create(obj)
try:
return self._update(obj, scim_user)
except ResourceMissing:
scim_user.delete()
return self._create(obj)
return self._update(obj, scim_user)
def delete(self, obj: User):
"""Delete user"""

View File

@ -1,23 +0,0 @@
"""SCIM Sync"""
from django.core.management.base import BaseCommand
from structlog.stdlib import get_logger
from authentik.providers.scim.models import SCIMProvider
from authentik.providers.scim.tasks import scim_sync
LOGGER = get_logger()
class Command(BaseCommand):
"""Run sync for an SCIM Provider"""
def add_arguments(self, parser):
parser.add_argument("providers", nargs="+", type=str)
def handle(self, **options):
for provider_name in options["providers"]:
provider = SCIMProvider.objects.filter(name=provider_name).first()
if not provider:
LOGGER.warning("Provider does not exist", name=provider_name)
continue
scim_sync.delay(provider.pk).get()

View File

@ -1,9 +1,9 @@
"""SCIM Provider tasks"""
from typing import Any, Optional
from typing import Any
from celery.result import allow_join_result
from django.core.paginator import Paginator
from django.db.models import Model, QuerySet
from django.db.models import Model
from django.utils.text import slugify
from django.utils.translation import gettext_lazy as _
from pydanticscim.responses import PatchOp
@ -94,8 +94,7 @@ def scim_sync_users(page: int, provider_pk: int):
}
)
)
except StopSync as exc:
LOGGER.warning("Stopping sync", exc=exc)
except StopSync:
break
return messages
@ -127,8 +126,7 @@ def scim_sync_group(page: int, provider_pk: int):
}
)
)
except StopSync as exc:
LOGGER.warning("Stopping sync", exc=exc)
except StopSync:
break
return messages
@ -143,20 +141,6 @@ def scim_signal_direct(model: str, pk: Any, raw_op: str):
operation = PatchOp(raw_op)
for provider in SCIMProvider.objects.all():
client = client_for_model(provider, instance)
# Check if the object is allowed within the provider's restrictions
queryset: Optional[QuerySet] = None
if isinstance(instance, User):
queryset = provider.get_user_qs()
if isinstance(instance, Group):
queryset = provider.get_group_qs()
if not queryset:
continue
# The queryset we get from the provider must include the given instance,
# otherwise ignore this provider
if not queryset.filter(pk=instance.pk).exists():
continue
try:
if operation == PatchOp.add:
client.write(instance)
@ -173,13 +157,6 @@ def scim_signal_m2m(group_pk: str, action: str, pk_set: list[int]):
if not group:
return
for provider in SCIMProvider.objects.all():
# Check if the object is allowed within the provider's restrictions
queryset: QuerySet = provider.get_group_qs()
# The queryset we get from the provider must include the given instance,
# otherwise ignore this provider
if not queryset.filter(pk=group_pk).exists():
continue
client = SCIMGroupClient(provider)
try:
operation = None

View File

@ -6,7 +6,7 @@ from requests_mock import Mocker
from authentik.blueprints.tests import apply_blueprint
from authentik.core.models import Group, User
from authentik.lib.generators import generate_id
from authentik.providers.scim.clients.schema import ServiceProviderConfiguration
from authentik.providers.scim.clients.base import default_service_provider_config
from authentik.providers.scim.models import SCIMMapping, SCIMProvider
from authentik.providers.scim.tasks import scim_sync
@ -39,7 +39,7 @@ class SCIMMembershipTests(TestCase):
def test_member_add(self):
"""Test member add"""
config = ServiceProviderConfiguration.default()
config = default_service_provider_config()
config.patch.supported = True
user_scim_id = generate_id()
group_scim_id = generate_id()
@ -117,14 +117,13 @@ class SCIMMembershipTests(TestCase):
"path": "members",
"value": [{"value": user_scim_id}],
}
],
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
]
},
)
def test_member_remove(self):
"""Test member remove"""
config = ServiceProviderConfiguration.default()
config = default_service_provider_config()
config.patch.supported = True
user_scim_id = generate_id()
group_scim_id = generate_id()
@ -202,8 +201,7 @@ class SCIMMembershipTests(TestCase):
"path": "members",
"value": [{"value": user_scim_id}],
}
],
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
]
},
)
@ -229,7 +227,6 @@ class SCIMMembershipTests(TestCase):
"path": "members",
"value": [{"value": user_scim_id}],
}
],
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
]
},
)

View File

@ -14,6 +14,7 @@ class ChannelsStorage(SessionStorage):
"""Send contrib.messages over websocket"""
def __init__(self, request: HttpRequest) -> None:
# pyright: reportGeneralTypeIssues=false
super().__init__(request)
self.channel = get_channel_layer()

View File

@ -1,7 +1,6 @@
"""Dynamically set SameSite depending if the upstream connection is TLS or not"""
from hashlib import sha512
from time import time
from timeit import default_timer
from typing import Callable
from django.conf import settings
@ -131,28 +130,6 @@ class SessionMiddleware(UpstreamSessionMiddleware):
return response
class ChannelsLoggingMiddleware:
"""Logging middleware for channels"""
def __init__(self, inner):
self.inner = inner
async def __call__(self, scope, receive, send):
self.log(scope)
return await self.inner(scope, receive, send)
def log(self, scope: dict, **kwargs):
"""Log request"""
headers = dict(scope.get("headers", {}))
LOGGER.info(
scope["path"],
scheme="ws",
remote=scope.get("client", [""])[0],
user_agent=headers.get(b"user-agent", b"").decode(),
**kwargs,
)
class LoggingMiddleware:
"""Logger middleware"""
@ -162,14 +139,14 @@ class LoggingMiddleware:
self.get_response = get_response
def __call__(self, request: HttpRequest) -> HttpResponse:
start = default_timer()
start = time()
response = self.get_response(request)
status_code = response.status_code
kwargs = {
"request_id": request.request_id,
}
kwargs.update(getattr(response, "ak_context", {}))
self.log(request, status_code, int((default_timer() - start) * 1000), **kwargs)
self.log(request, status_code, int((time() - start) * 1000), **kwargs)
return response
def log(self, request: HttpRequest, status_code: int, runtime: int, **kwargs):
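The middleware hunk above measures request runtime with time() on one side of the diff and timeit.default_timer() on the other; a standalone sketch of the monotonic variant, with timed() as an illustrative helper, not part of the middleware.

```python
# default_timer() is monotonic, so the measured runtime is unaffected by
# system clock adjustments; time() can jump forwards or backwards.
from timeit import default_timer


def timed(func, *args, **kwargs):
    """Call func and return (result, runtime in whole milliseconds)"""
    start = default_timer()
    result = func(*args, **kwargs)
    runtime = int((default_timer() - start) * 1000)
    return result, runtime


if __name__ == "__main__":
    result, runtime_ms = timed(sum, range(1_000_000))
    print(result, runtime_ms)
```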

View File

@ -492,12 +492,3 @@ if DEBUG:
INSTALLED_APPS.append("authentik.core")
CONFIG.log("info", "Booting authentik", version=__version__)
# Attempt to load enterprise app, if available
try:
importlib.import_module("authentik.enterprise.apps")
CONFIG.log("info", "Enabled authentik enterprise")
INSTALLED_APPS.append("authentik.enterprise")
_update_settings("authentik.enterprise.settings")
except ImportError:
pass
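The settings hunk above enables an optional app only when it can be imported; a generic sketch of that pattern with a placeholder package name.

```python
# Generic sketch of the optional-app pattern in the settings hunk above;
# "example_extra" is a placeholder package, not a real authentik module.
import importlib

INSTALLED_APPS: list[str] = ["django.contrib.contenttypes"]

try:
    importlib.import_module("example_extra.apps")
    INSTALLED_APPS.append("example_extra")
except ImportError:
    # Optional package not installed; continue without it
    pass
```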

View File

@ -12,13 +12,12 @@ AUTHENTIK_SOURCES_OAUTH_TYPES = [
"authentik.sources.oauth.types.facebook",
"authentik.sources.oauth.types.github",
"authentik.sources.oauth.types.google",
"authentik.sources.oauth.types.mailcow",
"authentik.sources.oauth.types.oidc",
"authentik.sources.oauth.types.okta",
"authentik.sources.oauth.types.patreon",
"authentik.sources.oauth.types.reddit",
"authentik.sources.oauth.types.twitch",
"authentik.sources.oauth.types.twitter",
"authentik.sources.oauth.types.mailcow",
"authentik.sources.oauth.types.twitch",
]

View File

@ -66,7 +66,7 @@ class OAuthClient(BaseOAuthClient):
response.raise_for_status()
except RequestException as exc:
raise OAuthSourceException(
exc.response.text if exc.response else str(exc),
response=exc.response.text if exc.response else str(exc),
) from exc
return response.text

View File

@ -163,15 +163,6 @@ class DiscordOAuthSource(OAuthSource):
verbose_name_plural = _("Discord OAuth Sources")
class PatreonOAuthSource(OAuthSource):
"""Social Login using Patreon."""
class Meta:
abstract = True
verbose_name = _("Patreon OAuth Source")
verbose_name_plural = _("Patreon OAuth Sources")
class GoogleOAuthSource(OAuthSource):
"""Social Login using Google or Google Workspace (GSuite)."""

View File

@ -1,67 +0,0 @@
"""Patreon Type tests"""
from django.test import RequestFactory, TestCase
from authentik.sources.oauth.models import OAuthSource
from authentik.sources.oauth.types.patreon import PatreonOAuthCallback
PATREON_USER = {
"data": {
"attributes": {
"about": None,
"created": "2017-10-20T21:36:23+00:00",
"discord_id": None,
"email": "corgi@example.com",
"facebook": None,
"facebook_id": None,
"first_name": "Corgi",
"full_name": "Corgi The Dev",
"gender": 0,
"has_password": True,
"image_url": "https://c8.patreon.com/2/400/0000000",
"is_deleted": False,
"is_email_verified": False,
"is_nuked": False,
"is_suspended": False,
"last_name": "The Dev",
"social_connections": {
"deviantart": None,
"discord": None,
"facebook": None,
"reddit": None,
"spotify": None,
"twitch": None,
"twitter": None,
"youtube": None,
},
"thumb_url": "https://c8.patreon.com/2/100/0000000",
"twitch": None,
"twitter": None,
"url": "https://www.patreon.com/corgithedev",
"vanity": "corgithedev",
"youtube": None,
},
"id": "0000000",
"relationships": {"pledges": {"data": []}},
"type": "user",
},
"links": {"self": "https://www.patreon.com/api/user/0000000"},
}
class TestTypePatreon(TestCase):
"""OAuth Source tests"""
def setUp(self):
self.source = OAuthSource.objects.create(
name="test",
slug="test",
provider_type="Patreon",
)
self.factory = RequestFactory()
def test_enroll_context(self):
"""Test Patreon Enrollment context"""
ak_context = PatreonOAuthCallback().get_user_enroll_context(PATREON_USER)
self.assertEqual(ak_context["username"], PATREON_USER["data"]["attributes"]["vanity"])
self.assertEqual(ak_context["email"], PATREON_USER["data"]["attributes"]["email"])
self.assertEqual(ak_context["name"], PATREON_USER["data"]["attributes"]["full_name"])

View File

@ -59,6 +59,7 @@ class AppleOAuthClient(OAuth2Client):
"aud": "https://appleid.apple.com",
"sub": parts[0].strip(),
}
# pyright: reportGeneralTypeIssues=false
jwt = encode(payload, self.source.consumer_secret, "ES256", {"kid": parts[2].strip()})
LOGGER.debug("signing payload as secret key", payload=payload, jwt=jwt)
return jwt
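The Apple client above signs its client secret as an ES256 JWT with a kid header; a self-contained sketch assuming the encode() used there is PyJWT's (the call shape matches). The key is generated on the fly purely so the sketch runs, and every identifier is a placeholder.

```python
# Self-contained ES256 signing sketch with a "kid" header. Apple expects the
# downloaded .p8 key plus the real team/client/key identifiers instead.
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec
from jwt import encode

# Throwaway P-256 key so the example is runnable
private_key = ec.generate_private_key(ec.SECP256R1())
pem = private_key.private_bytes(
    serialization.Encoding.PEM,
    serialization.PrivateFormat.PKCS8,
    serialization.NoEncryption(),
)

payload = {
    "aud": "https://appleid.apple.com",
    "sub": "com.example.placeholder-client-id",
}
token = encode(payload, pem, algorithm="ES256", headers={"kid": "PLACEHOLDERKID"})
print(token)
```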

View File

@ -1,50 +0,0 @@
"""Patreon OAuth Views"""
from typing import Any
from authentik.sources.oauth.clients.oauth2 import UserprofileHeaderAuthClient
from authentik.sources.oauth.models import OAuthSource
from authentik.sources.oauth.types.registry import SourceType, registry
from authentik.sources.oauth.views.callback import OAuthCallback
from authentik.sources.oauth.views.redirect import OAuthRedirect
class PatreonOAuthRedirect(OAuthRedirect):
"""Patreon OAuth2 Redirect"""
def get_additional_parameters(self, source: OAuthSource): # pragma: no cover
return {
"scope": ["openid", "email", "profile"],
}
class PatreonOAuthCallback(OAuthCallback):
"""Patreon OAuth2 Callback"""
client_class: UserprofileHeaderAuthClient
def get_user_id(self, info: dict[str, str]) -> str:
return info.get("data", {}).get("id")
def get_user_enroll_context(
self,
info: dict[str, Any],
) -> dict[str, Any]:
return {
"username": info.get("data", {}).get("attributes", {}).get("vanity"),
"email": info.get("data", {}).get("attributes", {}).get("email"),
"name": info.get("data", {}).get("attributes", {}).get("full_name"),
}
@registry.register()
class PatreonType(SourceType):
"""OpenIDConnect Type definition"""
callback_view = PatreonOAuthCallback
redirect_view = PatreonOAuthRedirect
name = "Patreon"
slug = "patreon"
authorization_url = "https://www.patreon.com/oauth2/authorize"
access_token_url = "https://www.patreon.com/api/oauth2/token" # nosec
profile_url = "https://www.patreon.com/api/oauth2/api/current_user"

View File

@ -3,6 +3,8 @@
import django.contrib.postgres.fields
from django.db import migrations, models
import authentik.lib.generators
class Migration(migrations.Migration):
dependencies = [

View File

@ -40,7 +40,11 @@ from authentik.sources.saml.models import SAMLBindingTypes, SAMLSource
from authentik.sources.saml.processors.metadata import MetadataProcessor
from authentik.sources.saml.processors.request import RequestProcessor
from authentik.sources.saml.processors.response import ResponseProcessor
from authentik.stages.consent.stage import PLAN_CONTEXT_CONSENT_HEADER, ConsentStageView
from authentik.stages.consent.stage import (
PLAN_CONTEXT_CONSENT_HEADER,
PLAN_CONTEXT_CONSENT_TITLE,
ConsentStageView,
)
LOGGER = get_logger()
@ -124,6 +128,7 @@ class InitiateView(View):
injected_stages = []
plan_kwargs = {
PLAN_CONTEXT_TITLE: f"Redirecting to {source.name}...",
PLAN_CONTEXT_CONSENT_TITLE: f"Redirecting to {source.name}...",
PLAN_CONTEXT_ATTRS: {
"SAMLRequest": saml_request,
"RelayState": relay_state,

View File

@ -10,6 +10,7 @@ from duo_client.admin import Admin
from duo_client.auth import Auth
from rest_framework.serializers import BaseSerializer, Serializer
from authentik import __version__
from authentik.core.types import UserSettingSerializer
from authentik.flows.models import ConfigurableStage, FriendlyNamedStage, Stage
from authentik.lib.models import SerializerModel

Some files were not shown because too many files have changed in this diff Show More