Compare commits


3 Commits

Author | SHA1 | Message | Date

4d791f4fef | release: 2022.12.3 | 2023-03-02 20:18:21 +01:00

c03a069f02 | security: fix CVE-2023-26481 (#4832) | 2023-03-02 20:16:01 +01:00

    fix CVE-2023-26481

    Signed-off-by: Jens Langhammer <jens@goauthentik.io>

040adb8ce7 | website: always show build version in version dropdown | 2023-02-16 14:40:07 +01:00

    Signed-off-by: Jens Langhammer <jens@goauthentik.io>

    #3940

    # Conflicts:
    #	website/docusaurus.config.js
456 changed files with 8360 additions and 14147 deletions

View File

@ -1,5 +1,5 @@
[bumpversion]
current_version = 2023.1.2
current_version = 2022.12.3
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
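The `parse` option above is the pattern bumpversion uses to split the version string into its components. A quick, purely illustrative check of the same regex in Python (not part of the diff):

```python
import re

# Same pattern as the bumpversion `parse` option above.
parse = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)")

print(parse.match("2022.12.3").groupdict())
# {'major': '2022', 'minor': '12', 'patch': '3'}
```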

View File

@ -38,14 +38,6 @@ runs:
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
For arm64, use these values:
```shell
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
AUTHENTIK_TAG=${{ inputs.tag }}-arm64
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
<details>
@ -62,17 +54,6 @@ runs:
tag: ${{ inputs.tag }}
```
For arm64, use these values:
```yaml
authentik:
outposts:
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
image:
repository: ghcr.io/goauthentik/dev-server
tag: ${{ inputs.tag }}-arm64
```
Afterwards, run the upgrade commands from the latest release notes.
</details>
edit-mode: replace

View File

@ -17,9 +17,6 @@ outputs:
sha:
description: "sha"
value: ${{ steps.ev.outputs.sha }}
shortHash:
description: "shortHash"
value: ${{ steps.ev.outputs.shortHash }}
version:
description: "version"
value: ${{ steps.ev.outputs.version }}
@ -56,7 +53,6 @@ runs:
print("branchNameContainer=%s" % safe_branch_name, file=_output)
print("timestamp=%s" % int(time()), file=_output)
print("sha=%s" % os.environ["GITHUB_SHA"], file=_output)
print("shortHash=%s" % os.environ["GITHUB_SHA"][:7], file=_output)
print("shouldBuild=%s" % should_build, file=_output)
print("version=%s" % version, file=_output)
print("versionFamily=%s" % version_family, file=_output)

View File

@ -102,31 +102,14 @@ jobs:
uses: helm/kind-action@v1.5.0
- name: run integration
run: |
poetry run coverage run manage.py test tests/integration
poetry run make test-integration
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v3
with:
flags: integration
test-e2e:
name: test-e2e (${{ matrix.job.name }})
test-e2e-provider:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
job:
- name: proxy
glob: tests/e2e/test_provider_proxy*
- name: oauth
glob: tests/e2e/test_provider_oauth2* tests/e2e/test_source_oauth*
- name: oauth-oidc
glob: tests/e2e/test_provider_oidc*
- name: saml
glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml*
- name: ldap
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
- name: flows
glob: tests/e2e/test_flows*
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
@ -148,7 +131,36 @@ jobs:
npm run build
- name: run e2e
run: |
poetry run coverage run manage.py test ${{ matrix.job.glob }}
poetry run make test-e2e-provider
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v3
with:
flags: e2e
test-e2e-rest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
run: |
docker-compose -f tests/e2e/docker-compose.yml up -d
- id: cache-web
uses: actions/cache@v3
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web/
run: |
npm ci
make -C .. gen-client-ts
npm run build
- name: run e2e
run: |
poetry run make test-e2e-rest
poetry run coverage xml
- if: ${{ always() }}
uses: codecov/codecov-action@v3
@ -161,7 +173,8 @@ jobs:
- test-migrations-from-stable
- test-unittest
- test-integration
- test-e2e
- test-e2e-rest
- test-e2e-provider
runs-on: ubuntu-latest
steps:
- run: echo mark
@ -169,6 +182,11 @@ jobs:
needs: ci-core-mark
runs-on: ubuntu-latest
timeout-minutes: 120
strategy:
fail-fast: false
matrix:
arch:
- 'linux/amd64'
steps:
- uses: actions/checkout@v3
- name: Set up QEMU
@ -187,7 +205,7 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
secrets: |
@ -196,49 +214,14 @@ jobs:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: ${{ matrix.arch }}
- name: Comment on PR
if: github.event_name == 'pull_request'
continue-on-error: true
uses: ./.github/actions/comment-pr-instructions
with:
tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
build-arm64:
needs: ci-core-mark
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
- name: Login to Container Registry
uses: docker/login-action@v2
if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@v3
with:
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-arm64
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: linux/arm64
tag: gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.sha }}

View File

@ -28,8 +28,6 @@ jobs:
run: make gen-client-go
- name: golangci-lint
uses: golangci/golangci-lint-action@v3
with:
args: --timeout 5000s
test-unittest:
runs-on: ubuntu-latest
steps:
@ -82,12 +80,13 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate API
run: make gen-client-go
- name: Build Docker Image
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
tags: |
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}
ghcr.io/goauthentik/dev-${{ matrix.type }}:gh-${{ steps.ev.outputs.sha }}
file: ${{ matrix.type }}.Dockerfile
build-args: |
@ -112,7 +111,7 @@ jobs:
- uses: actions/setup-go@v3
with:
go-version: "^1.17"
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: '16'
cache: 'npm'

View File

@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: '16'
cache: 'npm'
@ -31,7 +31,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: '16'
cache: 'npm'
@ -47,7 +47,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: '16'
cache: 'npm'
@ -63,7 +63,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: '16'
cache: 'npm'
@ -95,7 +95,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: '16'
cache: 'npm'

View File

@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: '16'
cache: 'npm'

View File

@ -11,12 +11,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Delete 'dev' containers older than a week
uses: snok/container-retention-policy@v1
uses: sondrelg/container-retention-policy@v1
with:
image-names: dev-server,dev-ldap,dev-proxy
cut-off: One week ago UTC
account-type: org
org-name: goauthentik
untagged-only: false
token: ${{ secrets.BOT_GITHUB_TOKEN }}
token: ${{ secrets.GHCR_CLEANUP_TOKEN }}
skip-tags: gh-next,gh-main

View File

@ -27,11 +27,11 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
push: ${{ github.event_name == 'release' }}
secrets: |
secrets:
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
tags: |
@ -75,7 +75,7 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
- name: Building Docker Image
uses: docker/build-push-action@v3
with:
push: ${{ github.event_name == 'release' }}
@ -88,6 +88,9 @@ jobs:
ghcr.io/goauthentik/${{ matrix.type }}:latest
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
build-args: |
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
build-outpost-binary:
@ -106,7 +109,7 @@ jobs:
- uses: actions/setup-go@v3
with:
go-version: "^1.17"
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: '16'
cache: 'npm'

View File

@ -26,14 +26,14 @@ jobs:
id: get_version
uses: actions/github-script@v6
with:
github-token: ${{ secrets.BOT_GITHUB_TOKEN }}
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
return context.payload.ref.replace(/\/refs\/tags\/version\//, '');
- name: Create Release
id: create_release
uses: actions/create-release@v1.1.4
env:
GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ github.ref }}
release_name: Release ${{ steps.get_version.outputs.result }}

View File

@ -19,8 +19,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run compile
@ -29,7 +27,7 @@ jobs:
uses: peter-evans/create-pull-request@v4
id: cpr
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
token: ${{ secrets.GITHUB_TOKEN }}
branch: compile-backend-translation
commit-message: "core: compile backend translations"
title: "core: compile backend translations"

View File

@ -10,9 +10,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
- uses: actions/setup-node@v3.6.0
- uses: actions/setup-node@v3.5.1
with:
node-version: '16'
registry-url: 'https://registry.npmjs.org'
@ -30,20 +28,14 @@ jobs:
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- uses: peter-evans/create-pull-request@v4
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
id: cpr
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
token: ${{ secrets.GITHUB_TOKEN }}
branch: update-web-api-client
commit-message: "web: bump API Client version"
title: "web: bump API Client version"
body: "web: bump API Client version"
delete-branch: true
signoff: true
team-reviewers: "@goauthentik/core"
author: authentik bot <github-bot@goauthentik.io>
- uses: peter-evans/enable-pull-request-automerge@v2
with:
token: ${{ secrets.BOT_GITHUB_TOKEN }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash

View File

@ -14,9 +14,7 @@
"webauthn",
"traefik",
"passwordless",
"kubernetes",
"sso",
"slo"
"kubernetes"
],
"python.linting.pylintEnabled": true,
"todo-tree.tree.showCountsInTree": true,

View File

@ -59,18 +59,19 @@ These are the current packages:
authentik
├── admin - Administrative tasks and APIs, no models (Version updates, Metrics, system tasks)
├── api - General API Configuration (Routes, Schema and general API utilities)
├── blueprints - Handle managed models and their state.
├── core - Core authentik functionality, central routes, core Models
├── crypto - Cryptography, currently used to generate and hold Certificates and Private Keys
├── events - Event Log, middleware and signals to generate signals
├── flows - Flows, the FlowPlanner and the FlowExecutor, used for all flows for authentication, authorization, etc
├── lib - Generic library of functions, few dependencies on other packages.
├── managed - Handle managed models and their state.
├── outposts - Configure and deploy outposts on kubernetes and docker.
├── policies - General PolicyEngine
│   ├── dummy - A Dummy policy used for testing
│   ├── event_matcher - Match events based on different criteria
│   ├── expiry - Check when a user's password was last set
│   ├── expression - Execute any arbitrary python code
│   ├── hibp - Check a password against HaveIBeenPwned
│   ├── password - Check a password against several rules
│   └── reputation - Check the user's/client's reputation
├── providers

View File

@ -31,7 +31,7 @@ RUN pip install --no-cache-dir poetry && \
poetry export -f requirements.txt --dev --output requirements-dev.txt
# Stage 4: Build go proxy
FROM docker.io/golang:1.19.5-bullseye AS go-builder
FROM docker.io/golang:1.19.4-bullseye AS go-builder
WORKDIR /work
@ -50,7 +50,6 @@ RUN go build -o /work/authentik ./cmd/server/
FROM docker.io/maxmindinc/geoipupdate:v4.10 as geoip
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
ENV GEOIPUPDATE_VERBOSE="true"
RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
--mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
@ -58,7 +57,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
/bin/sh -c "\
export GEOIPUPDATE_ACCOUNT_ID=$(cat /run/secrets/GEOIPUPDATE_ACCOUNT_ID); \
export GEOIPUPDATE_LICENSE_KEY=$(cat /run/secrets/GEOIPUPDATE_LICENSE_KEY); \
/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0 \
/usr/bin/entry.sh || exit 0 \
"
# Stage 6: Run

View File

@ -6,6 +6,15 @@ NPM_VERSION = $(shell python -m scripts.npm_version)
all: lint-fix lint test gen web
test-integration:
coverage run manage.py test tests/integration
test-e2e-provider:
coverage run manage.py test tests/e2e/test_provider*
test-e2e-rest:
coverage run manage.py test tests/e2e/test_flows* tests/e2e/test_source*
test-go:
go test -timeout 0 -v -race -cover ./...

View File

@ -6,8 +6,8 @@ Authentik takes security very seriously. We follow the rules of [responsible dis
| Version | Supported |
| --------- | ------------------ |
| 2022.11.x | :white_check_mark: |
| 2022.12.x | :white_check_mark: |
| 2023.1.x | :white_check_mark: |
## Reporting a Vulnerability

View File

@ -2,7 +2,7 @@
from os import environ
from typing import Optional
__version__ = "2023.1.2"
__version__ = "2022.12.3"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@ -1,7 +1,4 @@
"""authentik administration metrics"""
from datetime import timedelta
from django.db.models.functions import ExtractHour
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import IntegerField, SerializerMethodField
@ -24,44 +21,38 @@ class CoordinateSerializer(PassiveSerializer):
class LoginMetricsSerializer(PassiveSerializer):
"""Login Metrics per 1h"""
logins = SerializerMethodField()
logins_failed = SerializerMethodField()
authorizations = SerializerMethodField()
logins_per_1h = SerializerMethodField()
logins_failed_per_1h = SerializerMethodField()
authorizations_per_1h = SerializerMethodField()
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins(self, _):
"""Get successful logins per 8 hours for the last 7 days"""
def get_logins_per_1h(self, _):
"""Get successful logins per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.LOGIN)
.get_events_per_hour()
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins_failed(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
def get_logins_failed_per_1h(self, _):
"""Get failed logins per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN_FAILED
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.LOGIN_FAILED)
.get_events_per_hour()
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_authorizations(self, _):
"""Get successful authorizations per 8 hours for the last 7 days"""
def get_authorizations_per_1h(self, _):
"""Get successful authorizations per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.AUTHORIZE_APPLICATION
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.AUTHORIZE_APPLICATION)
.get_events_per_hour()
)
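The two sides of this hunk bucket the metrics differently: one aggregates the last 7 days into 8-hour spans via `get_events_per(timedelta(days=7), ExtractHour, 7 * 3)`, the other counts per hour for the last 24 hours via `get_events_per_hour()`. A small sketch of the arithmetic behind the "3 data points per day, so 8 hour spans" comment (illustrative only, not the Event manager API itself):

```python
from datetime import timedelta

# 7 days split into 7 * 3 = 21 data points -> 8-hour spans
print(timedelta(days=7) / (7 * 3))   # 8:00:00

# the per-hour variant: 24 hourly data points for the last day
print(timedelta(days=1) / 24)        # 1:00:00
```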

View File

@ -7,13 +7,7 @@ from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema
from rest_framework.decorators import action
from rest_framework.fields import (
CharField,
ChoiceField,
DateTimeField,
ListField,
SerializerMethodField,
)
from rest_framework.fields import CharField, ChoiceField, DateTimeField, ListField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
@ -32,7 +26,6 @@ class TaskSerializer(PassiveSerializer):
task_name = CharField()
task_description = CharField()
task_finish_timestamp = DateTimeField(source="finish_time")
task_duration = SerializerMethodField()
status = ChoiceField(
source="result.status.name",
@ -40,11 +33,7 @@ class TaskSerializer(PassiveSerializer):
)
messages = ListField(source="result.messages")
def get_task_duration(self, instance: TaskInfo) -> int:
"""Get the duration a task took to run"""
return max(instance.finish_timestamp - instance.start_timestamp, 0)
def to_representation(self, instance: TaskInfo):
def to_representation(self, instance):
"""When a new version of authentik adds fields to TaskInfo,
the API will fail with an AttributeError, as the classes
are pickled in cache. In that case, just delete the info"""
@ -79,6 +68,7 @@ class TaskViewSet(ViewSet):
),
],
)
# pylint: disable=invalid-name
def retrieve(self, request: Request, pk=None) -> Response:
"""Get a single system task"""
task = TaskInfo.by_name(pk)
@ -109,6 +99,7 @@ class TaskViewSet(ViewSet):
],
)
@action(detail=True, methods=["post"])
# pylint: disable=invalid-name
def retry(self, request: Request, pk=None) -> Response:
"""Retry task"""
task = TaskInfo.by_name(pk)

View File

@ -8,6 +8,7 @@ from authentik.root.monitoring import monitoring_set
@receiver(monitoring_set)
# pylint: disable=unused-argument
def monitoring_set_workers(sender, **kwargs):
"""Set worker gauge"""
count = len(CELERY_APP.control.ping(timeout=0.5))
@ -15,6 +16,7 @@ def monitoring_set_workers(sender, **kwargs):
@receiver(monitoring_set)
# pylint: disable=unused-argument
def monitoring_set_tasks(sender, **kwargs):
"""Set task gauges"""
for task in TaskInfo.all().values():

View File

@ -31,16 +31,6 @@ def validate_auth(header: bytes) -> Optional[str]:
def bearer_auth(raw_header: bytes) -> Optional[User]:
"""raw_header in the Format of `Bearer ....`"""
user = auth_user_lookup(raw_header)
if not user:
return None
if not user.is_active:
raise AuthenticationFailed("Token invalid/expired")
return user
def auth_user_lookup(raw_header: bytes) -> Optional[User]:
"""raw_header in the Format of `Bearer ....`"""
from authentik.providers.oauth2.models import RefreshToken

View File

@ -62,7 +62,7 @@ window.addEventListener('DOMContentLoaded', (event) => {
allow-spec-url-load="false"
allow-spec-file-load="false">
<div slot="nav-logo">
<img alt="authentik Logo" class="logo" src="{% static 'dist/assets/icons/icon_left_brand.png' %}" />
<img class="logo" src="{% static 'dist/assets/icons/icon_left_brand.png' %}" />
</div>
</rapi-doc>
<script>

View File

@ -3,12 +3,13 @@ from base64 import b64encode
from django.conf import settings
from django.test import TestCase
from guardian.shortcuts import get_anonymous_user
from rest_framework.exceptions import AuthenticationFailed
from authentik.api.authentication import bearer_auth
from authentik.blueprints.tests import reconcile_app
from authentik.core.models import USER_ATTRIBUTE_SA, Token, TokenIntents
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.core.tests.utils import create_test_flow
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
from authentik.providers.oauth2.models import OAuth2Provider, RefreshToken
@ -35,18 +36,9 @@ class TestAPIAuth(TestCase):
def test_bearer_valid(self):
"""Test valid token"""
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=create_test_admin_user())
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=get_anonymous_user())
self.assertEqual(bearer_auth(f"Bearer {token.key}".encode()), token.user)
def test_bearer_valid_deactivated(self):
"""Test valid token"""
user = create_test_admin_user()
user.is_active = False
user.save()
token = Token.objects.create(intent=TokenIntents.INTENT_API, user=user)
with self.assertRaises(AuthenticationFailed):
bearer_auth(f"Bearer {token.key}".encode())
def test_managed_outpost(self):
"""Test managed outpost"""
with self.assertRaises(AuthenticationFailed):
@ -64,7 +56,7 @@ class TestAPIAuth(TestCase):
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
)
refresh = RefreshToken.objects.create(
user=create_test_admin_user(),
user=get_anonymous_user(),
provider=provider,
refresh_token=generate_id(),
_scope=SCOPE_AUTHENTIK_API,
@ -77,7 +69,7 @@ class TestAPIAuth(TestCase):
name=generate_id(), client_id=generate_id(), authorization_flow=create_test_flow()
)
refresh = RefreshToken.objects.create(
user=create_test_admin_user(),
user=get_anonymous_user(),
provider=provider,
refresh_token=generate_id(),
_scope="",

View File

@ -45,6 +45,7 @@ from authentik.policies.dummy.api import DummyPolicyViewSet
from authentik.policies.event_matcher.api import EventMatcherPolicyViewSet
from authentik.policies.expiry.api import PasswordExpiryPolicyViewSet
from authentik.policies.expression.api import ExpressionPolicyViewSet
from authentik.policies.hibp.api import HaveIBeenPwendPolicyViewSet
from authentik.policies.password.api import PasswordPolicyViewSet
from authentik.policies.reputation.api import ReputationPolicyViewSet, ReputationViewSet
from authentik.providers.ldap.api import LDAPOutpostConfigViewSet, LDAPProviderViewSet
@ -149,6 +150,7 @@ router.register("policies/all", PolicyViewSet)
router.register("policies/bindings", PolicyBindingViewSet)
router.register("policies/expression", ExpressionPolicyViewSet)
router.register("policies/event_matcher", EventMatcherPolicyViewSet)
router.register("policies/haveibeenpwned", HaveIBeenPwendPolicyViewSet)
router.register("policies/password_expiry", PasswordExpiryPolicyViewSet)
router.register("policies/password", PasswordPolicyViewSet)
router.register("policies/reputation/scores", ReputationViewSet)

View File

@ -1,5 +1,4 @@
"""Serializer mixin for managed models"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
@ -12,7 +11,6 @@ from rest_framework.viewsets import ModelViewSet
from authentik.api.decorators import permission_required
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer
@ -42,21 +40,6 @@ class BlueprintInstanceSerializer(ModelSerializer):
raise ValidationError(exc) from exc
return path
def validate_content(self, content: str) -> str:
"""Ensure content (if set) is a valid blueprint"""
if content == "":
return content
context = self.instance.context if self.instance else {}
valid, logs = Importer(content, context).validate()
if not valid:
raise ValidationError(_("Failed to validate blueprint"), *[x["msg"] for x in logs])
return content
def validate(self, attrs: dict) -> dict:
if attrs.get("path", "") == "" and attrs.get("content", "") == "":
raise ValidationError(_("Either path or content must be set."))
return super().validate(attrs)
class Meta:
model = BlueprintInstance
@ -71,7 +54,6 @@ class BlueprintInstanceSerializer(ModelSerializer):
"enabled",
"managed_models",
"metadata",
"content",
]
extra_kwargs = {
"status": {"read_only": True},

View File

@ -57,10 +57,9 @@ class AuthentikBlueprintsConfig(ManagedAppConfig):
def reconcile_blueprints_discover(self):
"""Run blueprint discovery"""
from authentik.blueprints.v1.tasks import blueprints_discover, clear_failed_blueprints
from authentik.blueprints.v1.tasks import blueprints_discover
blueprints_discover.delay()
clear_failed_blueprints.delay()
def import_models(self):
super().import_models()

View File

@ -1,23 +0,0 @@
# Generated by Django 4.1.5 on 2023-01-10 19:48
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_blueprints", "0001_initial"),
]
operations = [
migrations.AddField(
model_name="blueprintinstance",
name="content",
field=models.TextField(blank=True, default=""),
),
migrations.AlterField(
model_name="blueprintinstance",
name="path",
field=models.TextField(blank=True, default=""),
),
]

View File

@ -1,18 +1,30 @@
"""blueprint models"""
from pathlib import Path
from urllib.parse import urlparse
from uuid import uuid4
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils.translation import gettext_lazy as _
from opencontainers.distribution.reggie import (
NewClient,
WithDebug,
WithDefaultName,
WithDigest,
WithReference,
WithUserAgent,
WithUsernamePassword,
)
from requests.exceptions import RequestException
from rest_framework.serializers import Serializer
from structlog import get_logger
from authentik.blueprints.v1.oci import BlueprintOCIClient, OCIException
from authentik.lib.config import CONFIG
from authentik.lib.models import CreatedUpdatedModel, SerializerModel
from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.http import authentik_user_agent
OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
LOGGER = get_logger()
@ -62,8 +74,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
name = models.TextField()
metadata = models.JSONField(default=dict)
path = models.TextField(default="", blank=True)
content = models.TextField(default="", blank=True)
path = models.TextField()
context = models.JSONField(default=dict)
last_applied = models.DateTimeField(auto_now=True)
last_applied_hash = models.TextField()
@ -75,29 +86,60 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
def retrieve_oci(self) -> str:
"""Get blueprint from an OCI registry"""
client = BlueprintOCIClient(self.path.replace("oci://", "https://"))
url = urlparse(self.path)
ref = "latest"
path = url.path[1:]
if ":" in url.path:
path, _, ref = path.partition(":")
client = NewClient(
f"https://{url.hostname}",
WithUserAgent(authentik_user_agent()),
WithUsernamePassword(url.username, url.password),
WithDefaultName(path),
WithDebug(True),
)
LOGGER.info("Fetching OCI manifests for blueprint", instance=self)
manifest_request = client.NewRequest(
"GET",
"/v2/<name>/manifests/<reference>",
WithReference(ref),
).SetHeader("Accept", "application/vnd.oci.image.manifest.v1+json")
try:
manifests = client.fetch_manifests()
return client.fetch_blobs(manifests)
except OCIException as exc:
manifest_response = client.Do(manifest_request)
manifest_response.raise_for_status()
except RequestException as exc:
raise BlueprintRetrievalFailed(exc) from exc
manifest = manifest_response.json()
if "errors" in manifest:
raise BlueprintRetrievalFailed(manifest["errors"])
def retrieve_file(self) -> str:
"""Get blueprint from path"""
blob = None
for layer in manifest.get("layers", []):
if layer.get("mediaType", "") == OCI_MEDIA_TYPE:
blob = layer.get("digest")
LOGGER.debug("Found layer with matching media type", instance=self, blob=blob)
if not blob:
raise BlueprintRetrievalFailed("Blob not found")
blob_request = client.NewRequest(
"GET",
"/v2/<name>/blobs/<digest>",
WithDigest(blob),
)
try:
full_path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
with full_path.open("r", encoding="utf-8") as _file:
return _file.read()
except (IOError, OSError) as exc:
blob_response = client.Do(blob_request)
blob_response.raise_for_status()
return blob_response.text
except RequestException as exc:
raise BlueprintRetrievalFailed(exc) from exc
def retrieve(self) -> str:
"""Retrieve blueprint contents"""
if self.path.startswith("oci://"):
return self.retrieve_oci()
if self.path != "":
return self.retrieve_file()
return self.content
full_path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
with full_path.open("r", encoding="utf-8") as _file:
return _file.read()
@property
def serializer(self) -> Serializer:
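For context on the retrieval logic shown above: an `oci://` blueprint path is split into a registry host, a repository name, and an optional `:ref` that defaults to `latest`. A minimal sketch of that parsing, using a hypothetical path (the URL below is an example, not taken from the diff):

```python
from urllib.parse import urlparse

# Hypothetical blueprint path, mirroring the parsing in retrieve_oci() above.
url = urlparse("oci://ghcr.io/example-org/example-blueprint:v1")

ref = "latest"
path = url.path[1:]                  # "example-org/example-blueprint:v1"
if ":" in url.path:
    path, _, ref = path.partition(":")

print(url.hostname, path, ref)       # ghcr.io example-org/example-blueprint v1
```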

View File

@ -9,9 +9,4 @@ CELERY_BEAT_SCHEDULE = {
"schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"),
"options": {"queue": "authentik_scheduled"},
},
"blueprints_v1_cleanup": {
"task": "authentik.blueprints.v1.tasks.clear_failed_blueprints",
"schedule": crontab(minute=fqdn_rand("blueprints_v1_cleanup"), hour="*"),
"options": {"queue": "authentik_scheduled"},
},
}

View File

@ -4,7 +4,6 @@ entries:
pk: cb954fd4-65a5-4ad9-b1ee-180ee9559cf4
model: authentik_stages_prompt.prompt
attrs:
name: qwerweqrq
field_key: username
label: Username
type: username

View File

@ -3,12 +3,6 @@ context:
foo: bar
policy_property: name
policy_property_value: foo-bar-baz-qux
sequence:
- foo
- bar
mapping:
key1: value
key2: 2
entries:
- model: !Format ["%s", authentik_sources_oauth.oauthsource]
state: !Format ["%s", present]
@ -25,7 +19,7 @@ entries:
[slug, default-source-authentication],
]
enrollment_flow:
!Find [!Format ["%s", authentik_flows.Flow], [slug, default-source-enrollment]]
!Find [authentik_flows.Flow, [slug, default-source-enrollment]]
- attrs:
expression: return True
identifiers:
@ -98,49 +92,6 @@ entries:
]
if_true_simple: !If [!Context foo, true, text]
if_false_simple: !If [null, false, 2]
enumerate_mapping_to_mapping: !Enumerate [
!Context mapping,
MAP,
[!Format ["prefix-%s", !Index 0], !Format ["other-prefix-%s", !Value 0]]
]
enumerate_mapping_to_sequence: !Enumerate [
!Context mapping,
SEQ,
!Format ["prefixed-pair-%s-%s", !Index 0, !Value 0]
]
enumerate_sequence_to_sequence: !Enumerate [
!Context sequence,
SEQ,
!Format ["prefixed-items-%s-%s", !Index 0, !Value 0]
]
enumerate_sequence_to_mapping: !Enumerate [
!Context sequence,
MAP,
[!Format ["index: %d", !Index 0], !Value 0]
]
nested_complex_enumeration: !Enumerate [
!Context sequence,
MAP,
[
!Index 0,
!Enumerate [
!Context mapping,
MAP,
[
!Format ["%s", !Index 0],
[
!Enumerate [!Value 2, SEQ, !Format ["prefixed-%s", !Value 0]],
{
outer_value: !Value 1,
outer_index: !Index 1,
middle_value: !Value 0,
middle_index: !Index 0
}
]
]
]
]
]
identifiers:
name: test
conditions:

View File

@ -2,8 +2,7 @@
from django.test import TransactionTestCase
from requests_mock import Mocker
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
from authentik.blueprints.v1.oci import OCI_MEDIA_TYPE
from authentik.blueprints.models import OCI_MEDIA_TYPE, BlueprintInstance, BlueprintRetrievalFailed
class TestBlueprintOCI(TransactionTestCase):

View File

@ -13,7 +13,7 @@ from authentik.tenants.models import Tenant
class TestPackaged(TransactionTestCase):
"""Empty class, test methods are added dynamically"""
@apply_blueprint("default/default-tenant.yaml")
@apply_blueprint("default/90-default-tenant.yaml")
def test_decorator_static(self):
"""Test @apply_blueprint decorator"""
self.assertTrue(Tenant.objects.filter(domain="authentik-default").exists())

View File

@ -162,61 +162,6 @@ class TestBlueprintsV1(TransactionTestCase):
"if_false_complex": ["list", "with", "items", "foo-bar"],
"if_true_simple": True,
"if_false_simple": 2,
"enumerate_mapping_to_mapping": {
"prefix-key1": "other-prefix-value",
"prefix-key2": "other-prefix-2",
},
"enumerate_mapping_to_sequence": [
"prefixed-pair-key1-value",
"prefixed-pair-key2-2",
],
"enumerate_sequence_to_sequence": [
"prefixed-items-0-foo",
"prefixed-items-1-bar",
],
"enumerate_sequence_to_mapping": {"index: 0": "foo", "index: 1": "bar"},
"nested_complex_enumeration": {
"0": {
"key1": [
["prefixed-f", "prefixed-o", "prefixed-o"],
{
"outer_value": "foo",
"outer_index": 0,
"middle_value": "value",
"middle_index": "key1",
},
],
"key2": [
["prefixed-f", "prefixed-o", "prefixed-o"],
{
"outer_value": "foo",
"outer_index": 0,
"middle_value": 2,
"middle_index": "key2",
},
],
},
"1": {
"key1": [
["prefixed-b", "prefixed-a", "prefixed-r"],
{
"outer_value": "bar",
"outer_index": 1,
"middle_value": "value",
"middle_index": "key1",
},
],
"key2": [
["prefixed-b", "prefixed-a", "prefixed-r"],
{
"outer_value": "bar",
"outer_index": 1,
"middle_value": 2,
"middle_index": "key2",
},
],
},
},
}
)
)
@ -262,21 +207,15 @@ class TestBlueprintsV1(TransactionTestCase):
with transaction_rollback():
# First stage fields
username_prompt = Prompt.objects.create(
name=generate_id(),
field_key="username",
label="Username",
order=0,
type=FieldTypes.TEXT,
field_key="username", label="Username", order=0, type=FieldTypes.TEXT
)
password = Prompt.objects.create(
name=generate_id(),
field_key="password",
label="Password",
order=1,
type=FieldTypes.PASSWORD,
)
password_repeat = Prompt.objects.create(
name=generate_id(),
field_key="password_repeat",
label="Password (repeat)",
order=2,

View File

@ -43,28 +43,3 @@ class TestBlueprintsV1API(APITestCase):
"6871c0003f5c07be5c3316d9d4a08444bd8fed1b3f03294e51e44522"
),
)
def test_api_blank(self):
"""Test blank"""
res = self.client.post(
reverse("authentik_api:blueprintinstance-list"),
data={
"name": "foo",
},
)
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(
res.content.decode(), {"non_field_errors": ["Either path or content must be set."]}
)
def test_api_content(self):
"""Test blank"""
res = self.client.post(
reverse("authentik_api:blueprintinstance-list"),
data={
"name": "foo",
"content": '{"version": 3}',
},
)
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(res.content.decode(), {"content": ["Failed to validate blueprint"]})

View File

@ -1,15 +1,13 @@
"""transfer common classes"""
from collections import OrderedDict
from copy import copy
from dataclasses import asdict, dataclass, field, is_dataclass
from enum import Enum
from functools import reduce
from operator import ixor
from os import getenv
from typing import Any, Iterable, Literal, Mapping, Optional, Union
from typing import Any, Literal, Optional, Union
from uuid import UUID
from deepmerge import always_merger
from django.apps import apps
from django.db.models import Model, Q
from rest_framework.fields import Field
@ -66,13 +64,11 @@ class BlueprintEntry:
identifiers: dict[str, Any] = field(default_factory=dict)
attrs: Optional[dict[str, Any]] = field(default_factory=dict)
# pylint: disable=invalid-name
id: Optional[str] = None
_state: BlueprintEntryState = field(default_factory=BlueprintEntryState)
def __post_init__(self, *args, **kwargs) -> None:
self.__tag_contexts: list["YAMLTagContext"] = []
@staticmethod
def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry":
"""Convert a SerializerModel instance to a blueprint Entry"""
@ -89,46 +85,17 @@ class BlueprintEntry:
attrs=all_attrs,
)
def _get_tag_context(
self,
depth: int = 0,
context_tag_type: Optional[type["YAMLTagContext"] | tuple["YAMLTagContext", ...]] = None,
) -> "YAMLTagContext":
"""Get a YAMLTagContext object located at a certain depth in the tag tree"""
if depth < 0:
raise ValueError("depth must be a positive number or zero")
if context_tag_type:
contexts = [x for x in self.__tag_contexts if isinstance(x, context_tag_type)]
else:
contexts = self.__tag_contexts
try:
return contexts[-(depth + 1)]
except IndexError:
raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}")
def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any:
"""Check if we have any special tags that need handling"""
val = copy(value)
if isinstance(value, YAMLTagContext):
self.__tag_contexts.append(value)
if isinstance(value, YAMLTag):
val = value.resolve(self, blueprint)
return value.resolve(self, blueprint)
if isinstance(value, dict):
for key, inner_value in value.items():
val[key] = self.tag_resolver(inner_value, blueprint)
value[key] = self.tag_resolver(inner_value, blueprint)
if isinstance(value, list):
for idx, inner_value in enumerate(value):
val[idx] = self.tag_resolver(inner_value, blueprint)
if isinstance(value, YAMLTagContext):
self.__tag_contexts.pop()
return val
value[idx] = self.tag_resolver(inner_value, blueprint)
return value
def get_attrs(self, blueprint: "Blueprint") -> dict[str, Any]:
"""Get attributes of this entry, with all yaml tags resolved"""
@ -178,19 +145,12 @@ class YAMLTag:
raise NotImplementedError
class YAMLTagContext:
"""Base class for all YAML Tag Contexts"""
def get_context(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
"""Implement yaml tag context logic"""
raise NotImplementedError
class KeyOf(YAMLTag):
"""Reference another object by their ID"""
id_from: str
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
super().__init__()
self.id_from = node.value
@ -217,6 +177,7 @@ class Env(YAMLTag):
key: str
default: Optional[Any]
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
super().__init__()
self.default = None
@ -236,6 +197,7 @@ class Context(YAMLTag):
key: str
default: Optional[Any]
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
super().__init__()
self.default = None
@ -258,6 +220,7 @@ class Format(YAMLTag):
format_string: str
args: list[Any]
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.format_string = node.value[0].value
@ -282,12 +245,15 @@ class Format(YAMLTag):
class Find(YAMLTag):
"""Find any object"""
model_name: str | YAMLTag
model_name: str
conditions: list[list]
model_class: type[Model]
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.model_name = loader.construct_object(node.value[0])
self.model_name = node.value[0].value
self.model_class = apps.get_model(*self.model_name.split("."))
self.conditions = []
for raw_node in node.value[1:]:
values = []
@ -296,13 +262,6 @@ class Find(YAMLTag):
self.conditions.append(values)
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
if isinstance(self.model_name, YAMLTag):
model_name = self.model_name.resolve(entry, blueprint)
else:
model_name = self.model_name
model_class = apps.get_model(*model_name.split("."))
query = Q()
for cond in self.conditions:
if isinstance(cond[0], YAMLTag):
@ -314,7 +273,7 @@ class Find(YAMLTag):
else:
query_value = cond[1]
query &= Q(**{query_key: query_value})
instance = model_class.objects.filter(query).first()
instance = self.model_class.objects.filter(query).first()
if instance:
return instance.pk
return None
@ -337,6 +296,7 @@ class Condition(YAMLTag):
"XNOR": lambda args: not (reduce(ixor, args) if len(args) > 1 else args[0]),
}
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.mode = node.value[0].value
@ -369,6 +329,7 @@ class If(YAMLTag):
when_true: Any
when_false: Any
# pylint: disable=unused-argument
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.condition = loader.construct_object(node.value[0])
@ -390,133 +351,6 @@ class If(YAMLTag):
raise EntryInvalidError(exc)
class Enumerate(YAMLTag, YAMLTagContext):
"""Iterate over an iterable."""
iterable: YAMLTag | Iterable
item_body: Any
output_body: Literal["SEQ", "MAP"]
_OUTPUT_BODIES = {
"SEQ": (list, lambda a, b: [*a, b]),
"MAP": (
dict,
lambda a, b: always_merger.merge(
a, {b[0]: b[1]} if isinstance(b, (tuple, list)) else b
),
),
}
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.iterable = loader.construct_object(node.value[0])
self.output_body = node.value[1].value
self.item_body = loader.construct_object(node.value[2])
self.__current_context: tuple[Any, Any] = tuple()
def get_context(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
return self.__current_context
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
if isinstance(self.iterable, EnumeratedItem) and self.iterable.depth == 0:
raise EntryInvalidError(
f"{self.__class__.__name__} tag's iterable references this tag's context. "
"This is a noop. Check you are setting depth bigger than 0."
)
if isinstance(self.iterable, YAMLTag):
iterable = self.iterable.resolve(entry, blueprint)
else:
iterable = self.iterable
if not isinstance(iterable, Iterable):
raise EntryInvalidError(
f"{self.__class__.__name__}'s iterable must be an iterable "
"such as a sequence or a mapping"
)
if isinstance(iterable, Mapping):
iterable = tuple(iterable.items())
else:
iterable = tuple(enumerate(iterable))
try:
output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()]
except KeyError as exc:
raise EntryInvalidError(exc)
result = output_class()
self.__current_context = tuple()
try:
for item in iterable:
self.__current_context = item
resolved_body = entry.tag_resolver(self.item_body, blueprint)
result = add_fn(result, resolved_body)
if not isinstance(result, output_class):
raise EntryInvalidError(
f"Invalid {self.__class__.__name__} item found: {resolved_body}"
)
finally:
self.__current_context = tuple()
return result
class EnumeratedItem(YAMLTag):
"""Get the current item value and index provided by an Enumerate tag context"""
depth: int
_SUPPORTED_CONTEXT_TAGS = (Enumerate,)
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
super().__init__()
self.depth = int(node.value)
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
try:
context_tag: Enumerate = entry._get_tag_context(
depth=self.depth,
context_tag_type=EnumeratedItem._SUPPORTED_CONTEXT_TAGS,
)
except ValueError as exc:
if self.depth == 0:
raise EntryInvalidError(
f"{self.__class__.__name__} tags are only usable "
f"inside an {Enumerate.__name__} tag"
)
raise EntryInvalidError(f"{self.__class__.__name__} tag: {exc}")
return context_tag.get_context(entry, blueprint)
class Index(EnumeratedItem):
"""Get the current item index provided by an Enumerate tag context"""
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
context = super().resolve(entry, blueprint)
try:
return context[0]
except IndexError: # pragma: no cover
raise EntryInvalidError(f"Empty/invalid context: {context}")
class Value(EnumeratedItem):
"""Get the current item value provided by an Enumerate tag context"""
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
context = super().resolve(entry, blueprint)
try:
return context[1]
except IndexError: # pragma: no cover
raise EntryInvalidError(f"Empty/invalid context: {context}")
class BlueprintDumper(SafeDumper):
"""Dump dataclasses to yaml"""
@ -560,9 +394,6 @@ class BlueprintLoader(SafeLoader):
self.add_constructor("!Condition", Condition)
self.add_constructor("!If", If)
self.add_constructor("!Env", Env)
self.add_constructor("!Enumerate", Enumerate)
self.add_constructor("!Value", Value)
self.add_constructor("!Index", Index)
class EntryInvalidError(SentryIgnoredException):

View File

@ -3,4 +3,3 @@
LABEL_AUTHENTIK_SYSTEM = "blueprints.goauthentik.io/system"
LABEL_AUTHENTIK_INSTANTIATE = "blueprints.goauthentik.io/instantiate"
LABEL_AUTHENTIK_GENERATED = "blueprints.goauthentik.io/generated"
LABEL_AUTHENTIK_DESCRIPTION = "blueprints.goauthentik.io/description"

View File

@ -1,98 +0,0 @@
"""OCI Client"""
from typing import Any
from urllib.parse import ParseResult, urlparse
from opencontainers.distribution.reggie import (
NewClient,
WithDebug,
WithDefaultName,
WithDigest,
WithReference,
WithUserAgent,
WithUsernamePassword,
)
from requests.exceptions import RequestException
from structlog import get_logger
from structlog.stdlib import BoundLogger
from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.http import authentik_user_agent
OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
class OCIException(SentryIgnoredException):
"""OCI-related errors"""
class BlueprintOCIClient:
"""Blueprint OCI Client"""
url: ParseResult
sanitized_url: str
logger: BoundLogger
ref: str
client: NewClient
def __init__(self, url: str) -> None:
self._parse_url(url)
self.logger = get_logger().bind(url=self.sanitized_url)
self.ref = "latest"
path = self.url.path[1:]
if ":" in self.url.path:
path, _, self.ref = path.partition(":")
self.client = NewClient(
f"https://{self.url.hostname}",
WithUserAgent(authentik_user_agent()),
WithUsernamePassword(self.url.username, self.url.password),
WithDefaultName(path),
WithDebug(True),
)
def _parse_url(self, url: str):
self.url = urlparse(url)
netloc = self.url.netloc
if "@" in netloc:
netloc = netloc[netloc.index("@") + 1 :]
self.sanitized_url = self.url._replace(netloc=netloc).geturl()
def fetch_manifests(self) -> dict[str, Any]:
"""Fetch manifests for ref"""
self.logger.info("Fetching OCI manifests for blueprint")
manifest_request = self.client.NewRequest(
"GET",
"/v2/<name>/manifests/<reference>",
WithReference(self.ref),
).SetHeader("Accept", "application/vnd.oci.image.manifest.v1+json")
try:
manifest_response = self.client.Do(manifest_request)
manifest_response.raise_for_status()
except RequestException as exc:
raise OCIException(exc) from exc
manifest = manifest_response.json()
if "errors" in manifest:
raise OCIException(manifest["errors"])
return manifest
def fetch_blobs(self, manifest: dict[str, Any]):
"""Fetch blob based on manifest info"""
blob = None
for layer in manifest.get("layers", []):
if layer.get("mediaType", "") == OCI_MEDIA_TYPE:
blob = layer.get("digest")
self.logger.debug("Found layer with matching media type", blob=blob)
if not blob:
raise OCIException("Blob not found")
blob_request = self.client.NewRequest(
"GET",
"/v2/<name>/blobs/<digest>",
WithDigest(blob),
)
try:
blob_response = self.client.Do(blob_request)
blob_response.raise_for_status()
return blob_response.text
except RequestException as exc:
raise OCIException(exc) from exc

View File

@ -219,14 +219,3 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
finally:
if instance:
instance.save()
@CELERY_APP.task()
def clear_failed_blueprints():
"""Remove blueprints which couldn't be fetched"""
# Exclude OCI blueprints as those might be temporarily unavailable
for blueprint in BlueprintInstance.objects.exclude(path__startswith="oci://"):
try:
blueprint.retrieve()
except BlueprintRetrievalFailed:
blueprint.delete()

View File

@ -1,10 +1,8 @@
"""Application API Views"""
from datetime import timedelta
from typing import Optional
from django.core.cache import cache
from django.db.models import QuerySet
from django.db.models.functions import ExtractHour
from django.http.response import HttpResponseBadRequest
from django.shortcuts import get_object_or_404
from drf_spectacular.types import OpenApiTypes
@ -227,6 +225,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
methods=["POST"],
parser_classes=(MultiPartParser,),
)
# pylint: disable=unused-argument
def set_icon(self, request: Request, slug: str):
"""Set application icon"""
app: Application = self.get_object()
@ -246,6 +245,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
filter_backends=[],
methods=["POST"],
)
# pylint: disable=unused-argument
def set_icon_url(self, request: Request, slug: str):
"""Set application icon (as URL)"""
app: Application = self.get_object()
@ -254,14 +254,15 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
@permission_required("authentik_core.view_application", ["authentik_events.view_event"])
@extend_schema(responses={200: CoordinateSerializer(many=True)})
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=unused-argument
def metrics(self, request: Request, slug: str):
"""Metrics for application logins"""
app = self.get_object()
return Response(
get_objects_for_user(request.user, "authentik_events.view_event").filter(
get_objects_for_user(request.user, "authentik_events.view_event")
.filter(
action=EventAction.AUTHORIZE_APPLICATION,
context__authorized_application__pk=app.pk.hex,
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
.get_events_per_hour()
)

View File

@ -96,6 +96,7 @@ class GroupFilter(FilterSet):
queryset=User.objects.all(),
)
# pylint: disable=unused-argument
def filter_attributes(self, queryset, name, value):
"""Filter attributes by query args"""
try:
@ -156,6 +157,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
},
)
@action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
# pylint: disable=unused-argument, invalid-name
def add_user(self, request: Request, pk: str) -> Response:
"""Add user to group"""
group: Group = self.get_object()
@ -180,6 +182,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
},
)
@action(detail=True, methods=["POST"], pagination_class=None, filter_backends=[])
# pylint: disable=unused-argument, invalid-name
def remove_user(self, request: Request, pk: str) -> Response:
"""Add user to group"""
group: Group = self.get_object()

View File

@ -117,6 +117,7 @@ class PropertyMappingViewSet(
],
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
# pylint: disable=unused-argument, invalid-name
def test(self, request: Request, pk: str) -> Response:
"""Test Property Mapping"""
mapping: PropertyMapping = self.get_object()

View File

@ -102,6 +102,7 @@ class SourceViewSet(
methods=["POST"],
parser_classes=(MultiPartParser,),
)
# pylint: disable=unused-argument
def set_icon(self, request: Request, slug: str):
"""Set source icon"""
source: Source = self.get_object()
@ -121,6 +122,7 @@ class SourceViewSet(
filter_backends=[],
methods=["POST"],
)
# pylint: disable=unused-argument
def set_icon_url(self, request: Request, slug: str):
"""Set source icon (as URL)"""
source: Source = self.get_object()

View File

@ -112,6 +112,7 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
}
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["GET"])
# pylint: disable=unused-argument
def view_key(self, request: Request, identifier: str) -> Response:
"""Return token key and log access"""
token: Token = self.get_object()
@ -133,6 +134,7 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
},
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
# pylint: disable=unused-argument
def set_key(self, request: Request, identifier: str) -> Response:
"""Return token key and log access"""
token: Token = self.get_object()

View File

@ -53,7 +53,7 @@ class UsedByMixin:
responses={200: UsedBySerializer(many=True)},
)
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=too-many-locals
# pylint: disable=invalid-name, unused-argument, too-many-locals
def used_by(self, request: Request, *args, **kwargs) -> Response:
"""Get a list of all objects that use this object"""
# pyright: reportGeneralTypeIssues=false

View File

@ -4,9 +4,6 @@ from json import loads
from typing import Any, Optional
from django.contrib.auth import update_session_auth_hash
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache
from django.db.models.functions import ExtractHour
from django.db.models.query import QuerySet
from django.db.transaction import atomic
from django.db.utils import IntegrityError
@ -59,7 +56,6 @@ from authentik.core.models import (
USER_ATTRIBUTE_SA,
USER_ATTRIBUTE_TOKEN_EXPIRING,
USER_PATH_SERVICE_ACCOUNT,
AuthenticatedSession,
Group,
Token,
TokenIntents,
@ -203,44 +199,38 @@ class SessionUserSerializer(PassiveSerializer):
class UserMetricsSerializer(PassiveSerializer):
"""User Metrics"""
logins = SerializerMethodField()
logins_failed = SerializerMethodField()
authorizations = SerializerMethodField()
logins_per_1h = SerializerMethodField()
logins_failed_per_1h = SerializerMethodField()
authorizations_per_1h = SerializerMethodField()
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins(self, _):
"""Get successful logins per 8 hours for the last 7 days"""
def get_logins_per_1h(self, _):
"""Get successful logins per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN, user__pk=user.pk
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.LOGIN, user__pk=user.pk)
.get_events_per_hour()
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_logins_failed(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
def get_logins_failed_per_1h(self, _):
"""Get failed logins per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.LOGIN_FAILED, context__username=user.username
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.LOGIN_FAILED, context__username=user.username)
.get_events_per_hour()
)
@extend_schema_field(CoordinateSerializer(many=True))
def get_authorizations(self, _):
"""Get failed logins per 8 hours for the last 7 days"""
def get_authorizations_per_1h(self, _):
"""Get failed logins per hour for the last 24 hours"""
user = self.context["user"]
return (
get_objects_for_user(user, "authentik_events.view_event").filter(
action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk
)
# 3 data points per day, so 8 hour spans
.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)
get_objects_for_user(user, "authentik_events.view_event")
.filter(action=EventAction.AUTHORIZE_APPLICATION, user__pk=user.pk)
.get_events_per_hour()
)
@ -272,6 +262,7 @@ class UsersFilter(FilterSet):
queryset=Group.objects.all(),
)
# pylint: disable=unused-argument
def filter_attributes(self, queryset, name, value):
"""Filter attributes by query args"""
try:
@ -406,8 +397,9 @@ class UserViewSet(UsedByMixin, ModelViewSet):
return Response(data={"non_field_errors": [str(exc)]}, status=400)
@extend_schema(responses={200: SessionUserSerializer(many=False)})
@action(url_path="me", url_name="me", detail=False, pagination_class=None, filter_backends=[])
def user_me(self, request: Request) -> Response:
@action(detail=False, pagination_class=None, filter_backends=[])
# pylint: disable=invalid-name
def me(self, request: Request) -> Response:
"""Get information about current user"""
context = {"request": request}
serializer = SessionUserSerializer(
@ -435,6 +427,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
},
)
@action(detail=True, methods=["POST"])
# pylint: disable=invalid-name, unused-argument
def set_password(self, request: Request, pk: int) -> Response:
"""Set password for user"""
user: User = self.get_object()
@ -452,6 +445,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
@permission_required("authentik_core.view_user", ["authentik_events.view_event"])
@extend_schema(responses={200: UserMetricsSerializer(many=False)})
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=invalid-name, unused-argument
def metrics(self, request: Request, pk: int) -> Response:
"""User metrics per 1h"""
user: User = self.get_object()
@ -467,6 +461,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
},
)
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=invalid-name, unused-argument
def recovery(self, request: Request, pk: int) -> Response:
"""Create a temporary link that a user can use to recover their accounts"""
link, _ = self._create_recovery_link()
@ -491,6 +486,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
},
)
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=invalid-name, unused-argument
def recovery_email(self, request: Request, pk: int) -> Response:
"""Create a temporary link that a user can use to recover their accounts"""
for_user: User = self.get_object()
@ -564,14 +560,3 @@ class UserViewSet(UsedByMixin, ModelViewSet):
)
}
)
def partial_update(self, request: Request, *args, **kwargs) -> Response:
response = super().partial_update(request, *args, **kwargs)
instance: User = self.get_object()
if not instance.is_active:
sessions = AuthenticatedSession.objects.filter(user=instance)
session_ids = sessions.values_list("session_key", flat=True)
cache.delete_many(f"{KEY_PREFIX}{session}" for session in session_ids)
sessions.delete()
LOGGER.debug("Deleted user's sessions", user=instance.username)
return response
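
The serializer hunks above swap the fixed per-hour metrics helpers for the generalized `get_events_per(time_since, extract, data_points)` queryset method. A minimal sketch of the two call styles on either side of this compare, assuming the `Event` manager shown in this diff (only one of the helpers exists in any given release):

```python
# Minimal sketch (not from the repo) of the two metric helpers; names and
# values are illustrative, and only one helper exists on a given side.
from datetime import timedelta

from django.db.models.functions import ExtractHour

from authentik.events.models import Event, EventAction

login_events = Event.objects.filter(action=EventAction.LOGIN)

# Newer, generalized helper: 7 days of data in 8-hour buckets (3 points per day).
weekly = login_events.get_events_per(timedelta(days=7), ExtractHour, 7 * 3)

# Older fixed helper that this compare reverts to: 24 hourly buckets.
# daily = login_events.get_events_per_hour()
```

Both variants return a list of `{"x_cord": ..., "y_cord": ...}` chart points, as shown in the `EventQuerySet` diff further below.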

View File

@ -49,6 +49,7 @@ class Command(BaseCommand):
return namespace
@staticmethod
# pylint: disable=unused-argument
def post_save_handler(sender, instance: Model, created: bool, **_):
"""Signal handler for all object's post_save"""
if not should_log_model(instance):
@ -64,6 +65,7 @@ class Command(BaseCommand):
).save()
@staticmethod
# pylint: disable=unused-argument
def pre_delete_handler(sender, instance: Model, **_):
"""Signal handler for all object's pre_delete"""
if not should_log_model(instance): # pragma: no cover

View File

@ -20,6 +20,7 @@ if TYPE_CHECKING:
@receiver(post_save)
# pylint: disable=unused-argument
def post_save_application(sender: type[Model], instance, created: bool, **_):
"""Clear user's application cache upon application creation"""
from authentik.core.api.applications import user_app_cache_key
@ -35,6 +36,7 @@ def post_save_application(sender: type[Model], instance, created: bool, **_):
@receiver(user_logged_in)
# pylint: disable=unused-argument
def user_logged_in_session(sender, request: HttpRequest, user: "User", **_):
"""Create an AuthenticatedSession from request"""
from authentik.core.models import AuthenticatedSession
@ -45,6 +47,7 @@ def user_logged_in_session(sender, request: HttpRequest, user: "User", **_):
@receiver(user_logged_out)
# pylint: disable=unused-argument
def user_logged_out_session(sender, request: HttpRequest, user: "User", **_):
"""Delete AuthenticatedSession if it exists"""
from authentik.core.models import AuthenticatedSession

View File

@ -48,6 +48,7 @@ class Action(Enum):
class MessageStage(StageView):
"""Show a pre-configured message after the flow is done"""
# pylint: disable=unused-argument
def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""Show a pre-configured message after the flow is done"""
message = getattr(self.executor.current_stage, "message", "")
@ -208,6 +209,7 @@ class SourceFlowManager:
response.error_message = error.messages
return response
# pylint: disable=unused-argument
def get_stages_to_append(self, flow: Flow) -> list[Stage]:
"""Hook to override stages which are appended to the flow"""
if not self.source.enrollment_flow:
@ -262,6 +264,7 @@ class SourceFlowManager:
flow_slug=flow.slug,
)
# pylint: disable=unused-argument
def handle_auth(
self,
connection: UserSourceConnection,

View File

@ -13,6 +13,7 @@ class PostUserEnrollmentStage(StageView):
"""Dynamically injected stage which saves the Connection after
the user has been enrolled."""
# pylint: disable=unused-argument
def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""Stage used after the user has been enrolled"""
connection: UserSourceConnection = self.executor.plan.context[

View File

@ -60,7 +60,7 @@
<div class="ak-login-container">
<header class="pf-c-login__header">
<div class="pf-c-brand ak-brand">
<img src="{{ tenant.branding_logo }}" alt="authentik Logo" />
<img src="{{ tenant.branding_logo }}" alt="authentik icon" />
</div>
</header>
{% block main_container %}

View File

@ -1,6 +1,7 @@
"""Test Source flow_manager"""
from django.contrib.auth.models import AnonymousUser
from django.test import TestCase
from django.test.client import RequestFactory
from guardian.utils import get_anonymous_user
from authentik.core.models import SourceUserMatchingModes, User
@ -21,6 +22,7 @@ class TestSourceFlowManager(TestCase):
def setUp(self) -> None:
super().setUp()
self.source: OAuthSource = OAuthSource.objects.create(name="test")
self.factory = RequestFactory()
self.identifier = generate_id()
def test_unauthenticated_enroll(self):

View File

@ -1,12 +1,10 @@
"""Test Users API"""
from json import loads
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache
from django.urls.base import reverse
from rest_framework.test import APITestCase
from authentik.core.models import AuthenticatedSession, User
from authentik.core.models import User
from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_tenant
from authentik.flows.models import FlowDesignation
from authentik.lib.config import CONFIG
@ -259,26 +257,3 @@ class TestUsersAPI(APITestCase):
self.assertEqual(response.status_code, 200)
body = loads(response.content.decode())
self.assertEqual(body["user"]["avatar"], "bar")
def test_session_delete(self):
"""Ensure sessions are deleted when a user is deactivated"""
user = create_test_admin_user()
session_id = generate_id()
AuthenticatedSession.objects.create(
user=user,
session_key=session_id,
last_ip="",
)
cache.set(KEY_PREFIX + session_id, "foo")
self.client.force_login(self.admin)
response = self.client.patch(
reverse("authentik_api:user-detail", kwargs={"pk": user.pk}),
data={
"is_active": False,
},
)
self.assertEqual(response.status_code, 200)
self.assertIsNone(cache.get(KEY_PREFIX + session_id))
self.assertFalse(AuthenticatedSession.objects.filter(session_key=session_id).exists())

View File

@ -47,11 +47,11 @@ def create_test_tenant() -> Tenant:
def create_test_cert(use_ec_private_key=False) -> CertificateKeyPair:
"""Generate a certificate for testing"""
builder = CertificateBuilder(
name=f"{generate_id()}.self-signed.goauthentik.io",
use_ec_private_key=use_ec_private_key,
)
builder.common_name = "goauthentik.io"
builder.build(
subject_alt_names=[f"{generate_id()}.self-signed.goauthentik.io"],
subject_alt_names=["goauthentik.io"],
validity_days=360,
)
builder.common_name = generate_id()

View File

@ -187,6 +187,7 @@ class CertificateKeyPairFilter(FilterSet):
label="Only return certificate-key pairs with keys", method="filter_has_key"
)
# pylint: disable=unused-argument
def filter_has_key(self, queryset, name, value): # pragma: no cover
"""Only return certificate-key pairs with keys"""
return queryset.exclude(key_data__exact="")
@ -235,11 +236,10 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
data = CertificateGenerationSerializer(data=request.data)
if not data.is_valid():
return Response(data.errors, status=400)
raw_san = data.validated_data.get("subject_alt_name", "")
sans = raw_san.split(",") if raw_san != "" else []
builder = CertificateBuilder(data.validated_data["common_name"])
builder = CertificateBuilder()
builder.common_name = data.validated_data["common_name"]
builder.build(
subject_alt_names=sans,
subject_alt_names=data.validated_data.get("subject_alt_name", "").split(","),
validity_days=int(data.validated_data["validity_days"]),
)
instance = builder.save()
@ -257,6 +257,7 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
responses={200: CertificateDataSerializer(many=False)},
)
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=invalid-name, unused-argument
def view_certificate(self, request: Request, pk: str) -> Response:
"""Return certificate-key pairs certificate and log access"""
certificate: CertificateKeyPair = self.get_object()
@ -287,6 +288,7 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
responses={200: CertificateDataSerializer(many=False)},
)
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=invalid-name, unused-argument
def view_private_key(self, request: Request, pk: str) -> Response:
"""Return certificate-key pairs private key and log access"""
certificate: CertificateKeyPair = self.get_object()

View File

@ -27,16 +27,20 @@ class AuthentikCryptoConfig(ManagedAppConfig):
from authentik.crypto.builder import CertificateBuilder
from authentik.crypto.models import CertificateKeyPair
builder = CertificateBuilder("authentik Internal JWT Certificate")
builder = CertificateBuilder()
builder.common_name = "goauthentik.io"
builder.build(
subject_alt_names=["goauthentik.io"],
validity_days=360,
)
if not cert:
cert = CertificateKeyPair()
builder.cert = cert
builder.cert.managed = MANAGED_KEY
builder.save()
cert.certificate_data = builder.certificate
cert.key_data = builder.private_key
cert.name = "authentik Internal JWT Certificate"
cert.managed = MANAGED_KEY
cert.save()
def reconcile_managed_jwt_cert(self):
"""Ensure managed JWT certificate"""
@ -59,6 +63,10 @@ class AuthentikCryptoConfig(ManagedAppConfig):
name = "authentik Self-signed Certificate"
if CertificateKeyPair.objects.filter(name=name).exists():
return
builder = CertificateBuilder(name)
builder = CertificateBuilder()
builder.build(subject_alt_names=[f"{generate_id()}.self-signed.goauthentik.io"])
builder.save()
CertificateKeyPair.objects.create(
name="authentik Self-signed Certificate",
certificate_data=builder.certificate,
key_data=builder.private_key,
)

View File

@ -21,13 +21,13 @@ class CertificateBuilder:
_use_ec_private_key: bool
def __init__(self, name: str, use_ec_private_key=False):
def __init__(self, use_ec_private_key=False):
self._use_ec_private_key = use_ec_private_key
self.__public_key = None
self.__private_key = None
self.__builder = None
self.__certificate = None
self.common_name = name
self.common_name = "authentik Self-signed Certificate"
self.cert = CertificateKeyPair()
def save(self) -> CertificateKeyPair:
@ -57,10 +57,7 @@ class CertificateBuilder:
one_day = datetime.timedelta(1, 0, 0)
self.__private_key = self.generate_private_key()
self.__public_key = self.__private_key.public_key()
alt_names: list[x509.GeneralName] = []
for alt_name in subject_alt_names or []:
if alt_name.strip() != "":
alt_names.append(x509.DNSName(alt_name))
alt_names: list[x509.GeneralName] = [x509.DNSName(x) for x in subject_alt_names or []]
self.__builder = (
x509.CertificateBuilder()
.subject_name(
@ -79,15 +76,12 @@ class CertificateBuilder:
]
)
)
.add_extension(x509.SubjectAlternativeName(alt_names), critical=True)
.not_valid_before(datetime.datetime.today() - one_day)
.not_valid_after(datetime.datetime.today() + datetime.timedelta(days=validity_days))
.serial_number(int(uuid.uuid4()))
.public_key(self.__public_key)
)
if alt_names:
self.__builder = self.__builder.add_extension(
x509.SubjectAlternativeName(alt_names), critical=True
)
self.__certificate = self.__builder.sign(
private_key=self.__private_key,
algorithm=hashes.SHA256(),
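
The builder diff above moves the certificate name into the constructor and only adds a SubjectAlternativeName extension when non-blank SANs are supplied. A minimal sketch of the newer call style, with an illustrative name and SAN (the older side instead sets `common_name` after construction):

```python
# Minimal sketch of the newer CertificateBuilder call style from the diff above.
from authentik.crypto.builder import CertificateBuilder

builder = CertificateBuilder("example.self-signed.goauthentik.io")  # name is illustrative
builder.build(
    subject_alt_names=["example.goauthentik.io"],  # blank entries are skipped entirely
    validity_days=360,
)
keypair = builder.save()  # returns the persisted CertificateKeyPair
```

Filtering blank SAN entries in `build()` means an empty `subject_alt_name` field from the API no longer produces a certificate with an empty SubjectAlternativeName extension, which is what the `test_builder_api_empty_san*` tests below verify.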

View File

@ -4,8 +4,6 @@ from json import loads
from os import makedirs
from tempfile import TemporaryDirectory
from cryptography.x509.extensions import SubjectAlternativeName
from cryptography.x509.general_name import DNSName
from django.urls import reverse
from rest_framework.test import APITestCase
@ -16,7 +14,7 @@ from authentik.crypto.builder import CertificateBuilder
from authentik.crypto.models import CertificateKeyPair
from authentik.crypto.tasks import MANAGED_DISCOVERED, certificate_discovery
from authentik.lib.config import CONFIG
from authentik.lib.generators import generate_id, generate_key
from authentik.lib.generators import generate_key
from authentik.providers.oauth2.models import OAuth2Provider
@ -56,8 +54,8 @@ class TestCrypto(APITestCase):
def test_builder(self):
"""Test Builder"""
name = generate_id()
builder = CertificateBuilder(name)
builder = CertificateBuilder()
builder.common_name = "test-cert"
with self.assertRaises(ValueError):
builder.save()
builder.build(
@ -66,49 +64,17 @@ class TestCrypto(APITestCase):
)
instance = builder.save()
now = datetime.datetime.today()
self.assertEqual(instance.name, name)
self.assertEqual(instance.name, "test-cert")
self.assertEqual((instance.certificate.not_valid_after - now).days, 2)
def test_builder_api(self):
"""Test Builder (via API)"""
self.client.force_login(create_test_admin_user())
name = generate_id()
self.client.post(
reverse("authentik_api:certificatekeypair-generate"),
data={"common_name": name, "subject_alt_name": "bar,baz", "validity_days": 3},
data={"common_name": "foo", "subject_alt_name": "bar,baz", "validity_days": 3},
)
key = CertificateKeyPair.objects.filter(name=name).first()
self.assertIsNotNone(key)
ext: SubjectAlternativeName = key.certificate.extensions[0].value
self.assertIsInstance(ext, SubjectAlternativeName)
self.assertIsInstance(ext[0], DNSName)
self.assertEqual(ext[0].value, "bar")
self.assertIsInstance(ext[1], DNSName)
self.assertEqual(ext[1].value, "baz")
def test_builder_api_empty_san(self):
"""Test Builder (via API)"""
self.client.force_login(create_test_admin_user())
name = generate_id()
self.client.post(
reverse("authentik_api:certificatekeypair-generate"),
data={"common_name": name, "subject_alt_name": "", "validity_days": 3},
)
key = CertificateKeyPair.objects.filter(name=name).first()
self.assertIsNotNone(key)
self.assertEqual(len(key.certificate.extensions), 0)
def test_builder_api_empty_san_multiple(self):
"""Test Builder (via API)"""
self.client.force_login(create_test_admin_user())
name = generate_id()
self.client.post(
reverse("authentik_api:certificatekeypair-generate"),
data={"common_name": name, "subject_alt_name": ", ", "validity_days": 3},
)
key = CertificateKeyPair.objects.filter(name=name).first()
self.assertIsNotNone(key)
self.assertEqual(len(key.certificate.extensions), 0)
self.assertTrue(CertificateKeyPair.objects.filter(name="foo").exists())
def test_builder_api_invalid(self):
"""Test Builder (via API) (invalid)"""
@ -227,8 +193,8 @@ class TestCrypto(APITestCase):
def test_discovery(self):
"""Test certificate discovery"""
name = generate_id()
builder = CertificateBuilder(name)
builder = CertificateBuilder()
builder.common_name = "test-cert"
with self.assertRaises(ValueError):
builder.save()
builder.build(

View File

@ -1,11 +1,9 @@
"""Events API Views"""
from datetime import timedelta
from json import loads
import django_filters
from django.db.models.aggregates import Count
from django.db.models.fields.json import KeyTextTransform
from django.db.models.functions import ExtractDay
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema
from guardian.shortcuts import get_objects_for_user
@ -83,6 +81,7 @@ class EventsFilter(django_filters.FilterSet):
label="Tenant name",
)
# pylint: disable=unused-argument
def filter_context_model_pk(self, queryset, name, value):
"""Because we store the PK as UUID.hex,
we need to remove the dashes that a client may send. We can't use a
@ -179,7 +178,7 @@ class EventViewSet(ModelViewSet):
get_objects_for_user(request.user, "authentik_events.view_event")
.filter(action=filtered_action)
.filter(**query)
.get_events_per(timedelta(weeks=4), ExtractDay, 30)
.get_events_per_day()
)
@extend_schema(responses={200: TypeCreateSerializer(many=True)})

View File

@ -80,6 +80,7 @@ class NotificationTransportViewSet(UsedByMixin, ModelViewSet):
request=OpenApiTypes.NONE,
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["post"])
# pylint: disable=invalid-name, unused-argument
def test(self, request: Request, pk=None) -> Response:
"""Send example notification using selected transport. Requires
Modify permissions."""

View File

@ -12,21 +12,12 @@ from django.http import HttpRequest, HttpResponse
from django_otp.plugins.otp_static.models import StaticToken
from guardian.models import UserObjectPermission
from authentik.core.models import (
AuthenticatedSession,
PropertyMapping,
Provider,
Source,
User,
UserSourceConnection,
)
from authentik.core.models import AuthenticatedSession, User
from authentik.events.models import Event, EventAction, Notification
from authentik.events.utils import model_to_dict
from authentik.flows.models import FlowToken, Stage
from authentik.flows.models import FlowToken
from authentik.lib.sentry import before_send
from authentik.lib.utils.errors import exception_to_string
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel
IGNORED_MODELS = (
Event,
@ -36,14 +27,6 @@ IGNORED_MODELS = (
StaticToken,
Session,
FlowToken,
Provider,
Source,
PropertyMapping,
UserSourceConnection,
Stage,
OutpostServiceConnection,
Policy,
PolicyBindingModel,
)
@ -51,7 +34,7 @@ def should_log_model(model: Model) -> bool:
"""Return true if operation on `model` should be logged"""
if model.__module__.startswith("silk"):
return False
return model.__class__ not in IGNORED_MODELS
return not isinstance(model, IGNORED_MODELS)
class EventNewThread(Thread):
@ -118,6 +101,7 @@ class AuditMiddleware:
self.disconnect(request)
return response
# pylint: disable=unused-argument
def process_exception(self, request: HttpRequest, exception: Exception):
"""Disconnect handlers in case of exception"""
self.disconnect(request)
@ -141,6 +125,7 @@ class AuditMiddleware:
thread.run()
@staticmethod
# pylint: disable=unused-argument
def post_save_handler(
user: User, request: HttpRequest, sender, instance: Model, created: bool, **_
):
@ -152,6 +137,7 @@ class AuditMiddleware:
EventNewThread(action, request, user=user, model=model_to_dict(instance)).run()
@staticmethod
# pylint: disable=unused-argument
def pre_delete_handler(user: User, request: HttpRequest, sender, instance: Model, **_):
"""Signal handler for all object's pre_delete"""
if not should_log_model(instance): # pragma: no cover
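
The `should_log_model` hunk in this file switches between an exact-class check (`model.__class__ not in IGNORED_MODELS`) and an `isinstance` check that also covers subclasses. A small self-contained sketch of the difference, using a hypothetical `ExpiringToken` subclass:

```python
# Illustrative sketch only; ExpiringToken is a hypothetical subclass used to
# show the difference between the two checks in the diff above.
class Token:  # stand-in for an ignored model
    pass


class ExpiringToken(Token):
    pass


IGNORED_MODELS = (Token,)

obj = ExpiringToken()
print(obj.__class__ not in IGNORED_MODELS)  # True  -> subclass would still be logged
print(not isinstance(obj, IGNORED_MODELS))  # False -> subclass is ignored as well
```

With the exact-class check, subclasses of an ignored model are logged again unless they are listed explicitly, which is why the newer side also extends the ignore list.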

View File

@ -11,7 +11,8 @@ from django.conf import settings
from django.db import models
from django.db.models import Count, ExpressionWrapper, F
from django.db.models.fields import DurationField
from django.db.models.functions import Extract
from django.db.models.functions import ExtractHour
from django.db.models.functions.datetime import ExtractDay
from django.db.models.manager import Manager
from django.db.models.query import QuerySet
from django.http import HttpRequest
@ -110,35 +111,48 @@ class EventAction(models.TextChoices):
class EventQuerySet(QuerySet):
"""Custom events query set with helper functions"""
def get_events_per(
self,
time_since: timedelta,
extract: Extract,
data_points: int,
) -> list[dict[str, int]]:
def get_events_per_hour(self) -> list[dict[str, int]]:
"""Get event count by hour in the last day, fill with zeros"""
_now = now()
max_since = timedelta(days=60)
# Allow maximum of 60 days to limit load
if time_since.total_seconds() > max_since.total_seconds():
time_since = max_since
date_from = _now - time_since
date_from = now() - timedelta(days=1)
result = (
self.filter(created__gte=date_from)
.annotate(age=ExpressionWrapper(_now - F("created"), output_field=DurationField()))
.annotate(age_interval=extract("age"))
.values("age_interval")
.annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField()))
.annotate(age_hours=ExtractHour("age"))
.values("age_hours")
.annotate(count=Count("pk"))
.order_by("age_interval")
.order_by("age_hours")
)
data = Counter({int(d["age_interval"]): d["count"] for d in result})
data = Counter({int(d["age_hours"]): d["count"] for d in result})
results = []
interval_delta = time_since / data_points
for interval in range(1, -data_points, -1):
_now = now()
for hour in range(0, -24, -1):
results.append(
{
"x_cord": time.mktime((_now + (interval_delta * interval)).timetuple()) * 1000,
"y_cord": data[interval * -1],
"x_cord": time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000,
"y_cord": data[hour * -1],
}
)
return results
def get_events_per_day(self) -> list[dict[str, int]]:
"""Get event count by hour in the last day, fill with zeros"""
date_from = now() - timedelta(weeks=4)
result = (
self.filter(created__gte=date_from)
.annotate(age=ExpressionWrapper(now() - F("created"), output_field=DurationField()))
.annotate(age_days=ExtractDay("age"))
.values("age_days")
.annotate(count=Count("pk"))
.order_by("age_days")
)
data = Counter({int(d["age_days"]): d["count"] for d in result})
results = []
_now = now()
for day in range(0, -30, -1):
results.append(
{
"x_cord": time.mktime((_now + timedelta(days=day)).timetuple()) * 1000,
"y_cord": data[day * -1],
}
)
return results
@ -151,14 +165,13 @@ class EventManager(Manager):
"""use custom queryset"""
return EventQuerySet(self.model, using=self._db)
def get_events_per(
self,
time_since: timedelta,
extract: Extract,
data_points: int,
) -> list[dict[str, int]]:
def get_events_per_hour(self) -> list[dict[str, int]]:
"""Wrap method from queryset"""
return self.get_queryset().get_events_per(time_since, extract, data_points)
return self.get_queryset().get_events_per_hour()
def get_events_per_day(self) -> list[dict[str, int]]:
"""Wrap method from queryset"""
return self.get_queryset().get_events_per_day()
class Event(SerializerModel, ExpiringModel):
@ -448,7 +461,7 @@ class NotificationTransport(SerializerModel):
# pyright: reportGeneralTypeIssues=false
return send_mail(mail.__dict__) # pylint: disable=no-value-for-parameter
except (SMTPException, ConnectionError, OSError) as exc:
raise NotificationTransportError(exc) from exc
raise NotificationTransportError from exc
@property
def serializer(self) -> "Serializer":
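
The final hunk above changes the email transport's error path from `raise NotificationTransportError from exc` to `raise NotificationTransportError(exc) from exc`, so the SMTP error text appears in the wrapped exception's own message instead of only in `__cause__`. A minimal, self-contained sketch of that difference:

```python
# Illustrative sketch of the exception-wrapping difference in the hunk above.
class NotificationTransportError(Exception):
    pass


try:
    try:
        raise ConnectionError("SMTP server unreachable")
    except ConnectionError as exc:
        raise NotificationTransportError(exc) from exc  # message carries the cause
except NotificationTransportError as wrapped:
    print(wrapped)            # -> SMTP server unreachable
    print(wrapped.__cause__)  # -> SMTP server unreachable
```

With the bare `raise NotificationTransportError from exc`, `str(wrapped)` would be empty and the detail would only be available via `__cause__`.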

View File

@ -102,7 +102,7 @@ class TaskInfo:
key = CACHE_KEY_PREFIX + self.task_name
if self.result.uid:
key += f"/{self.result.uid}"
self.task_name += f"/{self.result.uid}"
self.task_name += f"_{self.result.uid}"
self.set_prom_metrics()
cache.set(key, self, timeout=timeout_hours * 60 * 60)

View File

@ -22,6 +22,7 @@ SESSION_LOGIN_EVENT = "login_event"
@receiver(user_logged_in)
# pylint: disable=unused-argument
def on_user_logged_in(sender, request: HttpRequest, user: User, **_):
"""Log successful login"""
kwargs = {}
@ -44,12 +45,14 @@ def get_login_event(request: HttpRequest) -> Optional[Event]:
@receiver(user_logged_out)
# pylint: disable=unused-argument
def on_user_logged_out(sender, request: HttpRequest, user: User, **_):
"""Log successfully logout"""
Event.new(EventAction.LOGOUT).from_http(request, user=user)
@receiver(user_write)
# pylint: disable=unused-argument
def on_user_write(sender, request: HttpRequest, user: User, data: dict[str, Any], **kwargs):
"""Log User write"""
data["created"] = kwargs.get("created", False)
@ -57,6 +60,7 @@ def on_user_write(sender, request: HttpRequest, user: User, data: dict[str, Any]
@receiver(login_failed)
# pylint: disable=unused-argument
def on_login_failed(
signal,
sender,
@ -70,6 +74,7 @@ def on_login_failed(
@receiver(invitation_used)
# pylint: disable=unused-argument
def on_invitation_used(sender, request: HttpRequest, invitation: Invitation, **_):
"""Log Invitation usage"""
Event.new(EventAction.INVITE_USED, invitation_uuid=invitation.invite_uuid.hex).from_http(
@ -78,18 +83,21 @@ def on_invitation_used(sender, request: HttpRequest, invitation: Invitation, **_
@receiver(password_changed)
# pylint: disable=unused-argument
def on_password_changed(sender, user: User, password: str, **_):
"""Log password change"""
Event.new(EventAction.PASSWORD_SET).from_http(None, user=user)
@receiver(post_save, sender=Event)
# pylint: disable=unused-argument
def event_post_save_notification(sender, instance: Event, **_):
"""Start task to check if any policies trigger an notification on this event"""
event_notification_handler.delay(instance.event_uuid.hex)
@receiver(pre_delete, sender=User)
# pylint: disable=unused-argument
def event_user_pre_delete_cleanup(sender, instance: User, **_):
"""If gdpr_compliance is enabled, remove all the user's events"""
gdpr_cleanup.delay(instance.pk)

View File

@ -210,6 +210,7 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
},
)
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=unused-argument
def export(self, request: Request, slug: str) -> Response:
"""Export flow to .yaml file"""
flow = self.get_object()
@ -220,6 +221,7 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
@extend_schema(responses={200: FlowDiagramSerializer()})
@action(detail=True, pagination_class=None, filter_backends=[], methods=["get"])
# pylint: disable=unused-argument
def diagram(self, request: Request, slug: str) -> Response:
"""Return diagram for flow with slug `slug`, in the format used by flowchart.js"""
diagram = FlowDiagram(self.get_object(), request.user)
@ -243,6 +245,7 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
methods=["POST"],
parser_classes=(MultiPartParser,),
)
# pylint: disable=unused-argument
def set_background(self, request: Request, slug: str):
"""Set Flow background"""
flow: Flow = self.get_object()
@ -262,6 +265,7 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
filter_backends=[],
methods=["POST"],
)
# pylint: disable=unused-argument
def set_background_url(self, request: Request, slug: str):
"""Set Flow background (as URL)"""
flow: Flow = self.get_object()
@ -274,6 +278,7 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
},
)
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=unused-argument
def execute(self, request: Request, slug: str):
"""Execute flow for current user"""
# Because we pre-plan the flow here, and not in the planner, we need to manually clear

View File

@ -19,6 +19,7 @@ LOGGER = get_logger()
class StageMarker:
"""Base stage marker class, no extra attributes, and has no special handler."""
# pylint: disable=unused-argument
def process(
self,
plan: "FlowPlan",

View File

@ -19,6 +19,7 @@ def delete_cache_prefix(prefix: str) -> int:
@receiver(monitoring_set)
# pylint: disable=unused-argument
def monitoring_set_flows(sender, **kwargs):
"""set flow gauges"""
GAUGE_FLOWS_CACHED.set(len(cache.keys(f"{CACHE_PREFIX}*") or []))
@ -26,6 +27,7 @@ def monitoring_set_flows(sender, **kwargs):
@receiver(post_save)
@receiver(pre_delete)
# pylint: disable=unused-argument
def invalidate_flow_cache(sender, instance, **_):
"""Invalidate flow cache when flow is updated"""
from authentik.flows.models import Flow, FlowStageBinding, Stage

View File

@ -91,6 +91,7 @@ class ChallengeStageView(StageView):
)
return HttpChallengeResponse(challenge)
# pylint: disable=unused-argument
def post(self, request: Request, *args, **kwargs) -> HttpResponse:
"""Handle challenge response"""
challenge: ChallengeResponse = self.get_response_instance(data=request.data)

View File

@ -113,7 +113,7 @@ class InvalidStageError(SentryIgnoredException):
@method_decorator(xframe_options_sameorigin, name="dispatch")
class FlowExecutorView(APIView):
"""Flow executor, passing requests to Stage Views"""
"""Stage 1 Flow executor, passing requests to Stage Views"""
permission_classes = [AllowAny]
@ -162,11 +162,11 @@ class FlowExecutorView(APIView):
token.delete()
if not isinstance(plan, FlowPlan):
return None
plan.context[PLAN_CONTEXT_IS_RESTORED] = True
plan.context[PLAN_CONTEXT_IS_RESTORED] = token
self._logger.debug("f(exec): restored flow plan from token", plan=plan)
return plan
# pylint: disable=too-many-return-statements
# pylint: disable=unused-argument, too-many-return-statements
def dispatch(self, request: HttpRequest, flow_slug: str) -> HttpResponse:
with Hub.current.start_span(
op="authentik.flow.executor.dispatch", description=self.flow.slug

View File

@ -47,6 +47,7 @@ class FlowInspectorPlanSerializer(PassiveSerializer):
"""Get the plan's context, sanitized"""
return sanitize_dict(plan.context)
# pylint: disable=unused-argument
def get_session_id(self, plan: FlowPlan) -> str:
"""Get a unique session ID"""
request: Request = self.context["request"]

View File

@ -5,20 +5,13 @@ from contextlib import contextmanager
from glob import glob
from json import dumps, loads
from json.decoder import JSONDecodeError
from pathlib import Path
from sys import argv, stderr
from time import time
from typing import Any, Optional
from typing import Any
from urllib.parse import urlparse
import yaml
from django.conf import ImproperlyConfigured
from watchdog.events import (
FileModifiedEvent,
FileSystemEvent,
FileSystemEventHandler,
)
from watchdog.observers import Observer
SEARCH_PATHS = ["authentik/lib/default.yml", "/etc/authentik/config.yml", ""] + glob(
"/etc/authentik/config.d/*.yml", recursive=True
@ -45,47 +38,9 @@ class ConfigLoader:
A variable like AUTHENTIK_POSTGRESQL__HOST would translate to postgresql.host"""
loaded_file = []
observer: Observer
class FSObserver(FileSystemEventHandler):
"""File system observer"""
loader: "ConfigLoader"
path: str
container: Optional[dict] = None
key: Optional[str] = None
def __init__(
self,
loader: "ConfigLoader",
path: str,
container: Optional[dict] = None,
key: Optional[str] = None,
) -> None:
super().__init__()
self.loader = loader
self.path = path
self.container = container
self.key = key
def on_any_event(self, event: FileSystemEvent):
if not isinstance(event, FileModifiedEvent):
return
if event.is_directory:
return
if event.src_path != self.path:
return
if self.container and self.key:
with open(self.path, "r", encoding="utf8") as _file:
self.container[self.key] = _file.read()
else:
self.loader.log("info", "Updating from changed file", file=self.path)
self.loader.update_from_file(self.path, watch=False)
def __init__(self):
super().__init__()
self.observer = Observer()
self.observer.start()
self.__config = {}
base_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), "../.."))
for path in SEARCH_PATHS:
@ -126,11 +81,11 @@ class ConfigLoader:
root[key] = self.update(root.get(key, {}), value)
else:
if isinstance(value, str):
value = self.parse_uri(value, root, key)
value = self.parse_uri(value)
root[key] = value
return root
def parse_uri(self, value: str, container: dict[str, Any], key: Optional[str] = None) -> str:
def parse_uri(self, value: str) -> str:
"""Parse string values which start with a URI"""
url = urlparse(value)
if url.scheme == "env":
@ -139,22 +94,12 @@ class ConfigLoader:
try:
with open(url.path, "r", encoding="utf8") as _file:
value = _file.read()
if key:
self.observer.schedule(
ConfigLoader.FSObserver(
self,
url.path,
container,
key,
),
Path(url.path).parent,
)
except OSError as exc:
self.log("error", f"Failed to read config value from {url.path}: {exc}")
value = url.query
return value
def update_from_file(self, path: str, watch=True):
def update_from_file(self, path: str):
"""Update config from file contents"""
try:
with open(path, encoding="utf8") as file:
@ -162,8 +107,6 @@ class ConfigLoader:
self.update(self.__config, yaml.safe_load(file))
self.log("debug", "Loaded config", file=path)
self.loaded_file.append(path)
if watch:
self.observer.schedule(ConfigLoader.FSObserver(self, path), Path(path).parent)
except yaml.YAMLError as exc:
raise ImproperlyConfigured from exc
except PermissionError as exc:
@ -238,12 +181,13 @@ class ConfigLoader:
if comp not in root:
root[comp] = {}
root = root.get(comp, {})
self.parse_uri(value, root, path_parts[-1])
root[path_parts[-1]] = value
def y_bool(self, path: str, default=False) -> bool:
"""Wrapper for y that converts value into boolean"""
return str(self.y(path, default)).lower() == "true"
CONFIG = ConfigLoader()
if __name__ == "__main__":
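
The config loader diff above removes the file-watching observer and the extra `container`/`key` arguments, leaving plain `env://` and `file://` resolution. A minimal sketch of those URI-style values, using the single-argument `parse_uri()` kept on the older side of this compare (paths and values are illustrative):

```python
# Minimal sketch of env:// and file:// config values, assuming the
# single-argument parse_uri() kept on the older side of this diff.
from os import environ, unlink, write
from tempfile import mkstemp

from authentik.lib.config import ConfigLoader

config = ConfigLoader()

# env:// values are resolved from the environment.
environ["foo"] = "bar"
print(config.parse_uri("env://foo"))  # "bar"

# file:// values are replaced with the file's contents.
fd, file_name = mkstemp()
write(fd, b"baz")
print(config.parse_uri(f"file://{file_name}"))  # "baz"
unlink(file_name)
```

The newer side additionally registers a watchdog observer per `file://` value so the loaded value is refreshed when the file changes, as exercised by the removed `test_update` test below.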

View File

@ -64,7 +64,6 @@ outposts:
disable_embedded_outpost: false
ldap:
task_timeout_hours: 2
tls:
ciphers: null

View File

@ -159,6 +159,7 @@ class BaseEvaluator:
raise exc
return result
# pylint: disable=unused-argument
def handle_error(self, exc: Exception, expression_source: str): # pragma: no cover
"""Exception Handler"""
LOGGER.warning("Expression error", exc=exc)

View File

@ -3,6 +3,7 @@ from logging import Logger
from os import getpid
# pylint: disable=unused-argument
def add_process_id(logger: Logger, method_name: str, event_dict):
"""Add the current process ID"""
event_dict["pid"] = getpid()

View File

@ -5,7 +5,7 @@ from tempfile import mkstemp
from django.conf import ImproperlyConfigured
from django.test import TestCase
from authentik.lib.config import CONFIG, ENV_PREFIX, ConfigLoader
from authentik.lib.config import ENV_PREFIX, ConfigLoader
class TestConfig(TestCase):
@ -31,8 +31,8 @@ class TestConfig(TestCase):
"""Test URI parsing (environment)"""
config = ConfigLoader()
environ["foo"] = "bar"
self.assertEqual(config.parse_uri("env://foo", {}), "bar")
self.assertEqual(config.parse_uri("env://foo?bar", {}), "bar")
self.assertEqual(config.parse_uri("env://foo"), "bar")
self.assertEqual(config.parse_uri("env://foo?bar"), "bar")
def test_uri_file(self):
"""Test URI parsing (file load)"""
@ -41,8 +41,8 @@ class TestConfig(TestCase):
write(file, "foo".encode())
_, file2_name = mkstemp()
chmod(file2_name, 0o000) # Remove all permissions so we can't read the file
self.assertEqual(config.parse_uri(f"file://{file_name}", {}), "foo")
self.assertEqual(config.parse_uri(f"file://{file2_name}?def", {}), "def")
self.assertEqual(config.parse_uri(f"file://{file_name}"), "foo")
self.assertEqual(config.parse_uri(f"file://{file2_name}?def"), "def")
unlink(file_name)
unlink(file2_name)
@ -59,13 +59,3 @@ class TestConfig(TestCase):
config.update_from_file(file2_name)
unlink(file_name)
unlink(file2_name)
def test_update(self):
"""Test change to file"""
file, file_name = mkstemp()
write(file, b"test")
CONFIG.y_set("test.file", f"file://{file_name}")
self.assertEqual(CONFIG.y("test.file"), "test")
write(file, "test2")
self.assertEqual(CONFIG.y("test.file"), "test2")
unlink(file_name)

View File

@ -148,6 +148,7 @@ class OutpostViewSet(UsedByMixin, ModelViewSet):
@extend_schema(responses={200: OutpostHealthSerializer(many=True)})
@action(methods=["GET"], detail=True, pagination_class=None)
# pylint: disable=invalid-name, unused-argument
def health(self, request: Request, pk: int) -> Response:
"""Get outposts current health"""
outpost: Outpost = self.get_object()

View File

@ -91,6 +91,7 @@ class ServiceConnectionViewSet(
@extend_schema(responses={200: ServiceConnectionStateSerializer(many=False)})
@action(detail=True, pagination_class=None, filter_backends=[])
# pylint: disable=unused-argument, invalid-name
def state(self, request: Request, pk: str) -> Response:
"""Get the service connection's state"""
connection = self.get_object()

View File

@ -69,6 +69,7 @@ class OutpostConsumer(AuthJsonConsumer):
self.outpost = outpost
self.last_uid = self.channel_name
# pylint: disable=unused-argument
def disconnect(self, code):
if self.outpost and self.last_uid:
state = OutpostState.for_instance_uid(self.outpost, self.last_uid)
@ -126,6 +127,7 @@ class OutpostConsumer(AuthJsonConsumer):
response = WebsocketMessage(instruction=WebsocketMessageInstruction.ACK)
self.send_json(asdict(response))
# pylint: disable=unused-argument
def event_update(self, event): # pragma: no cover
"""Event handler which is called by post_save signals, Send update instruction"""
self.send_json(

View File

@ -183,7 +183,7 @@ class DockerController(BaseController):
try:
self.client.images.pull(image)
except DockerException: # pragma: no cover
image = f"ghcr.io/goauthentik/{self.outpost.type}:latest"
image = f"goauthentik.io/{self.outpost.type}:latest"
self.client.images.pull(image)
return image

View File

@ -23,6 +23,7 @@ UPDATE_TRIGGERING_MODELS = (
@receiver(pre_save, sender=Outpost)
# pylint: disable=unused-argument
def pre_save_outpost(sender, instance: Outpost, **_):
"""Pre-save checks for an outpost, if the name or config.kubernetes_namespace changes,
we call down and then wait for the up after save"""
@ -42,6 +43,7 @@ def pre_save_outpost(sender, instance: Outpost, **_):
@receiver(m2m_changed, sender=Outpost.providers.through)
# pylint: disable=unused-argument
def m2m_changed_update(sender, instance: Model, action: str, **_):
"""Update outpost on m2m change, when providers are added or removed"""
if action in ["post_add", "post_remove", "post_clear"]:
@ -49,6 +51,7 @@ def m2m_changed_update(sender, instance: Model, action: str, **_):
@receiver(post_save)
# pylint: disable=unused-argument
def post_save_update(sender, instance: Model, created: bool, **_):
"""If an Outpost is saved, Ensure that token is created/updated
@ -67,6 +70,7 @@ def post_save_update(sender, instance: Model, created: bool, **_):
@receiver(pre_delete, sender=Outpost)
# pylint: disable=unused-argument
def pre_delete_cleanup(sender, instance: Outpost, **_):
"""Ensure that Outpost's user is deleted (which will delete the token through cascade)"""
instance.user.delete()

View File

@ -144,6 +144,7 @@ class PolicyViewSet(
},
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
# pylint: disable=unused-argument, invalid-name
def test(self, request: Request, pk: str) -> Response:
"""Test policy"""
policy = self.get_object()

View File

@ -52,8 +52,6 @@ class PolicyEngine:
self.empty_result = True
if not isinstance(pbm, PolicyBindingModel): # pragma: no cover
raise ValueError(f"{pbm} is not instance of PolicyBindingModel")
if not user:
raise ValueError("User must be set")
self.__pbm = pbm
self.request = PolicyRequest(user)
self.request.obj = pbm

View File

@ -1,25 +1,14 @@
"""Event Matcher Policy API"""
from django.utils.translation import gettext as _
from rest_framework.fields import ChoiceField
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.used_by import UsedByMixin
from authentik.policies.api.policies import PolicySerializer
from authentik.policies.event_matcher.models import EventMatcherPolicy, app_choices
from authentik.policies.event_matcher.models import EventMatcherPolicy
class EventMatcherPolicySerializer(PolicySerializer):
"""Event Matcher Policy Serializer"""
app = ChoiceField(
choices=app_choices(),
required=False,
help_text=_(
"Match events created by selected application. When left empty, "
"all applications are matched."
),
)
class Meta:
model = EventMatcherPolicy
fields = PolicySerializer.Meta.fields + [

View File

@ -1,25 +0,0 @@
# Generated by Django 4.1.5 on 2023-01-05 09:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"authentik_policies_event_matcher",
"0020_eventmatcherpolicy_authentik_p_policy__e605cf_idx",
),
]
operations = [
migrations.AlterField(
model_name="eventmatcherpolicy",
name="app",
field=models.TextField(
blank=True,
default="",
help_text="Match events created by selected application. When left empty, all applications are matched.",
),
),
]

View File

@ -33,6 +33,7 @@ class EventMatcherPolicy(Policy):
),
)
app = models.TextField(
choices=app_choices(),
blank=True,
default="",
help_text=_(

View File

@ -0,0 +1,24 @@
"""Source API Views"""
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.used_by import UsedByMixin
from authentik.policies.api.policies import PolicySerializer
from authentik.policies.hibp.models import HaveIBeenPwendPolicy
class HaveIBeenPwendPolicySerializer(PolicySerializer):
"""Have I Been Pwned Policy Serializer"""
class Meta:
model = HaveIBeenPwendPolicy
fields = PolicySerializer.Meta.fields + ["password_field", "allowed_count"]
class HaveIBeenPwendPolicyViewSet(UsedByMixin, ModelViewSet):
"""Source Viewset"""
queryset = HaveIBeenPwendPolicy.objects.all()
serializer_class = HaveIBeenPwendPolicySerializer
filterset_fields = "__all__"
search_fields = ["name", "password_field"]
ordering = ["name"]

View File

@ -0,0 +1,11 @@
"""Authentik hibp app config"""
from django.apps import AppConfig
class AuthentikPolicyHIBPConfig(AppConfig):
"""Authentik hibp app config"""
name = "authentik.policies.hibp"
label = "authentik_policies_hibp"
verbose_name = "authentik Policies.HaveIBeenPwned"

View File

@ -0,0 +1,38 @@
# Generated by Django 3.0.6 on 2020-05-19 22:08
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
("authentik_policies", "0001_initial"),
]
operations = [
migrations.CreateModel(
name="HaveIBeenPwendPolicy",
fields=[
(
"policy_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="authentik_policies.Policy",
),
),
("allowed_count", models.IntegerField(default=0)),
],
options={
"verbose_name": "Have I Been Pwned Policy",
"verbose_name_plural": "Have I Been Pwned Policies",
},
bases=("authentik_policies.policy",),
),
]

View File

@ -0,0 +1,21 @@
# Generated by Django 3.0.8 on 2020-07-10 18:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_policies_hibp", "0001_initial"),
]
operations = [
migrations.AddField(
model_name="haveibeenpwendpolicy",
name="password_field",
field=models.TextField(
default="password",
help_text="Field key to check, field keys defined in Prompt stages are available.",
),
),
]

View File

@ -0,0 +1,17 @@
# Generated by Django 4.1.2 on 2022-10-19 19:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_policies_hibp", "0002_haveibeenpwendpolicy_password_field"),
]
operations = [
migrations.AddIndex(
model_name="haveibeenpwendpolicy",
index=models.Index(fields=["policy_ptr_id"], name="authentik_p_policy__6957d7_idx"),
),
]

View File

@ -0,0 +1,71 @@
"""authentik HIBP Models"""
from hashlib import sha1
from django.db import models
from django.utils.translation import gettext as _
from rest_framework.serializers import BaseSerializer
from structlog.stdlib import get_logger
from authentik.lib.utils.http import get_http_session
from authentik.policies.models import Policy, PolicyResult
from authentik.policies.types import PolicyRequest
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
LOGGER = get_logger()
class HaveIBeenPwendPolicy(Policy):
"""DEPRECATED. Check if password is on HaveIBeenPwned's list by uploading the first
5 characters of the SHA1 Hash."""
password_field = models.TextField(
default="password",
help_text=_("Field key to check, field keys defined in Prompt stages are available."),
)
allowed_count = models.IntegerField(default=0)
@property
def serializer(self) -> type[BaseSerializer]:
from authentik.policies.hibp.api import HaveIBeenPwendPolicySerializer
return HaveIBeenPwendPolicySerializer
@property
def component(self) -> str:
return "ak-policy-hibp-form"
def passes(self, request: PolicyRequest) -> PolicyResult:
"""Check if password is in HIBP DB. Hashes given Password with SHA1, uses the first 5
characters of Password in request and checks if full hash is in response. Returns 0
if Password is not in result otherwise the count of how many times it was used."""
password = request.context.get(PLAN_CONTEXT_PROMPT, {}).get(
self.password_field, request.context.get(self.password_field)
)
if not password:
LOGGER.warning(
"Password field not set in Policy Request",
field=self.password_field,
fields=request.context.keys(),
)
return PolicyResult(False, _("Password not set in context"))
password = str(password)
pw_hash = sha1(password.encode("utf-8")).hexdigest() # nosec
url = f"https://api.pwnedpasswords.com/range/{pw_hash[:5]}"
result = get_http_session().get(url).text
final_count = 0
for line in result.split("\r\n"):
full_hash, count = line.split(":")
if pw_hash[5:] == full_hash.lower():
final_count = int(count)
LOGGER.debug("got hibp result", count=final_count, hash=pw_hash[:5])
if final_count > self.allowed_count:
message = _("Password exists on %(count)d online lists." % {"count": final_count})
return PolicyResult(False, message)
return PolicyResult(True)
class Meta(Policy.PolicyMeta):
verbose_name = _("Have I Been Pwned Policy")
verbose_name_plural = _("Have I Been Pwned Policies")
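
`HaveIBeenPwendPolicy.passes()` above implements the Pwned Passwords k-anonymity range lookup: only the first five characters of the SHA-1 hash are sent, and the response is scanned locally for the remaining suffix. A standalone sketch of that lookup, using plain `requests` instead of authentik's `get_http_session()`:

```python
# Standalone sketch of the k-anonymity range lookup used by the policy above.
from hashlib import sha1

import requests


def pwned_count(password: str) -> int:
    """Return how often `password` appears in the Pwned Passwords corpus."""
    pw_hash = sha1(password.encode("utf-8")).hexdigest().upper()  # nosec
    prefix, suffix = pw_hash[:5], pw_hash[5:]
    response = requests.get(f"https://api.pwnedpasswords.com/range/{prefix}")
    for line in response.text.splitlines():
        candidate, count = line.split(":")
        if candidate == suffix:
            return int(count)
    return 0


print(pwned_count("password"))  # large count for a well-known password
```

The policy additionally compares the resulting count against `allowed_count` before failing the request.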

View File

@ -0,0 +1,44 @@
"""HIBP Policy tests"""
from django.test import TestCase
from guardian.shortcuts import get_anonymous_user
from authentik.lib.generators import generate_key
from authentik.policies.hibp.models import HaveIBeenPwendPolicy
from authentik.policies.types import PolicyRequest, PolicyResult
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
class TestHIBPPolicy(TestCase):
"""Test HIBP Policy"""
def test_invalid(self):
"""Test without password"""
policy = HaveIBeenPwendPolicy.objects.create(
name="test_invalid",
)
request = PolicyRequest(get_anonymous_user())
result: PolicyResult = policy.passes(request)
self.assertFalse(result.passing)
self.assertEqual(result.messages[0], "Password not set in context")
def test_false(self):
"""Failing password case"""
policy = HaveIBeenPwendPolicy.objects.create(
name="test_false",
)
request = PolicyRequest(get_anonymous_user())
request.context[PLAN_CONTEXT_PROMPT] = {"password": "password"} # nosec
result: PolicyResult = policy.passes(request)
self.assertFalse(result.passing)
self.assertTrue(result.messages[0].startswith("Password exists on "))
def test_true(self):
"""Positive password case"""
policy = HaveIBeenPwendPolicy.objects.create(
name="test_true",
)
request = PolicyRequest(get_anonymous_user())
request.context[PLAN_CONTEXT_PROMPT] = {"password": generate_key()}
result: PolicyResult = policy.passes(request)
self.assertTrue(result.passing)
self.assertEqual(result.messages, tuple())

View File

@ -1,10 +1,34 @@
# Generated by Django 4.1.3 on 2022-11-14 09:23
from django.apps.registry import Apps
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def migrate_hibp_policy(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
db_alias = schema_editor.connection.alias
HaveIBeenPwendPolicy = apps.get_model("authentik_policies_hibp", "HaveIBeenPwendPolicy")
PasswordPolicy = apps.get_model("authentik_policies_password", "PasswordPolicy")
PolicyBinding = apps.get_model("authentik_policies", "PolicyBinding")
for old_policy in HaveIBeenPwendPolicy.objects.using(db_alias).all():
new_policy = PasswordPolicy.objects.using(db_alias).create(
name=old_policy.name,
hibp_allowed_count=old_policy.allowed_count,
password_field=old_policy.password_field,
execution_logging=old_policy.execution_logging,
check_static_rules=False,
check_have_i_been_pwned=True,
)
PolicyBinding.objects.using(db_alias).filter(policy=old_policy).update(policy=new_policy)
old_policy.delete()
class Migration(migrations.Migration):
dependencies = [
("authentik_policies_hibp", "0003_haveibeenpwendpolicy_authentik_p_policy__6957d7_idx"),
("authentik_policies_password", "0004_passwordpolicy_authentik_p_policy__855e80_idx"),
]
@ -45,4 +69,5 @@ class Migration(migrations.Migration):
name="error_message",
field=models.TextField(blank=True),
),
migrations.RunPython(migrate_hibp_policy),
]

View File

@ -4,7 +4,6 @@ from django.urls.base import reverse
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.flows.models import FlowDesignation, FlowStageBinding
from authentik.flows.tests import FlowTestCase
from authentik.lib.generators import generate_id
from authentik.policies.password.models import PasswordPolicy
from authentik.stages.prompt.models import FieldTypes, Prompt, PromptStage
@ -17,7 +16,6 @@ class TestPasswordPolicyFlow(FlowTestCase):
self.flow = create_test_flow(FlowDesignation.AUTHENTICATION)
password_prompt = Prompt.objects.create(
name=generate_id(),
field_key="password",
label="PASSWORD_LABEL",
type=FieldTypes.PASSWORD,

View File

@ -138,5 +138,5 @@ class PolicyProcess(PROCESS_CLASS):
try:
self.connection.send(self.profiling_wrapper())
except Exception as exc: # pylint: disable=broad-except
LOGGER.warning("Policy failed to run", exc=exception_to_string(exc))
LOGGER.warning("Policy failed to run", exc=exc)
self.connection.send(PolicyResult(False, str(exc)))

View File

@ -37,6 +37,7 @@ def update_score(request: HttpRequest, identifier: str, amount: int):
@receiver(login_failed)
# pylint: disable=unused-argument
def handle_failed_login(sender, request, credentials, **_):
"""Lower Score for failed login attempts"""
if "username" in credentials:
@ -44,12 +45,14 @@ def handle_failed_login(sender, request, credentials, **_):
@receiver(identification_failed)
# pylint: disable=unused-argument
def handle_identification_failed(sender, request, uid_field: str, **_):
"""Lower Score for failed identification attempts"""
update_score(request, uid_field, -1)
@receiver(user_logged_in)
# pylint: disable=unused-argument
def handle_successful_login(sender, request, user, **_):
"""Raise score for successful attempts"""
update_score(request, user.username, 1)

View File

@ -13,12 +13,14 @@ LOGGER = get_logger()
@receiver(monitoring_set)
# pylint: disable=unused-argument
def monitoring_set_policies(sender, **kwargs):
"""set policy gauges"""
GAUGE_POLICIES_CACHED.set(len(cache.keys(f"{CACHE_PREFIX}_*") or []))
@receiver(post_save)
# pylint: disable=unused-argument
def invalidate_policy_cache(sender, instance, **_):
"""Invalidate Policy cache when policy is updated"""
from authentik.policies.models import Policy, PolicyBinding

Some files were not shown because too many files have changed in this diff.