Compare commits


1 Commit

commit 115973376f
Author: Tana M Berry <tanamarieberry@yahoo.com>
Date:   2023-07-20 18:34:29 -05:00

    Updated hackathon page

    Added exact UTC times, info about Discord, emotes, and "legalese" about prize money

    Signed-off-by: Tana M Berry <tanamarieberry@yahoo.com>
647 changed files with 10128 additions and 23694 deletions

View File

@ -1,5 +1,5 @@
[bumpversion]
current_version = 2023.8.1
current_version = 2023.6.1
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)

View File

@ -7,4 +7,3 @@ build/**
build_docs/**
Dockerfile
authentik/enterprise
blueprints/local

View File

@ -14,7 +14,7 @@ runs:
run: |
pipx install poetry || true
sudo apt update
sudo apt install -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
sudo apt install -y libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry
uses: actions/setup-python@v3
with:

View File

@ -1,2 +0,0 @@
enabled: true
preservePullRequestTitle: true

View File

@ -8,8 +8,6 @@ updates:
open-pull-requests-limit: 10
commit-message:
prefix: "ci:"
labels:
- dependencies
- package-ecosystem: gomod
directory: "/"
schedule:
@ -18,15 +16,11 @@ updates:
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies
- package-ecosystem: npm
directory: "/web"
schedule:
interval: daily
time: "04:00"
labels:
- dependencies
open-pull-requests-limit: 10
commit-message:
prefix: "web:"
@ -38,18 +32,10 @@ updates:
patterns:
- "@babel/*"
- "babel-*"
eslint:
patterns:
- "@typescript-eslint/eslint-*"
- "eslint"
- "eslint-*"
storybook:
patterns:
- "@storybook/*"
- "*storybook*"
esbuild:
patterns:
- "@esbuild/*"
- package-ecosystem: npm
directory: "/website"
schedule:
@ -58,8 +44,6 @@ updates:
open-pull-requests-limit: 10
commit-message:
prefix: "website:"
labels:
- dependencies
groups:
docusaurus:
patterns:
@ -72,8 +56,6 @@ updates:
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies
- package-ecosystem: docker
directory: "/"
schedule:
@ -82,5 +64,3 @@ updates:
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies

View File

@ -1,19 +1,23 @@
<!--
👋 Hi there! Welcome.
👋 Hello there! Welcome.
Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute
Please check the [Contributing guidelines](https://goauthentik.io/developer-docs/#how-can-i-contribute).
-->
## Details
<!--
Explain what this PR changes, what the rationale behind the change is, if any new requirements are introduced or any breaking changes caused by this PR.
- **Does this resolve an issue?**
Resolves #
Ideally also link an Issue for context that this PR will close using `closes #`
-->
REPLACE ME
## Changes
---
### New Features
- Adds feature which does x, y, and z.
### Breaking Changes
- Adds breaking change which causes \<issue\>.
## Checklist

View File

@ -88,8 +88,8 @@ jobs:
fail-fast: false
matrix:
psql:
- 11-alpine
- 12-alpine
- 15-alpine
steps:
- uses: actions/checkout@v3
- name: Setup authentik env

View File

@ -120,7 +120,7 @@ jobs:
- uses: actions/setup-go@v4
with:
go-version-file: "go.mod"
- uses: actions/setup-node@v3.8.1
- uses: actions/setup-node@v3.7.0
with:
node-version: "20"
cache: "npm"

View File

@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.8.1
- uses: actions/setup-node@v3.7.0
with:
node-version: "20"
cache: "npm"
@ -31,7 +31,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.8.1
- uses: actions/setup-node@v3.7.0
with:
node-version: "20"
cache: "npm"
@ -47,7 +47,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.8.1
- uses: actions/setup-node@v3.7.0
with:
node-version: "20"
cache: "npm"
@ -63,7 +63,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.8.1
- uses: actions/setup-node@v3.7.0
with:
node-version: "20"
cache: "npm"
@ -95,7 +95,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.8.1
- uses: actions/setup-node@v3.7.0
with:
node-version: "20"
cache: "npm"

View File

@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.8.1
- uses: actions/setup-node@v3.7.0
with:
node-version: "20"
cache: "npm"
@ -29,7 +29,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.8.1
- uses: actions/setup-node@v3.7.0
with:
node-version: "20"
cache: "npm"
@ -50,7 +50,7 @@ jobs:
- build-docs-only
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3.8.1
- uses: actions/setup-node@v3.7.0
with:
node-version: "20"
cache: "npm"

View File

@ -1,34 +0,0 @@
---
# See https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#force-deleting-cache-entries
name: Cleanup cache after PR is closed
on:
pull_request:
types:
- closed
jobs:
cleanup:
runs-on: ubuntu-latest
steps:
- name: Check out code
uses: actions/checkout@v3
- name: Cleanup
run: |
gh extension install actions/gh-actions-cache
REPO=${{ github.repository }}
BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge"
echo "Fetching list of cache key"
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
# Setting this to not fail the workflow while deleting cache keys.
set +e
echo "Deleting caches..."
for cacheKey in $cacheKeysForPR; do
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
done
echo "Done"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -1,61 +0,0 @@
---
name: authentik-compress-images
on:
push:
branches:
- main
paths:
- "**.jpg"
- "**.jpeg"
- "**.png"
- "**.webp"
pull_request:
paths:
- "**.jpg"
- "**.jpeg"
- "**.png"
- "**.webp"
workflow_dispatch:
jobs:
compress:
name: compress
runs-on: ubuntu-latest
# Don't run on forks. Token will not be available. Will run on main and open a PR anyway
if: |
github.repository == 'goauthentik/authentik' &&
(github.event_name != 'pull_request' ||
github.event.pull_request.head.repo.full_name == github.repository)
steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v3
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Compress images
id: compress
uses: calibreapp/image-actions@main
with:
githubToken: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }}
- uses: peter-evans/create-pull-request@v5
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
title: "*: Auto compress images"
branch-suffix: timestamp
commit-message: "*: compress images"
body: ${{ steps.compress.outputs.markdown }}
delete-branch: true
signoff: true
- uses: peter-evans/enable-pull-request-automerge@v3
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash

View File

@ -1,31 +0,0 @@
name: authentik-publish-source-docs
on:
push:
branches:
- main
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
publish-source-docs:
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: generate docs
run: |
poetry run make migrate
poetry run ak build_source_docs
- name: Publish
uses: netlify/actions/cli@master
with:
args: deploy --dir=source_docs --prod
env:
NETLIFY_SITE_ID: eb246b7b-1d83-4f69-89f7-01a936b4ca59
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}

View File

@ -110,7 +110,7 @@ jobs:
- uses: actions/setup-go@v4
with:
go-version-file: "go.mod"
- uses: actions/setup-node@v3.8.1
- uses: actions/setup-node@v3.7.0
with:
node-version: "20"
cache: "npm"

View File

@ -1,45 +0,0 @@
# Rename transifex pull requests to have correct naming
# Also enables auto squash-merge
name: authentik-translation-transifex-rename
on:
pull_request:
types: [opened, reopened]
jobs:
rename_pr:
runs-on: ubuntu-latest
if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Get current title
id: title
env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: |
title=$(curl -q -L \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title)
echo "title=${title}" >> "$GITHUB_OUTPUT"
- name: Rename
env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: |
curl -L \
-X PATCH \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \
-d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}"
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ github.event.pull_request.number }}
merge-method: squash

View File

@ -17,7 +17,7 @@ jobs:
- uses: actions/checkout@v3
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/setup-node@v3.8.1
- uses: actions/setup-node@v3.7.0
with:
node-version: "20"
registry-url: "https://registry.npmjs.org"

.gitignore
View File

@ -205,4 +205,3 @@ data/
# Local Netlify folder
.netlify
.ruff_cache
source_docs/

View File

@ -31,8 +31,7 @@
"!Format sequence",
"!Condition sequence",
"!Env sequence",
"!Env scalar",
"!If sequence"
"!Env scalar"
],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.importModuleSpecifierEnding": "index",

View File

@ -20,7 +20,7 @@ WORKDIR /work/web
RUN npm ci --include=dev && npm run build
# Stage 3: Poetry to requirements.txt export
FROM docker.io/python:3.11.5-slim-bookworm AS poetry-locker
FROM docker.io/python:3.11.4-slim-bullseye AS poetry-locker
WORKDIR /work
COPY ./pyproject.toml /work
@ -31,7 +31,7 @@ RUN pip install --no-cache-dir poetry && \
poetry export -f requirements.txt --dev --output requirements-dev.txt
# Stage 4: Build go proxy
FROM docker.io/golang:1.21.0-bookworm AS go-builder
FROM docker.io/golang:1.20.6-bullseye AS go-builder
WORKDIR /work
@ -39,13 +39,12 @@ COPY --from=web-builder /work/web/robots.txt /work/web/robots.txt
COPY --from=web-builder /work/web/security.txt /work/web/security.txt
COPY ./cmd /work/cmd
COPY ./authentik/lib /work/authentik/lib
COPY ./web/static.go /work/web/static.go
COPY ./internal /work/internal
COPY ./go.mod /work/go.mod
COPY ./go.sum /work/go.sum
RUN go build -o /work/bin/authentik ./cmd/server/
RUN go build -o /work/authentik ./cmd/server/
# Stage 5: MaxMind GeoIP
FROM ghcr.io/maxmind/geoipupdate:v6.0 as geoip
@ -62,7 +61,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
# Stage 6: Run
FROM docker.io/python:3.11.5-slim-bookworm AS final-image
FROM docker.io/python:3.11.4-slim-bullseye AS final-image
ARG GIT_BUILD_HASH
ARG VERSION
@ -82,13 +81,13 @@ COPY --from=geoip /usr/share/GeoIP /geoip
RUN apt-get update && \
# Required for installing pip packages
apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev python3-dev && \
apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev && \
# Required for runtime
apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 && \
apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
# Required for bootstrap & healthcheck
apt-get install -y --no-install-recommends runit && \
pip install --no-cache-dir -r /requirements.txt && \
apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev libpq-dev python3-dev && \
apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev && \
apt-get autoremove --purge -y && \
apt-get clean && \
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
@ -105,7 +104,7 @@ COPY ./tests /tests
COPY ./manage.py /
COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle
COPY --from=go-builder /work/bin/authentik /bin/authentik
COPY --from=go-builder /work/authentik /bin/authentik
COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=website-builder /work/website/help/ /website/help/

View File

@ -140,15 +140,13 @@ web-watch:
touch web/dist/.gitkeep
cd web && npm run watch
web-storybook-watch:
cd web && npm run storybook
web-lint-fix:
cd web && npm run prettier
web-lint:
cd web && npm run lint
cd web && npm run lit-analyse
# TODO: The analyzer hasn't run correctly in awhile.
# cd web && npm run lit-analyse
web-check-compile:
cd web && npm run tsc

View File

@ -16,8 +16,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardening
| Version | Supported |
| --- | --- |
| 2023.5.x | ✅ |
| 2023.6.x | ✅ |
| 2023.8.x | ✅ |
## Reporting a Vulnerability
@ -27,8 +27,6 @@ To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io)
authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories:
| Score | Severity |
| --- | --- |
| 0.0 | None |
| 0.1 – 3.9 | Low |
| 4.0 – 6.9 | Medium |

View File

@ -1,8 +1,8 @@
"""authentik root module"""
"""authentik"""
from os import environ
from typing import Optional
__version__ = "2023.8.1"
__version__ = "2023.6.1"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@ -2,43 +2,6 @@
from rest_framework import pagination
from rest_framework.response import Response
PAGINATION_COMPONENT_NAME = "Pagination"
PAGINATION_SCHEMA = {
"type": "object",
"properties": {
"next": {
"type": "number",
},
"previous": {
"type": "number",
},
"count": {
"type": "number",
},
"current": {
"type": "number",
},
"total_pages": {
"type": "number",
},
"start_index": {
"type": "number",
},
"end_index": {
"type": "number",
},
},
"required": [
"next",
"previous",
"count",
"current",
"total_pages",
"start_index",
"end_index",
],
}
class Pagination(pagination.PageNumberPagination):
"""Pagination which includes total pages and current page"""
@ -72,7 +35,41 @@ class Pagination(pagination.PageNumberPagination):
return {
"type": "object",
"properties": {
"pagination": {"$ref": f"#/components/schemas/{PAGINATION_COMPONENT_NAME}"},
"pagination": {
"type": "object",
"properties": {
"next": {
"type": "number",
},
"previous": {
"type": "number",
},
"count": {
"type": "number",
},
"current": {
"type": "number",
},
"total_pages": {
"type": "number",
},
"start_index": {
"type": "number",
},
"end_index": {
"type": "number",
},
},
"required": [
"next",
"previous",
"count",
"current",
"total_pages",
"start_index",
"end_index",
],
},
"results": schema,
},
"required": ["pagination", "results"],

View File

@ -1,6 +1,5 @@
"""Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.generators import SchemaGenerator
from drf_spectacular.plumbing import (
ResolvedComponent,
build_array_type,
@ -9,9 +8,6 @@ from drf_spectacular.plumbing import (
)
from drf_spectacular.settings import spectacular_settings
from drf_spectacular.types import OpenApiTypes
from rest_framework.settings import api_settings
from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA
def build_standard_type(obj, **kwargs):
@ -32,7 +28,7 @@ GENERIC_ERROR = build_object_type(
VALIDATION_ERROR = build_object_type(
description=_("Validation Error"),
properties={
api_settings.NON_FIELD_ERRORS_KEY: build_array_type(build_standard_type(OpenApiTypes.STR)),
"non_field_errors": build_array_type(build_standard_type(OpenApiTypes.STR)),
"code": build_standard_type(OpenApiTypes.STR),
},
required=[],
@ -40,19 +36,7 @@ VALIDATION_ERROR = build_object_type(
)
def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedComponent.SCHEMA):
"""Register a component and return a reference to it."""
component = ResolvedComponent(
name=name,
type=type_,
schema=schema,
object=name,
)
generator.registry.register_on_missing(component)
return component
def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): # noqa: W0613
def postprocess_schema_responses(result, generator, **kwargs): # noqa: W0613
"""Workaround to set a default response for endpoints.
Workaround suggested at
<https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357>
@ -60,10 +44,19 @@ def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
<https://github.com/tfranzel/drf-spectacular/issues/101>.
"""
create_component(generator, PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA)
def create_component(name, schema, type_=ResolvedComponent.SCHEMA):
"""Register a component and return a reference to it."""
component = ResolvedComponent(
name=name,
type=type_,
schema=schema,
object=name,
)
generator.registry.register_on_missing(component)
return component
generic_error = create_component(generator, "GenericError", GENERIC_ERROR)
validation_error = create_component(generator, "ValidationError", VALIDATION_ERROR)
generic_error = create_component("GenericError", GENERIC_ERROR)
validation_error = create_component("ValidationError", VALIDATION_ERROR)
for path in result["paths"].values():
for method in path.values():

View File

@ -93,10 +93,10 @@ class ConfigView(APIView):
"traces_sample_rate": float(CONFIG.get("error_reporting.sample_rate", 0.4)),
},
"capabilities": self.get_capabilities(),
"cache_timeout": CONFIG.get_int("redis.cache_timeout"),
"cache_timeout_flows": CONFIG.get_int("redis.cache_timeout_flows"),
"cache_timeout_policies": CONFIG.get_int("redis.cache_timeout_policies"),
"cache_timeout_reputation": CONFIG.get_int("redis.cache_timeout_reputation"),
"cache_timeout": int(CONFIG.get("redis.cache_timeout")),
"cache_timeout_flows": int(CONFIG.get("redis.cache_timeout_flows")),
"cache_timeout_policies": int(CONFIG.get("redis.cache_timeout_policies")),
"cache_timeout_reputation": int(CONFIG.get("redis.cache_timeout_reputation")),
}
)

View File

@ -21,14 +21,9 @@ _other_urls = []
for _authentik_app in get_apps():
try:
api_urls = import_module(f"{_authentik_app.name}.urls")
except (ModuleNotFoundError, ImportError) as exc:
LOGGER.warning("Could not import app's URLs", app_name=_authentik_app.name, exc=exc)
except (ModuleNotFoundError, ImportError):
continue
if not hasattr(api_urls, "api_urlpatterns"):
LOGGER.debug(
"App does not define API URLs",
app_name=_authentik_app.name,
)
continue
urls: list = getattr(api_urls, "api_urlpatterns")
for url in urls:

View File

@ -45,8 +45,3 @@ entries:
attrs:
name: "%(uid)s"
password: "%(uid)s"
- model: authentik_core.user
identifiers:
username: "%(uid)s-no-password"
attrs:
name: "%(uid)s"

View File

@ -7,5 +7,7 @@ entries:
state: absent
- identifiers:
name: "%(id)s"
expression: |
return True
model: authentik_policies_expression.expressionpolicy
state: absent

View File

@ -9,8 +9,6 @@ context:
mapping:
key1: value
key2: 2
context1: context-nested-value
context2: !Context context1
entries:
- model: !Format ["%s", authentik_sources_oauth.oauthsource]
state: !Format ["%s", present]
@ -36,7 +34,6 @@ entries:
model: authentik_policies_expression.expressionpolicy
- attrs:
attributes:
env_null: !Env [bar-baz, null]
policy_pk1:
!Format [
"%s-%s",
@ -100,7 +97,6 @@ entries:
[list, with, items, !Format ["foo-%s", !Context foo]],
]
if_true_simple: !If [!Context foo, true, text]
if_short: !If [!Context foo]
if_false_simple: !If [null, false, 2]
enumerate_mapping_to_mapping: !Enumerate [
!Context mapping,
@ -145,7 +141,6 @@ entries:
]
]
]
nested_context: !Context context2
identifiers:
name: test
conditions:

View File

@ -155,7 +155,6 @@ class TestBlueprintsV1(TransactionTestCase):
},
"if_false_complex": ["list", "with", "items", "foo-bar"],
"if_true_simple": True,
"if_short": True,
"if_false_simple": 2,
"enumerate_mapping_to_mapping": {
"prefix-key1": "other-prefix-value",
@ -212,10 +211,8 @@ class TestBlueprintsV1(TransactionTestCase):
],
},
},
"nested_context": "context-nested-value",
"env_null": None,
}
).exists()
)
)
self.assertTrue(
OAuthSource.objects.filter(

View File

@ -51,9 +51,3 @@ class TestBlueprintsV1ConditionalFields(TransactionTestCase):
user: User = User.objects.filter(username=self.uid).first()
self.assertIsNotNone(user)
self.assertTrue(user.check_password(self.uid))
def test_user_null(self):
"""Test user"""
user: User = User.objects.filter(username=f"{self.uid}-no-password").first()
self.assertIsNotNone(user)
self.assertFalse(user.has_usable_password())

View File

@ -223,11 +223,11 @@ class Env(YAMLTag):
if isinstance(node, ScalarNode):
self.key = node.value
if isinstance(node, SequenceNode):
self.key = loader.construct_object(node.value[0])
self.default = loader.construct_object(node.value[1])
self.key = node.value[0].value
self.default = node.value[1].value
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
return getenv(self.key) or self.default
return getenv(self.key, self.default)
class Context(YAMLTag):
@ -242,15 +242,13 @@ class Context(YAMLTag):
if isinstance(node, ScalarNode):
self.key = node.value
if isinstance(node, SequenceNode):
self.key = loader.construct_object(node.value[0])
self.default = loader.construct_object(node.value[1])
self.key = node.value[0].value
self.default = node.value[1].value
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
value = self.default
if self.key in blueprint.context:
value = blueprint.context[self.key]
if isinstance(value, YAMLTag):
return value.resolve(entry, blueprint)
return value
@ -262,7 +260,7 @@ class Format(YAMLTag):
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.format_string = loader.construct_object(node.value[0])
self.format_string = node.value[0].value
self.args = []
for raw_node in node.value[1:]:
self.args.append(loader.construct_object(raw_node))
@ -341,7 +339,7 @@ class Condition(YAMLTag):
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.mode = loader.construct_object(node.value[0])
self.mode = node.value[0].value
self.args = []
for raw_node in node.value[1:]:
self.args.append(loader.construct_object(raw_node))
@ -374,12 +372,8 @@ class If(YAMLTag):
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.condition = loader.construct_object(node.value[0])
if len(node.value) == 1:
self.when_true = True
self.when_false = False
else:
self.when_true = loader.construct_object(node.value[1])
self.when_false = loader.construct_object(node.value[2])
self.when_true = loader.construct_object(node.value[1])
self.when_false = loader.construct_object(node.value[2])
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
if isinstance(self.condition, YAMLTag):
@ -416,7 +410,7 @@ class Enumerate(YAMLTag, YAMLTagContext):
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.iterable = loader.construct_object(node.value[0])
self.output_body = loader.construct_object(node.value[1])
self.output_body = node.value[1].value
self.item_body = loader.construct_object(node.value[2])
self.__current_context: tuple[Any, Any] = tuple()
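Among the tag changes above, the `Env` tag's `resolve` differs subtly between the two sides: `getenv(key) or default` also falls back when the variable is set but empty, while `getenv(key, default)` falls back only when the variable is unset entirely. A minimal sketch:

```python
import os

# A variable that is set, but empty:
os.environ["EXAMPLE_VAR"] = ""

# `or`-style fallback: the empty string is falsy, so the default wins.
assert (os.getenv("EXAMPLE_VAR") or "fallback") == "fallback"

# Two-argument getenv: the default applies only when the variable is unset.
assert os.getenv("EXAMPLE_VAR", "fallback") == ""
```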

View File

@ -35,7 +35,6 @@ from authentik.core.models import (
Source,
UserSourceConnection,
)
from authentik.events.utils import cleanse_dict
from authentik.flows.models import FlowToken, Stage
from authentik.lib.models import SerializerModel
from authentik.outposts.models import OutpostServiceConnection
@ -200,6 +199,9 @@ class Importer:
serializer_kwargs = {}
model_instance = existing_models.first()
if not isinstance(model(), BaseMetaModel) and model_instance:
if entry.get_state(self.__import) == BlueprintEntryDesiredState.CREATED:
self.logger.debug("instance exists, skipping")
return None
self.logger.debug(
"initialise serializer with instance",
model=model,
@ -210,9 +212,7 @@ class Importer:
serializer_kwargs["partial"] = True
else:
self.logger.debug(
"initialised new serializer instance",
model=model,
**cleanse_dict(updated_identifiers),
"initialised new serializer instance", model=model, **updated_identifiers
)
model_instance = model()
# pk needs to be set on the model instance otherwise a new one will be generated
@ -268,34 +268,21 @@ class Importer:
try:
serializer = self._validate_single(entry)
except EntryInvalidError as exc:
# For deleting objects we don't need the serializer to be valid
if entry.get_state(self.__import) == BlueprintEntryDesiredState.ABSENT:
continue
self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
return False
if not serializer:
continue
state = entry.get_state(self.__import)
if state in [BlueprintEntryDesiredState.PRESENT, BlueprintEntryDesiredState.CREATED]:
instance = serializer.instance
if (
instance
and not instance._state.adding
and state == BlueprintEntryDesiredState.CREATED
):
self.logger.debug(
"instance exists, skipping",
model=model,
instance=instance,
pk=instance.pk,
)
else:
instance = serializer.save()
self.logger.debug("updated model", model=instance)
if state in [
BlueprintEntryDesiredState.PRESENT,
BlueprintEntryDesiredState.CREATED,
]:
model = serializer.save()
if "pk" in entry.identifiers:
self.__pk_map[entry.identifiers["pk"]] = instance.pk
entry._state = BlueprintEntryState(instance)
self.__pk_map[entry.identifiers["pk"]] = model.pk
entry._state = BlueprintEntryState(model)
self.logger.debug("updated model", model=model)
elif state == BlueprintEntryDesiredState.ABSENT:
instance: Optional[Model] = serializer.instance
if instance.pk:
@ -322,6 +309,5 @@ class Importer:
self.logger.debug("Blueprint validation failed")
for log in logs:
getattr(self.logger, log.get("log_level"))(**log)
self.logger.debug("Finished blueprint import validation")
self.__import = orig_import
return successful, logs

View File

@ -31,7 +31,7 @@ class ApplyBlueprintMetaSerializer(PassiveSerializer):
required = attrs["required"]
instance = BlueprintInstance.objects.filter(**identifiers).first()
if not instance and required:
raise ValidationError({"identifiers": "Required blueprint does not exist"})
raise ValidationError("Required blueprint does not exist")
self.blueprint_instance = instance
return super().validate(attrs)

View File

@ -49,7 +49,7 @@ class GroupSerializer(ModelSerializer):
users_obj = ListSerializer(
child=GroupMemberSerializer(), read_only=True, source="users", required=False
)
parent_name = CharField(source="parent.name", read_only=True, allow_null=True)
parent_name = CharField(source="parent.name", read_only=True)
num_pk = IntegerField(read_only=True)

View File

@ -47,7 +47,7 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):
attrs.setdefault("user", request.user)
attrs.setdefault("intent", TokenIntents.INTENT_API)
if attrs.get("intent") not in [TokenIntents.INTENT_API, TokenIntents.INTENT_APP_PASSWORD]:
raise ValidationError({"intent": f"Invalid intent {attrs.get('intent')}"})
raise ValidationError(f"Invalid intent {attrs.get('intent')}")
return attrs
class Meta:

View File

@ -15,13 +15,7 @@ from django.utils.http import urlencode
from django.utils.text import slugify
from django.utils.timezone import now
from django.utils.translation import gettext as _
from django_filters.filters import (
BooleanFilter,
CharFilter,
ModelMultipleChoiceFilter,
MultipleChoiceFilter,
UUIDFilter,
)
from django_filters.filters import BooleanFilter, CharFilter, ModelMultipleChoiceFilter, UUIDFilter
from django_filters.filterset import FilterSet
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import (
@ -123,35 +117,27 @@ class UserSerializer(ModelSerializer):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
self.fields["password"] = CharField(required=False, allow_null=True)
self.fields["password"] = CharField(required=False)
def create(self, validated_data: dict) -> User:
"""If this serializer is used in the blueprint context, we allow for
directly setting a password. However, this should be done via the `set_password`
method instead of directly setting it like rest_framework."""
password = validated_data.pop("password", None)
instance: User = super().create(validated_data)
self._set_password(instance, password)
if SERIALIZER_CONTEXT_BLUEPRINT in self.context and "password" in validated_data:
instance.set_password(validated_data["password"])
instance.save()
return instance
def update(self, instance: User, validated_data: dict) -> User:
"""Same as `create` above, set the password directly if we're in a blueprint
context"""
password = validated_data.pop("password", None)
instance = super().update(instance, validated_data)
self._set_password(instance, password)
if SERIALIZER_CONTEXT_BLUEPRINT in self.context and "password" in validated_data:
instance.set_password(validated_data["password"])
instance.save()
return instance
def _set_password(self, instance: User, password: Optional[str]):
"""Set password of user if we're in a blueprint context, and if it's an empty
string then use an unusable password"""
if SERIALIZER_CONTEXT_BLUEPRINT in self.context and password:
instance.set_password(password)
instance.save()
if len(instance.password) == 0:
instance.set_unusable_password()
instance.save()
def validate_path(self, path: str) -> str:
"""Validate path"""
if path[:1] == "/" or path[-1] == "/":
@ -215,7 +201,7 @@ class UserSelfSerializer(ModelSerializer):
)
def get_groups(self, _: User):
"""Return only the group names a user is member of"""
for group in self.instance.all_groups().order_by("name"):
for group in self.instance.ak_groups.all():
yield {
"name": group.name,
"pk": group.pk,
@ -314,11 +300,11 @@ class UsersFilter(FilterSet):
is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser")
uuid = UUIDFilter(field_name="uuid")
path = CharFilter(field_name="path")
path = CharFilter(
field_name="path",
)
path_startswith = CharFilter(field_name="path", lookup_expr="startswith")
type = MultipleChoiceFilter(choices=UserTypes.choices, field_name="type")
groups_by_name = ModelMultipleChoiceFilter(
field_name="ak_groups__name",
to_field_name="name",
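The `_set_password` helper in the hunk above hashes a provided password and, when the resulting stored password is empty, marks it unusable. A hedged sketch of the underlying Django calls (assumes a configured Django project; the username is an arbitrary example):

```python
from django.contrib.auth import get_user_model

User = get_user_model()
user = User(username="example")

user.set_password("s3cret")       # stores a salted hash
assert user.has_usable_password()

user.set_unusable_password()      # password login becomes impossible
assert not user.has_usable_password()
```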

View File

@ -1,21 +0,0 @@
"""Build source docs"""
from pathlib import Path
from django.core.management.base import BaseCommand
from pdoc import pdoc
from pdoc.render import configure
class Command(BaseCommand):
"""Build source docs"""
def handle(self, **options):
configure(
docformat="markdown",
mermaid=True,
logo="https://goauthentik.io/img/icon_top_brand_colour.svg",
)
pdoc(
"authentik",
output_directory=Path("./source_docs"),
)

View File

@ -1,11 +1,55 @@
# Generated by Django 3.2.8 on 2021-10-10 16:16
from os import environ
import django.db.models.deletion
from django.apps.registry import Apps
from django.conf import settings
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
import authentik.core.models
def create_default_user(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
from django.contrib.auth.hashers import make_password
User = apps.get_model("authentik_core", "User")
db_alias = schema_editor.connection.alias
akadmin, _ = User.objects.using(db_alias).get_or_create(
username="akadmin",
email=environ.get("AUTHENTIK_BOOTSTRAP_EMAIL", "root@localhost"),
name="authentik Default Admin",
)
password = None
if "TF_BUILD" in environ or settings.TEST:
password = "akadmin" # noqa # nosec
if "AUTHENTIK_BOOTSTRAP_PASSWORD" in environ:
password = environ["AUTHENTIK_BOOTSTRAP_PASSWORD"]
if password:
akadmin.password = make_password(password)
else:
akadmin.password = make_password(None)
akadmin.save()
def create_default_admin_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
db_alias = schema_editor.connection.alias
Group = apps.get_model("authentik_core", "Group")
User = apps.get_model("authentik_core", "User")
# Creates a default admin group
group, _ = Group.objects.using(db_alias).get_or_create(
is_superuser=True,
defaults={
"name": "authentik Admins",
},
)
group.users.set(User.objects.filter(username="akadmin"))
group.save()
class Migration(migrations.Migration):
replaces = [
("authentik_core", "0002_auto_20200523_1133"),
@ -75,6 +119,9 @@ class Migration(migrations.Migration):
model_name="user",
name="is_staff",
),
migrations.RunPython(
code=create_default_user,
),
migrations.AddField(
model_name="user",
name="is_superuser",
@ -154,6 +201,9 @@ class Migration(migrations.Migration):
default=False, help_text="Users added to this group will be superusers."
),
),
migrations.RunPython(
code=create_default_admin_group,
),
migrations.AlterModelManagers(
name="user",
managers=[

View File

@ -1,6 +1,7 @@
# Generated by Django 3.2.8 on 2021-10-10 16:12
import uuid
from os import environ
import django.db.models.deletion
from django.apps.registry import Apps
@ -34,6 +35,29 @@ def fix_duplicates(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
Token.objects.using(db_alias).filter(identifier=ident["identifier"]).delete()
def create_default_user_token(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
from authentik.core.models import TokenIntents
User = apps.get_model("authentik_core", "User")
Token = apps.get_model("authentik_core", "Token")
db_alias = schema_editor.connection.alias
akadmin = User.objects.using(db_alias).filter(username="akadmin")
if not akadmin.exists():
return
if "AUTHENTIK_BOOTSTRAP_TOKEN" not in environ:
return
key = environ["AUTHENTIK_BOOTSTRAP_TOKEN"]
Token.objects.using(db_alias).create(
identifier="authentik-bootstrap-token",
user=akadmin.first(),
intent=TokenIntents.INTENT_API,
expiring=False,
key=key,
)
class Migration(migrations.Migration):
replaces = [
("authentik_core", "0018_auto_20210330_1345"),
@ -190,6 +214,9 @@ class Migration(migrations.Migration):
"verbose_name_plural": "Authenticated Sessions",
},
),
migrations.RunPython(
code=create_default_user_token,
),
migrations.AlterField(
model_name="token",
name="intent",

View File

@ -12,7 +12,7 @@ def migrate_user_type(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
from authentik.core.models import UserTypes
for user in User.objects.using(db_alias).all():
user.type = UserTypes.INTERNAL
user.type = UserTypes.DEFAULT
if "goauthentik.io/user/service-account" in user.attributes:
user.type = UserTypes.SERVICE_ACCOUNT
if "goauthentik.io/user/override-ips" in user.attributes:

View File

@ -1,41 +0,0 @@
# Generated by Django 4.1.10 on 2023-07-21 12:54
from django.apps.registry import Apps
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def migrate_user_type_v2(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
db_alias = schema_editor.connection.alias
User = apps.get_model("authentik_core", "User")
from authentik.core.models import UserTypes
for user in User.objects.using(db_alias).all():
if user.type != "default":
continue
user.type = UserTypes.INTERNAL
user.save()
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0030_user_type"),
]
operations = [
migrations.AlterField(
model_name="user",
name="type",
field=models.TextField(
choices=[
("internal", "Internal"),
("external", "External"),
("service_account", "Service Account"),
("internal_service_account", "Internal Service Account"),
],
default="internal",
),
),
migrations.RunPython(migrate_user_type_v2),
]

View File

@ -60,14 +60,14 @@ def default_token_key():
"""Default token key"""
# We use generate_id since the chars in the key should be easy
# to use in Emails (for verification) and URLs (for recovery)
return generate_id(CONFIG.get_int("default_token_length"))
return generate_id(int(CONFIG.get("default_token_length")))
class UserTypes(models.TextChoices):
"""User types, both for grouping, licensing and permissions in the case
of the internal_service_account"""
INTERNAL = "internal"
DEFAULT = "default"
EXTERNAL = "external"
# User-created service accounts
@ -79,7 +79,7 @@ class UserTypes(models.TextChoices):
class Group(SerializerModel):
"""Group model which supports a basic hierarchy and has attributes"""
"""Custom Group model which supports a basic hierarchy"""
group_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
@ -113,7 +113,27 @@ class Group(SerializerModel):
def is_member(self, user: "User") -> bool:
"""Recursively check if `user` is member of us, or any parent."""
return user.all_groups().filter(group_uuid=self.group_uuid).exists()
query = """
WITH RECURSIVE parents AS (
SELECT authentik_core_group.*, 0 AS relative_depth
FROM authentik_core_group
WHERE authentik_core_group.group_uuid = %s
UNION ALL
SELECT authentik_core_group.*, parents.relative_depth - 1
FROM authentik_core_group,parents
WHERE (
authentik_core_group.parent_id = parents.group_uuid and
parents.relative_depth > -20
)
)
SELECT group_uuid
FROM parents
GROUP BY group_uuid;
"""
groups = Group.objects.raw(query, [self.group_uuid])
return user.ak_groups.filter(pk__in=[group.pk for group in groups]).exists()
def __str__(self):
return f"Group {self.name}"
@ -128,20 +148,20 @@ class Group(SerializerModel):
class UserManager(DjangoUserManager):
"""User manager that doesn't assign is_superuser and is_staff"""
"""Custom user manager that doesn't assign is_superuser and is_staff"""
def create_user(self, username, email=None, password=None, **extra_fields):
"""User manager that doesn't assign is_superuser and is_staff"""
"""Custom user manager that doesn't assign is_superuser and is_staff"""
return self._create_user(username, email, password, **extra_fields)
class User(SerializerModel, GuardianUserMixin, AbstractUser):
"""authentik User model, based on django's contrib auth user model."""
"""Custom User model to allow easier adding of user-based settings"""
uuid = models.UUIDField(default=uuid4, editable=False, unique=True)
name = models.TextField(help_text=_("User's display name."))
path = models.TextField(default="users")
type = models.TextField(choices=UserTypes.choices, default=UserTypes.INTERNAL)
type = models.TextField(choices=UserTypes.choices, default=UserTypes.DEFAULT)
sources = models.ManyToManyField("Source", through="UserSourceConnection")
ak_groups = models.ManyToManyField("Group", related_name="users")
@ -156,45 +176,13 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
"""Get the default user path"""
return User._meta.get_field("path").default
def all_groups(self) -> QuerySet[Group]:
"""Recursively get all groups this user is a member of.
At least one query is done to get the direct groups of the user; with nested
groups, at most three queries are done"""
direct_groups = list(
x for x in self.ak_groups.all().values_list("pk", flat=True).iterator()
)
if len(direct_groups) < 1:
return Group.objects.none()
query = """
WITH RECURSIVE parents AS (
SELECT authentik_core_group.*, 0 AS relative_depth
FROM authentik_core_group
WHERE authentik_core_group.group_uuid = ANY(%s)
UNION ALL
SELECT authentik_core_group.*, parents.relative_depth + 1
FROM authentik_core_group, parents
WHERE (
authentik_core_group.group_uuid = parents.parent_id and
parents.relative_depth < 20
)
)
SELECT group_uuid
FROM parents
GROUP BY group_uuid, name
ORDER BY name;
"""
group_pks = [group.pk for group in Group.objects.raw(query, [direct_groups]).iterator()]
return Group.objects.filter(pk__in=group_pks)
def group_attributes(self, request: Optional[HttpRequest] = None) -> dict[str, Any]:
"""Get a dictionary containing the attributes from all groups the user belongs to,
including the users attributes"""
final_attributes = {}
if request and hasattr(request, "tenant"):
always_merger.merge(final_attributes, request.tenant.attributes)
for group in self.all_groups().order_by("name"):
for group in self.ak_groups.all().order_by("name"):
always_merger.merge(final_attributes, group.attributes)
always_merger.merge(final_attributes, self.attributes)
return final_attributes
@ -208,7 +196,7 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
@cached_property
def is_superuser(self) -> bool:
"""Get supseruser status based on membership in a group with superuser status"""
return self.all_groups().filter(is_superuser=True).exists()
return self.ak_groups.filter(is_superuser=True).exists()
@property
def is_staff(self) -> bool:
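The difference between the two sides of this hunk is transitive membership: `ak_groups` holds only direct memberships, while `all_groups()` walks the parent chain with a recursive CTE, capped at 20 levels. A sketch using the models from this diff (setup mirrors the membership tests later in this compare):

```python
from authentik.core.models import Group, User

user = User.objects.create(username="example")
parent = Group.objects.create(name="parent")
child = Group.objects.create(name="child", parent=parent)
child.users.add(user)

list(user.ak_groups.all())  # direct memberships only: [child]
list(user.all_groups())     # ancestors included:      [child, parent]
```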

View File

@ -78,6 +78,7 @@
</main>
{% endblock %}
<footer class="pf-c-login__footer">
<p></p>
<ul class="pf-c-list pf-m-inline">
{% for link in footer_links %}
<li>

View File

@ -13,9 +13,7 @@ class TestGroups(TestCase):
user = User.objects.create(username=generate_id())
user2 = User.objects.create(username=generate_id())
group = Group.objects.create(name=generate_id())
other_group = Group.objects.create(name=generate_id())
group.users.add(user)
other_group.users.add(user)
self.assertTrue(group.is_member(user))
self.assertFalse(group.is_member(user2))
@ -23,26 +21,22 @@ class TestGroups(TestCase):
"""Test parent membership"""
user = User.objects.create(username=generate_id())
user2 = User.objects.create(username=generate_id())
parent = Group.objects.create(name=generate_id())
child = Group.objects.create(name=generate_id(), parent=parent)
child.users.add(user)
self.assertTrue(child.is_member(user))
self.assertTrue(parent.is_member(user))
self.assertFalse(child.is_member(user2))
self.assertFalse(parent.is_member(user2))
first = Group.objects.create(name=generate_id())
second = Group.objects.create(name=generate_id(), parent=first)
second.users.add(user)
self.assertTrue(first.is_member(user))
self.assertFalse(first.is_member(user2))
def test_group_membership_parent_extra(self):
"""Test parent membership"""
user = User.objects.create(username=generate_id())
user2 = User.objects.create(username=generate_id())
parent = Group.objects.create(name=generate_id())
second = Group.objects.create(name=generate_id(), parent=parent)
first = Group.objects.create(name=generate_id())
second = Group.objects.create(name=generate_id(), parent=first)
third = Group.objects.create(name=generate_id(), parent=second)
second.users.add(user)
self.assertTrue(parent.is_member(user))
self.assertFalse(parent.is_member(user2))
self.assertTrue(second.is_member(user))
self.assertFalse(second.is_member(user2))
self.assertTrue(first.is_member(user))
self.assertFalse(first.is_member(user2))
self.assertFalse(third.is_member(user))
self.assertFalse(third.is_member(user2))

View File

@ -28,19 +28,6 @@ class TestUsersAPI(APITestCase):
self.admin = create_test_admin_user()
self.user = User.objects.create(username="test-user")
def test_filter_type(self):
"""Test API filtering by type"""
self.client.force_login(self.admin)
user = create_test_admin_user(type=UserTypes.EXTERNAL)
response = self.client.get(
reverse("authentik_api:user-list"),
data={
"type": UserTypes.EXTERNAL,
"username": user.username,
},
)
self.assertEqual(response.status_code, 200)
def test_metrics(self):
"""Test user's metrics"""
self.client.force_login(self.admin)

View File

@ -21,7 +21,7 @@ def create_test_flow(
)
def create_test_admin_user(name: Optional[str] = None, **kwargs) -> User:
def create_test_admin_user(name: Optional[str] = None) -> User:
"""Generate a test-admin user"""
uid = generate_id(20) if not name else name
group = Group.objects.create(name=uid, is_superuser=True)
@ -29,7 +29,6 @@ def create_test_admin_user(name: Optional[str] = None, **kwargs) -> User:
username=uid,
name=uid,
email=f"{uid}@goauthentik.io",
**kwargs,
)
user.set_password(uid)
user.save()
@ -37,12 +36,12 @@ def create_test_admin_user(name: Optional[str] = None, **kwargs) -> User:
return user
def create_test_tenant(**kwargs) -> Tenant:
def create_test_tenant() -> Tenant:
"""Generate a test tenant, removing all other tenants to make sure this one
matches."""
uid = generate_id(20)
Tenant.objects.all().delete()
return Tenant.objects.create(domain=uid, default=True, **kwargs)
return Tenant.objects.create(domain=uid, default=True)
def create_test_cert(use_ec_private_key=False) -> CertificateKeyPair:
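The `**kwargs` passthrough on one side of this hunk is what allows tests elsewhere in this compare to set extra model fields directly, e.g. `create_test_admin_user(type=UserTypes.EXTERNAL)` in the user-type filter test. A sketch; the module path and the tenant kwarg are assumptions for illustration:

```python
from authentik.core.models import UserTypes
from authentik.core.tests.utils import create_test_admin_user, create_test_tenant

user = create_test_admin_user(type=UserTypes.EXTERNAL)  # extra fields pass through
tenant = create_test_tenant(branding_title="example")   # hypothetical kwarg
```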

View File

@ -35,13 +35,13 @@ class LicenseSerializer(ModelSerializer):
"name",
"key",
"expiry",
"internal_users",
"users",
"external_users",
]
extra_kwargs = {
"name": {"read_only": True},
"expiry": {"read_only": True},
"internal_users": {"read_only": True},
"users": {"read_only": True},
"external_users": {"read_only": True},
}
@ -49,7 +49,7 @@ class LicenseSerializer(ModelSerializer):
class LicenseSummary(PassiveSerializer):
"""Serializer for license status"""
internal_users = IntegerField(required=True)
users = IntegerField(required=True)
external_users = IntegerField(required=True)
valid = BooleanField()
show_admin_warning = BooleanField()
@ -62,9 +62,9 @@ class LicenseSummary(PassiveSerializer):
class LicenseForecastSerializer(PassiveSerializer):
"""Serializer for license forecast"""
internal_users = IntegerField(required=True)
users = IntegerField(required=True)
external_users = IntegerField(required=True)
forecasted_internal_users = IntegerField(required=True)
forecasted_users = IntegerField(required=True)
forecasted_external_users = IntegerField(required=True)
@ -111,7 +111,7 @@ class LicenseViewSet(UsedByMixin, ModelViewSet):
latest_valid = datetime.fromtimestamp(total.exp)
response = LicenseSummary(
data={
"internal_users": total.internal_users,
"users": total.users,
"external_users": total.external_users,
"valid": total.is_valid(),
"show_admin_warning": show_admin_warning,
@ -135,18 +135,18 @@ class LicenseViewSet(UsedByMixin, ModelViewSet):
def forecast(self, request: Request) -> Response:
"""Forecast how many users will be required in a year"""
last_month = now() - timedelta(days=30)
# Forecast for internal users
internal_in_last_month = User.objects.filter(
type=UserTypes.INTERNAL, date_joined__gte=last_month
# Forecast for default users
users_in_last_month = User.objects.filter(
type=UserTypes.DEFAULT, date_joined__gte=last_month
).count()
# Forecast for external users
external_in_last_month = LicenseKey.get_external_user_count()
forecast_for_months = 12
response = LicenseForecastSerializer(
data={
"internal_users": LicenseKey.get_default_user_count(),
"users": LicenseKey.get_default_user_count(),
"external_users": LicenseKey.get_external_user_count(),
"forecasted_internal_users": (internal_in_last_month * forecast_for_months),
"forecasted_users": (users_in_last_month * forecast_for_months),
"forecasted_external_users": (external_in_last_month * forecast_for_months),
}
)

View File

@ -1,36 +0,0 @@
# Generated by Django 4.2.4 on 2023-08-23 10:06
import django.contrib.postgres.indexes
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_enterprise", "0001_initial"),
]
operations = [
migrations.RenameField(
model_name="license",
old_name="users",
new_name="internal_users",
),
migrations.AlterField(
model_name="license",
name="key",
field=models.TextField(),
),
migrations.AddIndex(
model_name="license",
index=django.contrib.postgres.indexes.HashIndex(
fields=["key"], name="authentik_e_key_523e13_hash"
),
),
migrations.AlterModelOptions(
name="licenseusage",
options={
"verbose_name": "License Usage",
"verbose_name_plural": "License Usage Records",
},
),
]

View File

@ -9,13 +9,11 @@ from time import mktime
from uuid import uuid4
from cryptography.exceptions import InvalidSignature
from cryptography.x509 import Certificate, load_der_x509_certificate, load_pem_x509_certificate
from cryptography.x509 import Certificate, load_pem_x509_certificate
from dacite import from_dict
from django.contrib.postgres.indexes import HashIndex
from django.db import models
from django.db.models.query import QuerySet
from django.utils.timezone import now
from django.utils.translation import gettext as _
from guardian.shortcuts import get_anonymous_user
from jwt import PyJWTError, decode, get_unverified_header
from rest_framework.exceptions import ValidationError
@ -48,8 +46,8 @@ class LicenseKey:
exp: int
name: str
internal_users: int = 0
external_users: int = 0
users: int
external_users: int
flags: list[LicenseFlags] = field(default_factory=list)
@staticmethod
@ -63,8 +61,8 @@ class LicenseKey:
if len(x5c) < 1:
raise ValidationError("Unable to verify license")
try:
our_cert = load_der_x509_certificate(b64decode(x5c[0]))
intermediate = load_der_x509_certificate(b64decode(x5c[1]))
our_cert = load_pem_x509_certificate(b64decode(x5c[0]))
intermediate = load_pem_x509_certificate(b64decode(x5c[1]))
our_cert.verify_directly_issued_by(intermediate)
intermediate.verify_directly_issued_by(get_licensing_key())
except (InvalidSignature, TypeError, ValueError, Error):
@ -75,7 +73,7 @@ class LicenseKey:
decode(
jwt,
our_cert.public_key(),
algorithms=["ES512"],
algorithms=["ES521"],
audience=get_license_aud(),
),
)
@ -89,7 +87,7 @@ class LicenseKey:
active_licenses = License.objects.filter(expiry__gte=now())
total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0)
for lic in active_licenses:
total.internal_users += lic.internal_users
total.users += lic.users
total.external_users += lic.external_users
exp_ts = int(mktime(lic.expiry.timetuple()))
if total.exp == 0:
@ -107,7 +105,7 @@ class LicenseKey:
@staticmethod
def get_default_user_count():
"""Get current default user count"""
return LicenseKey.base_user_qs().filter(type=UserTypes.INTERNAL).count()
return LicenseKey.base_user_qs().filter(type=UserTypes.DEFAULT).count()
@staticmethod
def get_external_user_count():
@ -125,7 +123,7 @@ class LicenseKey:
Only checks the current count, no historical data is checked"""
default_users = self.get_default_user_count()
if default_users > self.internal_users:
if default_users > self.users:
return False
active_users = self.get_external_user_count()
if active_users > self.external_users:
@ -155,11 +153,11 @@ class License(models.Model):
"""An authentik enterprise license"""
license_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
key = models.TextField()
key = models.TextField(unique=True)
name = models.TextField()
expiry = models.DateTimeField()
internal_users = models.BigIntegerField()
users = models.BigIntegerField()
external_users = models.BigIntegerField()
@property
@ -167,9 +165,6 @@ class License(models.Model):
"""Get parsed license status"""
return LicenseKey.validate(self.key)
class Meta:
indexes = (HashIndex(fields=("key",)),)
def usage_expiry():
"""Keep license usage records for 3 months"""
@ -188,7 +183,3 @@ class LicenseUsage(ExpiringModel):
within_limits = models.BooleanField()
record_date = models.DateTimeField(auto_now_add=True)
class Meta:
verbose_name = _("License Usage")
verbose_name_plural = _("License Usage Records")
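The license validation above carries its certificate chain in the JWT's `x5c` header. A hedged sketch of the same flow; note that `ES512` is the JOSE name for ECDSA over P-521, while the `"ES521"` string appearing on one side of the hunk is not a registered algorithm identifier:

```python
from base64 import b64decode

from cryptography.x509 import load_der_x509_certificate
from jwt import decode, get_unverified_header


def verify_license(token: str, audience: str, root_cert) -> dict:
    """Verify the embedded x5c chain, then the JWT signature (sketch)."""
    x5c = get_unverified_header(token)["x5c"]
    leaf = load_der_x509_certificate(b64decode(x5c[0]))
    intermediate = load_der_x509_certificate(b64decode(x5c[1]))
    leaf.verify_directly_issued_by(intermediate)       # leaf signed by intermediate
    intermediate.verify_directly_issued_by(root_cert)  # chain anchors at the root
    return decode(token, leaf.public_key(), algorithms=["ES512"], audience=audience)
```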

View File

@ -24,7 +24,7 @@ class EnterprisePolicy(Policy):
def passes(self, request: PolicyRequest) -> PolicyResult:
if not LicenseKey.get_total().is_valid():
return PolicyResult(False)
if request.user.type != UserTypes.INTERNAL:
if request.user.type != UserTypes.DEFAULT:
return PolicyResult(False)
return PolicyResult(True)

View File

@ -13,6 +13,6 @@ def pre_save_license(sender: type[License], instance: License, **_):
"""Extract data from license jwt and save it into model"""
status = instance.status
instance.name = status.name
instance.internal_users = status.internal_users
instance.users = status.users
instance.external_users = status.external_users
instance.expiry = datetime.fromtimestamp(status.exp, tz=get_current_timezone())

View File

@ -23,7 +23,7 @@ class TestEnterpriseLicense(TestCase):
aud="",
exp=_exp,
name=generate_id(),
internal_users=100,
users=100,
external_users=100,
)
),
@ -32,7 +32,7 @@ class TestEnterpriseLicense(TestCase):
"""Check license verification"""
lic = License.objects.create(key=generate_id())
self.assertTrue(lic.status.is_valid())
self.assertEqual(lic.internal_users, 100)
self.assertEqual(lic.users, 100)
def test_invalid(self):
"""Test invalid license"""
@ -46,7 +46,7 @@ class TestEnterpriseLicense(TestCase):
aud="",
exp=_exp,
name=generate_id(),
internal_users=100,
users=100,
external_users=100,
)
),
@ -58,7 +58,7 @@ class TestEnterpriseLicense(TestCase):
lic2 = License.objects.create(key=generate_id())
self.assertTrue(lic2.status.is_valid())
total = LicenseKey.get_total()
self.assertEqual(total.internal_users, 200)
self.assertEqual(total.users, 200)
self.assertEqual(total.external_users, 200)
self.assertEqual(total.exp, _exp)
self.assertTrue(total.is_valid())

View File

@ -4,7 +4,7 @@ from json import loads
import django_filters
from django.db.models.aggregates import Count
from django.db.models.fields.json import KeyTextTransform, KeyTransform
from django.db.models.fields.json import KeyTextTransform
from django.db.models.functions import ExtractDay
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema
@ -134,11 +134,11 @@ class EventViewSet(ModelViewSet):
"""Get the top_n events grouped by user count"""
filtered_action = request.query_params.get("action", EventAction.LOGIN)
top_n = int(request.query_params.get("top_n", "15"))
events = (
return Response(
get_objects_for_user(request.user, "authentik_events.view_event")
.filter(action=filtered_action)
.exclude(context__authorized_application=None)
.annotate(application=KeyTransform("authorized_application", "context"))
.annotate(application=KeyTextTransform("authorized_application", "context"))
.annotate(user_pk=KeyTextTransform("pk", "user"))
.values("application")
.annotate(counted_events=Count("application"))
@ -146,7 +146,6 @@ class EventViewSet(ModelViewSet):
.values("unique_users", "application", "counted_events")
.order_by("-counted_events")[:top_n]
)
return Response(EventTopPerUserSerializer(instance=events, many=True).data)
@extend_schema(
methods=["GET"],

View File

@ -39,7 +39,7 @@ class NotificationTransportSerializer(ModelSerializer):
mode = attrs.get("mode")
if mode in [TransportMode.WEBHOOK, TransportMode.WEBHOOK_SLACK]:
if "webhook_url" not in attrs or attrs.get("webhook_url", "") == "":
raise ValidationError({"webhook_url": "Webhook URL may not be empty."})
raise ValidationError("Webhook URL may not be empty.")
return attrs
class Meta:

View File

@ -76,20 +76,9 @@ class TaskInfo:
return cache.get_many(cache.keys(CACHE_KEY_PREFIX + name)).values()
return cache.get(CACHE_KEY_PREFIX + name, None)
@property
def full_name(self) -> str:
"""Get the full cache key with task name and UID"""
key = CACHE_KEY_PREFIX + self.task_name
if self.result.uid:
uid_suffix = f":{self.result.uid}"
key += uid_suffix
if not self.task_name.endswith(uid_suffix):
self.task_name += uid_suffix
return key
def delete(self):
"""Delete task info from cache"""
return cache.delete(self.full_name)
return cache.delete(CACHE_KEY_PREFIX + self.task_name)
def update_metrics(self):
"""Update prometheus metrics"""
@ -108,8 +97,12 @@ class TaskInfo:
def save(self, timeout_hours=6):
"""Save task into cache"""
key = CACHE_KEY_PREFIX + self.task_name
if self.result.uid:
key += f":{self.result.uid}"
self.task_name += f":{self.result.uid}"
self.update_metrics()
cache.set(self.full_name, self, timeout=timeout_hours * 60 * 60)
cache.set(key, self, timeout=timeout_hours * 60 * 60)
class MonitoredTask(Task):
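The `full_name` property above centralises the cache-key rule: prefix plus task name, plus `:uid` when a UID is set. A standalone sketch; the prefix value is an assumption, since it is not shown in this hunk:

```python
CACHE_KEY_PREFIX = "goauthentik.io/events/tasks/"  # assumed value


def full_name(task_name: str, uid: str | None) -> str:
    key = CACHE_KEY_PREFIX + task_name
    if uid:
        key += f":{uid}"
    return key


assert full_name("clean_expired", None) == "goauthentik.io/events/tasks/clean_expired"
assert full_name("apply_blueprint", "abc") == "goauthentik.io/events/tasks/apply_blueprint:abc"
```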

View File

@ -1,43 +0,0 @@
"""Test Monitored tasks"""
from django.test import TestCase
from authentik.events.monitored_tasks import MonitoredTask, TaskInfo, TaskResult, TaskResultStatus
from authentik.lib.generators import generate_id
from authentik.root.celery import CELERY_APP
class TestMonitoredTasks(TestCase):
"""Test Monitored tasks"""
def test_failed_successful_remove_state(self):
"""Test that a task with `save_on_success` set to `False` that failed saves
a state, and upon successful completion will delete the state"""
should_fail = True
uid = generate_id()
@CELERY_APP.task(
bind=True,
base=MonitoredTask,
)
def test_task(self: MonitoredTask):
self.save_on_success = False
self.set_uid(uid)
self.set_status(
TaskResult(TaskResultStatus.ERROR if should_fail else TaskResultStatus.SUCCESSFUL)
)
# First test successful run
should_fail = False
test_task.delay().get()
self.assertIsNone(TaskInfo.by_name(f"test_task:{uid}"))
# Then test failed
should_fail = True
test_task.delay().get()
info = TaskInfo.by_name(f"test_task:{uid}")
self.assertEqual(info.result.status, TaskResultStatus.ERROR)
# Then after that, the state should be removed
should_fail = False
test_task.delay().get()
self.assertIsNone(TaskInfo.by_name(f"test_task:{uid}"))

View File

@ -33,7 +33,7 @@ PLAN_CONTEXT_SOURCE = "source"
# Is set by the Flow Planner when a FlowToken was used, and the currently active flow plan
# was restored.
PLAN_CONTEXT_IS_RESTORED = "is_restored"
CACHE_TIMEOUT = CONFIG.get_int("redis.cache_timeout_flows")
CACHE_TIMEOUT = int(CONFIG.get("redis.cache_timeout_flows"))
CACHE_PREFIX = "goauthentik.io/flows/planner/"

authentik/lib/apps.py Normal file
View File

@ -0,0 +1,10 @@
"""authentik lib app config"""
from django.apps import AppConfig
class AuthentikLibConfig(AppConfig):
"""authentik lib app config"""
name = "authentik.lib"
label = "authentik_lib"
verbose_name = "authentik lib"

View File

@ -1,10 +0,0 @@
package lib
import _ "embed"
//go:embed default.yml
var defaultConfig []byte
func DefaultConfig() []byte {
return defaultConfig
}

View File

@ -213,14 +213,6 @@ class ConfigLoader:
attr: Attr = get_path_from_dict(root, path, sep=sep, default=Attr(default))
return attr.value
def get_int(self, path: str, default=0) -> int:
"""Wrapper for get that converts value into int"""
try:
return int(self.get(path, default))
except ValueError as exc:
self.log("warning", "Failed to parse config as int", path=path, exc=str(exc))
return default
def get_bool(self, path: str, default=False) -> bool:
"""Wrapper for get that converts value into boolean"""
return str(self.get(path, default)).lower() == "true"
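
The removed `get_int()` differs from the bare `int(CONFIG.get(...))` calls substituted throughout this diff in one respect: on an unparseable value it logs a warning and falls back to the default instead of raising. The removed tests further down pin down exactly that contract:

    from authentik.lib.config import ConfigLoader

    config = ConfigLoader()
    config.set("foo", "bar")

    config.get_int("foo", 1234)   # logs a warning, returns 1234
    int(config.get("foo", 1234))  # raises ValueError: invalid literal for int()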

View File

@ -11,11 +11,7 @@ postgresql:
listen:
listen_http: 0.0.0.0:9000
listen_https: 0.0.0.0:9443
listen_ldap: 0.0.0.0:3389
listen_ldaps: 0.0.0.0:6636
listen_radius: 0.0.0.0:1812
listen_metrics: 0.0.0.0:9300
listen_debug: 0.0.0.0:9900
trusted_proxy_cidrs:
- 127.0.0.0/8
- 10.0.0.0/8
@ -36,9 +32,6 @@ redis:
cache_timeout_policies: 300
cache_timeout_reputation: 300
paths:
media: ./media
debug: false
remote_debug: false
@ -84,9 +77,6 @@ ldap:
tls:
ciphers: null
reputation:
expiry: 86400
cookie_domain: null
disable_update_check: false
disable_startup_analytics: false

View File

@ -112,7 +112,7 @@ class BaseEvaluator:
@staticmethod
def expr_is_group_member(user: User, **group_filters) -> bool:
"""Check if `user` is a member of a group matching `group_filters`"""
return user.all_groups().filter(**group_filters).exists()
return user.ak_groups.filter(**group_filters).exists()
@staticmethod
def expr_user_by(**filters) -> Optional[User]:
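
The membership lookup flips between `user.all_groups()` and `user.ak_groups` (the direct many-to-many manager); presumably `all_groups()` also resolves nested group hierarchies rather than only direct membership. This helper backs the `ak_is_group_member` function exposed to expression policies; a hypothetical policy expression using it:

    # expression policy body; the group name is made up for illustration
    return ak_is_group_member(request.user, name="admins")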

View File

@ -98,7 +98,7 @@ def traces_sampler(sampling_context: dict) -> float:
def before_send(event: dict, hint: dict) -> Optional[dict]:
"""Check if error is database error, and ignore if so"""
# pylint: disable=no-name-in-module
from psycopg.errors import Error
from psycopg2.errors import Error
ignored_classes = (
# Inbuilt types

View File

@ -79,15 +79,3 @@ class TestConfig(TestCase):
config.update_from_file(file2_name)
unlink(file_name)
unlink(file2_name)
def test_get_int(self):
"""Test get_int"""
config = ConfigLoader()
config.set("foo", 1234)
self.assertEqual(config.get_int("foo"), 1234)
def test_get_int_invalid(self):
"""Test get_int"""
config = ConfigLoader()
config.set("foo", "bar")
self.assertEqual(config.get_int("foo", 1234), 1234)

View File

@ -1,22 +1,16 @@
"""Base Kubernetes Reconciler"""
from dataclasses import asdict
from json import dumps
from typing import TYPE_CHECKING, Generic, Optional, TypeVar
from dacite.core import from_dict
from django.utils.text import slugify
from jsonpatch import JsonPatchConflict, JsonPatchException, JsonPatchTestFailed, apply_patch
from kubernetes.client import ApiClient, V1ObjectMeta
from kubernetes.client import V1ObjectMeta
from kubernetes.client.exceptions import ApiException, OpenApiException
from kubernetes.client.models.v1_deployment import V1Deployment
from kubernetes.client.models.v1_pod import V1Pod
from requests import Response
from structlog.stdlib import get_logger
from urllib3.exceptions import HTTPError
from authentik import __version__
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.controllers.base import ControllerException
from authentik.outposts.controllers.k8s.triggers import NeedsRecreate, NeedsUpdate
if TYPE_CHECKING:
@ -40,23 +34,11 @@ class KubernetesObjectReconciler(Generic[T]):
self.namespace = controller.outpost.config.kubernetes_namespace
self.logger = get_logger().bind(type=self.__class__.__name__)
def get_patch(self):
"""Get any patches that apply to this CRD"""
patches = self.controller.outpost.config.kubernetes_json_patches
if not patches:
return None
return patches.get(self.reconciler_name(), None)
@property
def is_embedded(self) -> bool:
"""Return true if the current outpost is embedded"""
return self.controller.outpost.managed == MANAGED_OUTPOST
@staticmethod
def reconciler_name() -> str:
"""A name this reconciler is identified by in the configuration"""
raise NotImplementedError
@property
def noop(self) -> bool:
"""Return true if this object should not be created/updated/deleted in this cluster"""
@ -73,32 +55,6 @@ class KubernetesObjectReconciler(Generic[T]):
}
).lower()
def get_patched_reference_object(self) -> T:
"""Get patched reference object"""
reference = self.get_reference_object()
patch = self.get_patch()
try:
json = ApiClient().sanitize_for_serialization(reference)
# Custom objects will not be known to the client's OpenAPI types
except AttributeError:
json = asdict(reference)
try:
ref = json
if patch is not None:
ref = apply_patch(json, patch)
except (JsonPatchException, JsonPatchConflict, JsonPatchTestFailed) as exc:
raise ControllerException(f"JSON Patch failed: {exc}") from exc
mock_response = Response()
mock_response.data = dumps(ref)
try:
result = ApiClient().deserialize(mock_response, reference.__class__.__name__)
# Custom objects will not be known to the client's OpenAPI types
except AttributeError:
result = from_dict(reference.__class__, data=ref)
return result
# pylint: disable=invalid-name
def up(self):
"""Create object if it doesn't exist, update if needed or recreate if needed."""
@ -106,7 +62,7 @@ class KubernetesObjectReconciler(Generic[T]):
if self.noop:
self.logger.debug("Object is noop")
return
reference = self.get_patched_reference_object()
reference = self.get_reference_object()
try:
try:
current = self.retrieve()
@ -173,16 +129,6 @@ class KubernetesObjectReconciler(Generic[T]):
if current.metadata.labels != reference.metadata.labels:
raise NeedsUpdate()
patch = self.get_patch()
if patch is not None:
current_json = ApiClient().sanitize_for_serialization(current)
try:
if apply_patch(current_json, patch) != current_json:
raise NeedsUpdate()
except (JsonPatchException, JsonPatchConflict, JsonPatchTestFailed) as exc:
raise ControllerException(f"JSON Patch failed: {exc}") from exc
def create(self, reference: T):
"""API Wrapper to create object"""
raise NotImplementedError
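
The removed block wired outpost-level JSON patches (RFC 6902) into the reconcilers: the reference object is serialised, the patch list stored under the reconciler's name in `kubernetes_json_patches` is applied via jsonpatch, and the result is deserialised back into a typed client object. A minimal sketch of the apply step, with a hypothetical patch that pins a deployment to Linux nodes:

    from jsonpatch import apply_patch

    # what kubernetes_json_patches["deployment"] could contain (hypothetical)
    patch = [
        {
            "op": "add",
            "path": "/spec/template/spec/nodeSelector",
            "value": {"kubernetes.io/os": "linux"},
        }
    ]

    reference = {"spec": {"template": {"spec": {}}}}
    patched = apply_patch(reference, patch)
    assert patched["spec"]["template"]["spec"]["nodeSelector"] == {"kubernetes.io/os": "linux"}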

View File

@ -43,10 +43,6 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
self.api = AppsV1Api(controller.client)
self.outpost = self.controller.outpost
@staticmethod
def reconciler_name() -> str:
return "deployment"
def reconcile(self, current: V1Deployment, reference: V1Deployment):
compare_ports(
current.spec.template.spec.containers[0].ports,

View File

@ -24,10 +24,6 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
super().__init__(controller)
self.api = CoreV1Api(controller.client)
@staticmethod
def reconciler_name() -> str:
return "secret"
def reconcile(self, current: V1Secret, reference: V1Secret):
super().reconcile(current, reference)
for key in reference.data.keys():

View File

@ -20,10 +20,6 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
super().__init__(controller)
self.api = CoreV1Api(controller.client)
@staticmethod
def reconciler_name() -> str:
return "service"
def reconcile(self, current: V1Service, reference: V1Service):
compare_ports(current.spec.ports, reference.spec.ports)
# run the base reconcile last, as that will probably raise NeedsUpdate

View File

@ -71,10 +71,6 @@ class PrometheusServiceMonitorReconciler(KubernetesObjectReconciler[PrometheusSe
self.api_ex = ApiextensionsV1Api(controller.client)
self.api = CustomObjectsApi(controller.client)
@staticmethod
def reconciler_name() -> str:
return "prometheus servicemonitor"
@property
def noop(self) -> bool:
return (not self._crd_exists()) or (self.is_embedded)

View File

@ -64,19 +64,12 @@ class KubernetesController(BaseController):
super().__init__(outpost, connection)
self.client = KubernetesClient(connection)
self.reconcilers = {
SecretReconciler.reconciler_name(): SecretReconciler,
DeploymentReconciler.reconciler_name(): DeploymentReconciler,
ServiceReconciler.reconciler_name(): ServiceReconciler,
PrometheusServiceMonitorReconciler.reconciler_name(): (
PrometheusServiceMonitorReconciler
),
"secret": SecretReconciler,
"deployment": DeploymentReconciler,
"service": ServiceReconciler,
"prometheus servicemonitor": PrometheusServiceMonitorReconciler,
}
self.reconcile_order = [
SecretReconciler.reconciler_name(),
DeploymentReconciler.reconciler_name(),
ServiceReconciler.reconciler_name(),
PrometheusServiceMonitorReconciler.reconciler_name(),
]
self.reconcile_order = ["secret", "deployment", "service", "prometheus servicemonitor"]
def up(self):
try:

View File

@ -1,7 +1,7 @@
"""Outpost models"""
from dataclasses import asdict, dataclass, field
from datetime import datetime
from typing import Any, Iterable, Optional
from typing import Iterable, Optional
from uuid import uuid4
from dacite.core import from_dict
@ -75,7 +75,6 @@ class OutpostConfig:
kubernetes_service_type: str = field(default="ClusterIP")
kubernetes_disabled_components: list[str] = field(default_factory=list)
kubernetes_image_pull_secrets: list[str] = field(default_factory=list)
kubernetes_json_patches: Optional[dict[str, list[dict[str, Any]]]] = field(default=None)
class OutpostModel(Model):

View File

@ -19,7 +19,7 @@ from authentik.policies.types import CACHE_PREFIX, PolicyRequest, PolicyResult
LOGGER = get_logger()
FORK_CTX = get_context("fork")
CACHE_TIMEOUT = CONFIG.get_int("redis.cache_timeout_policies")
CACHE_TIMEOUT = int(CONFIG.get("redis.cache_timeout_policies"))
PROCESS_CLASS = FORK_CTX.Process

View File

@ -1,33 +0,0 @@
# Generated by Django 4.2.4 on 2023-08-31 10:42
from django.db import migrations, models
import authentik.policies.reputation.models
class Migration(migrations.Migration):
dependencies = [
("authentik_policies_reputation", "0004_reputationpolicy_authentik_p_policy__8f0d70_idx"),
]
operations = [
migrations.AddField(
model_name="reputation",
name="expires",
field=models.DateTimeField(
default=authentik.policies.reputation.models.reputation_expiry
),
),
migrations.AddField(
model_name="reputation",
name="expiring",
field=models.BooleanField(default=True),
),
migrations.AlterModelOptions(
name="reputation",
options={
"verbose_name": "Reputation Score",
"verbose_name_plural": "Reputation Scores",
},
),
]

View File

@ -1,17 +1,13 @@
"""authentik reputation request policy"""
from datetime import timedelta
from uuid import uuid4
from django.db import models
from django.db.models import Sum
from django.db.models.query_utils import Q
from django.utils.timezone import now
from django.utils.translation import gettext as _
from rest_framework.serializers import BaseSerializer
from structlog import get_logger
from authentik.core.models import ExpiringModel
from authentik.lib.config import CONFIG
from authentik.lib.models import SerializerModel
from authentik.lib.utils.http import get_client_ip
from authentik.policies.models import Policy
@ -21,11 +17,6 @@ LOGGER = get_logger()
CACHE_KEY_PREFIX = "goauthentik.io/policies/reputation/scores/"
def reputation_expiry():
"""Reputation expiry"""
return now() + timedelta(seconds=CONFIG.get_int("reputation.expiry"))
class ReputationPolicy(Policy):
"""Return true if request IP/target username's score is below a certain threshold"""
@ -68,7 +59,7 @@ class ReputationPolicy(Policy):
verbose_name_plural = _("Reputation Policies")
class Reputation(ExpiringModel, SerializerModel):
class Reputation(SerializerModel):
"""Reputation for user and or IP."""
reputation_uuid = models.UUIDField(primary_key=True, unique=True, default=uuid4)
@ -78,8 +69,6 @@ class Reputation(ExpiringModel, SerializerModel):
ip_geo_data = models.JSONField(default=dict)
score = models.BigIntegerField(default=0)
expires = models.DateTimeField(default=reputation_expiry)
updated = models.DateTimeField(auto_now_add=True)
@property
@ -92,6 +81,4 @@ class Reputation(ExpiringModel, SerializerModel):
return f"Reputation {self.identifier}/{self.ip} @ {self.score}"
class Meta:
verbose_name = _("Reputation Score")
verbose_name_plural = _("Reputation Scores")
unique_together = ("identifier", "ip")
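
The migration and model lines removed here made Reputation an ExpiringModel whose `expires` default is derived from config; the default.yml hunk earlier in this diff lists `reputation.expiry` at 86400, i.e. scores age out after a day. A minimal sketch of that default with the config value inlined:

    from datetime import timedelta
    from django.utils.timezone import now

    REPUTATION_EXPIRY_SECONDS = 86400  # "reputation.expiry" default from default.yml

    def reputation_expiry():
        """Default for Reputation.expires: one expiry window from now."""
        return now() + timedelta(seconds=REPUTATION_EXPIRY_SECONDS)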

View File

@ -13,7 +13,7 @@ from authentik.policies.reputation.tasks import save_reputation
from authentik.stages.identification.signals import identification_failed
LOGGER = get_logger()
CACHE_TIMEOUT = CONFIG.get_int("redis.cache_timeout_reputation")
CACHE_TIMEOUT = int(CONFIG.get("redis.cache_timeout_reputation"))
def update_score(request: HttpRequest, identifier: str, amount: int):

View File

@ -1,6 +1,6 @@
"""id_token utils"""
from dataclasses import asdict, dataclass, field
from typing import TYPE_CHECKING, Any, Optional, Union
from typing import TYPE_CHECKING, Any, Optional
from django.db import models
from django.http import HttpRequest
@ -57,7 +57,7 @@ class IDToken:
# Subject, https://www.rfc-editor.org/rfc/rfc7519.html#section-4.1.2
sub: Optional[str] = None
# Audience, https://www.rfc-editor.org/rfc/rfc7519.html#section-4.1.3
aud: Optional[Union[str, list[str]]] = None
aud: Optional[str] = None
# Expiration time, https://www.rfc-editor.org/rfc/rfc7519.html#section-4.1.4
exp: Optional[int] = None
# Issued at, https://www.rfc-editor.org/rfc/rfc7519.html#section-4.1.6
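
The `aud` type change tracks RFC 7519 section 4.1.3, which allows the audience claim to be a single case-sensitive string or an array of strings. Two hypothetical payloads that the wider `Optional[Union[str, list[str]]]` annotation admits:

    claims_single = {"iss": "https://idp.example.com", "aud": "app-one"}
    claims_multi = {"iss": "https://idp.example.com", "aud": ["app-one", "app-two"]}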

View File

@ -2,7 +2,6 @@
import base64
import binascii
import json
from dataclasses import asdict
from functools import cached_property
from hashlib import sha256
from typing import Any, Optional
@ -359,7 +358,7 @@ class AccessToken(SerializerModel, ExpiringModel, BaseGrantModel):
@id_token.setter
def id_token(self, value: IDToken):
self.token = value.to_access_token(self.provider)
self._id_token = json.dumps(asdict(value))
self._id_token = json.dumps(value.to_dict())
@property
def at_hash(self):
@ -401,7 +400,7 @@ class RefreshToken(SerializerModel, ExpiringModel, BaseGrantModel):
@id_token.setter
def id_token(self, value: IDToken):
self._id_token = json.dumps(asdict(value))
self._id_token = json.dumps(value.to_dict())
@property
def serializer(self) -> Serializer:

View File

@ -11,7 +11,6 @@ from authentik.core.tests.utils import create_test_admin_user, create_test_cert,
from authentik.policies.models import PolicyBinding
from authentik.providers.oauth2.constants import (
GRANT_TYPE_CLIENT_CREDENTIALS,
GRANT_TYPE_PASSWORD,
SCOPE_OPENID,
SCOPE_OPENID_EMAIL,
SCOPE_OPENID_PROFILE,
@ -151,36 +150,3 @@ class TestTokenClientCredentials(OAuthTestCase):
)
self.assertEqual(jwt["given_name"], self.user.name)
self.assertEqual(jwt["preferred_username"], self.user.username)
jwt = decode(
body["id_token"],
key=self.provider.signing_key.public_key,
algorithms=[alg],
audience=self.provider.client_id,
)
self.assertEqual(jwt["given_name"], self.user.name)
self.assertEqual(jwt["preferred_username"], self.user.username)
def test_successful_password(self):
"""test successful (password grant)"""
response = self.client.post(
reverse("authentik_providers_oauth2:token"),
{
"grant_type": GRANT_TYPE_PASSWORD,
"scope": f"{SCOPE_OPENID} {SCOPE_OPENID_EMAIL} {SCOPE_OPENID_PROFILE}",
"client_id": self.provider.client_id,
"username": "sa",
"password": self.token.key,
},
)
self.assertEqual(response.status_code, 200)
body = loads(response.content.decode())
self.assertEqual(body["token_type"], TOKEN_TYPE)
_, alg = self.provider.jwt_key
jwt = decode(
body["access_token"],
key=self.provider.signing_key.public_key,
algorithms=[alg],
audience=self.provider.client_id,
)
self.assertEqual(jwt["given_name"], self.user.name)
self.assertEqual(jwt["preferred_username"], self.user.username)
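
The removed test exercised the resource owner password credentials grant (RFC 6749 section 4.3); the token view hunk below shows the same grant being dispatched alongside client credentials. A minimal sketch of such a token request, with a hypothetical deployment URL and placeholder credentials:

    import requests

    response = requests.post(
        "https://authentik.example.com/application/o/token/",  # hypothetical host
        data={
            "grant_type": "password",
            "scope": "openid email profile",
            "client_id": "my-client-id",
            "username": "sa",
            "password": "service-account-token",  # placeholder secret
        },
        timeout=10,
    )
    access_token = response.json()["access_token"]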

View File

@ -375,9 +375,7 @@ class AuthorizationFlowInitView(PolicyAccessView):
):
self.request.session[SESSION_KEY_LAST_LOGIN_UID] = login_uid
return self.handle_no_permission()
scope_descriptions = UserInfoView().get_scope_descriptions(
self.params.scope, self.params.provider
)
scope_descriptions = UserInfoView().get_scope_descriptions(self.params.scope)
# Regardless, we start the planner and return to it
planner = FlowPlanner(self.provider.authorization_flow)
planner.allow_empty_flows = True

View File

@ -55,7 +55,7 @@ def validate_code(code: int, request: HttpRequest) -> Optional[HttpResponse]:
if not app:
return None
scope_descriptions = UserInfoView().get_scope_descriptions(token.scope, token.provider)
scope_descriptions = UserInfoView().get_scope_descriptions(token.scope)
planner = FlowPlanner(token.provider.authorization_flow)
planner.allow_empty_flows = True
try:

View File

@ -459,13 +459,13 @@ class TokenView(View):
if self.params.grant_type == GRANT_TYPE_REFRESH_TOKEN:
LOGGER.debug("Refreshing refresh token")
return TokenResponse(self.create_refresh_response())
if self.params.grant_type in [GRANT_TYPE_CLIENT_CREDENTIALS, GRANT_TYPE_PASSWORD]:
LOGGER.debug("Client credentials/password grant")
if self.params.grant_type == GRANT_TYPE_CLIENT_CREDENTIALS:
LOGGER.debug("Client credentials grant")
return TokenResponse(self.create_client_credentials_response())
if self.params.grant_type == GRANT_TYPE_DEVICE_CODE:
LOGGER.debug("Device code grant")
return TokenResponse(self.create_device_code_response())
raise TokenError("unsupported_grant_type")
raise ValueError(f"Invalid grant_type: {self.params.grant_type}")
except (TokenError, DeviceCodeError) as error:
return TokenResponse(error.create_dict(), status=400)
except UserAuthError as error:

View File

@ -40,14 +40,10 @@ class UserInfoView(View):
token: Optional[RefreshToken]
def get_scope_descriptions(
self, scopes: list[str], provider: OAuth2Provider
) -> list[PermissionDict]:
def get_scope_descriptions(self, scopes: list[str]) -> list[PermissionDict]:
"""Get a list of all scopes' descriptions"""
scope_descriptions = []
for scope in ScopeMapping.objects.filter(scope_name__in=scopes, provider=provider).order_by(
"scope_name"
):
for scope in ScopeMapping.objects.filter(scope_name__in=scopes).order_by("scope_name"):
scope_descriptions.append(PermissionDict(id=scope.scope_name, name=scope.description))
# GitHub Compatibility Scopes are handled differently, since they require custom paths
# Hence they don't exist as Scope objects

View File

@ -59,9 +59,7 @@ class ProxyProviderSerializer(ProviderSerializer):
attrs.get("mode", ProxyMode.PROXY) == ProxyMode.PROXY
and attrs.get("internal_host", "") == ""
):
raise ValidationError(
{"internal_host": _("Internal host cannot be empty when forward auth is disabled.")}
)
raise ValidationError(_("Internal host cannot be empty when forward auth is disabled."))
return attrs
def create(self, validated_data: dict):

View File

@ -31,10 +31,6 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]):
super().__init__(controller)
self.api = NetworkingV1Api(controller.client)
@staticmethod
def reconciler_name() -> str:
return "ingress"
def _check_annotations(self, reference: V1Ingress):
"""Check that all annotations *we* set are correct"""
for key, value in self.get_ingress_annotations().items():

View File

@ -17,28 +17,24 @@ class TraefikMiddlewareReconciler(KubernetesObjectReconciler):
if not self.reconciler.crd_exists():
self.reconciler = Traefik2MiddlewareReconciler(controller)
@staticmethod
def reconciler_name() -> str:
return "traefik middleware"
@property
def noop(self) -> bool:
return self.reconciler.noop
def reconcile(self, current: TraefikMiddleware, reference: TraefikMiddleware):
return self.reconciler.reconcile(current, reference)
return self.reconcile(current, reference)
def get_reference_object(self) -> TraefikMiddleware:
return self.reconciler.get_reference_object()
return self.get_reference_object()
def create(self, reference: TraefikMiddleware):
return self.reconciler.create(reference)
return self.create(reference)
def delete(self, reference: TraefikMiddleware):
return self.reconciler.delete(reference)
return self.delete(reference)
def retrieve(self) -> TraefikMiddleware:
return self.reconciler.retrieve()
return self.retrieve()
def update(self, current: TraefikMiddleware, reference: TraefikMiddleware):
return self.reconciler.update(current, reference)
return self.update(current, reference)
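
TraefikMiddlewareReconciler is a facade that picks a concrete implementation at construction time (falling back to the Traefik 2 reconciler when the newer CRD is absent); the removed lines delegated each operation to that inner `self.reconciler`. A minimal sketch of the delegation pattern with a hypothetical inner class:

    class MiddlewareReconcilerFacade:
        """Forward every operation to whichever concrete reconciler applies."""

        def __init__(self, inner):
            self.reconciler = inner  # e.g. a v3 or v2 reconciler instance

        def retrieve(self):
            # forward to the wrapped implementation rather than to self,
            # which would recurse without terminating
            return self.reconciler.retrieve()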

View File

@ -67,10 +67,6 @@ class Traefik3MiddlewareReconciler(KubernetesObjectReconciler[TraefikMiddleware]
self.crd_version = "v1alpha1"
self.crd_plural = "middlewares"
@staticmethod
def reconciler_name() -> str:
return "traefik middleware"
@property
def noop(self) -> bool:
if not ProxyProvider.objects.filter(

View File

@ -16,9 +16,7 @@ class ProxyKubernetesController(KubernetesController):
DeploymentPort(9300, "http-metrics", "tcp"),
DeploymentPort(9443, "https", "tcp"),
]
self.reconcilers[IngressReconciler.reconciler_name()] = IngressReconciler
self.reconcilers[
TraefikMiddlewareReconciler.reconciler_name()
] = TraefikMiddlewareReconciler
self.reconcile_order.append(IngressReconciler.reconciler_name())
self.reconcile_order.append(TraefikMiddlewareReconciler.reconciler_name())
self.reconcilers["ingress"] = IngressReconciler
self.reconcilers["traefik middleware"] = TraefikMiddlewareReconciler
self.reconcile_order.append("ingress")
self.reconcile_order.append("traefik middleware")

View File

@ -69,7 +69,7 @@ class ProxyProviderTests(APITestCase):
self.assertEqual(response.status_code, 400)
self.assertJSONEqual(
response.content.decode(),
{"internal_host": ["Internal host cannot be empty when forward auth is disabled."]},
{"non_field_errors": ["Internal host cannot be empty when forward auth is disabled."]},
)
def test_create_defaults(self):

View File

@ -68,7 +68,7 @@ class SCIMClient(Generic[T, SchemaType]):
"""Get Service provider config"""
default_config = ServiceProviderConfiguration.default()
try:
return ServiceProviderConfiguration.model_validate(
return ServiceProviderConfiguration.parse_obj(
self._request("GET", "/ServiceProviderConfig")
)
except (ValidationError, SCIMRequestException) as exc:

View File

@ -74,7 +74,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
if not raw_scim_group:
raise StopSync(ValueError("No group mappings configured"), obj)
try:
scim_group = SCIMGroupSchema.model_validate(delete_none_values(raw_scim_group))
scim_group = SCIMGroupSchema.parse_obj(delete_none_values(raw_scim_group))
except ValidationError as exc:
raise StopSync(exc, obj) from exc
if not scim_group.externalId:
@ -99,8 +99,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
response = self._request(
"POST",
"/Groups",
json=scim_group.model_dump(
mode="json",
data=scim_group.json(
exclude_unset=True,
),
)
@ -114,8 +113,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
return self._request(
"PUT",
f"/Groups/{scim_group.id}",
json=scim_group.model_dump(
mode="json",
data=scim_group.json(
exclude_unset=True,
),
)
@ -162,13 +160,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
*ops: PatchOperation,
):
req = PatchRequest(Operations=ops)
self._request(
"PATCH",
f"/Groups/{group_id}",
json=req.model_dump(
mode="json",
),
)
self._request("PATCH", f"/Groups/{group_id}", data=req.json())
def _patch_add_users(self, group: Group, users_set: set[int]):
"""Add users in users_set to group"""

View File

@ -52,7 +52,7 @@ class ServiceProviderConfiguration(BaseServiceProviderConfiguration):
class PatchRequest(BasePatchRequest):
"""PatchRequest which correctly sets schemas"""
schemas: tuple[str] = ("urn:ietf:params:scim:api:messages:2.0:PatchOp",)
schemas: tuple[str] = ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]
class SCIMError(BaseSCIMError):

View File

@ -64,7 +64,7 @@ class SCIMUserClient(SCIMClient[User, SCIMUserSchema]):
if not raw_scim_user:
raise StopSync(ValueError("No user mappings configured"), obj)
try:
scim_user = SCIMUserSchema.model_validate(delete_none_values(raw_scim_user))
scim_user = SCIMUserSchema.parse_obj(delete_none_values(raw_scim_user))
except ValidationError as exc:
raise StopSync(exc, obj) from exc
if not scim_user.externalId:
@ -77,8 +77,7 @@ class SCIMUserClient(SCIMClient[User, SCIMUserSchema]):
response = self._request(
"POST",
"/Users",
json=scim_user.model_dump(
mode="json",
data=scim_user.json(
exclude_unset=True,
),
)
@ -91,8 +90,7 @@ class SCIMUserClient(SCIMClient[User, SCIMUserSchema]):
self._request(
"PUT",
f"/Users/{connection.id}",
json=scim_user.model_dump(
mode="json",
data=scim_user.json(
exclude_unset=True,
),
)
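
The SCIM client hunks all follow one pattern: translating between the pydantic v2 API (`model_validate`, `model_dump`) and the v1 API (`parse_obj`, `json`, `dict`). Note that v2's `model_dump(mode="json")` returns a dict suitable for a `json=` request parameter, while v1's `.json()` returns a string that must be sent via `data=`. A minimal side-by-side sketch with a made-up model:

    from pydantic import BaseModel

    class SCIMUserSketch(BaseModel):
        userName: str
        externalId: str | None = None

    payload = {"userName": "jane"}

    user = SCIMUserSketch.model_validate(payload)       # v1: SCIMUserSketch.parse_obj(payload)
    as_dict = user.model_dump(exclude_unset=True)       # v1: user.dict(exclude_unset=True)
    as_json = user.model_dump_json(exclude_unset=True)  # v1: user.json(exclude_unset=True)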

View File

@ -47,7 +47,6 @@ class SCIMMembershipTests(TestCase):
def test_member_add(self):
"""Test member add"""
config = ServiceProviderConfiguration.default()
# pylint: disable=assigning-non-slot
config.patch.supported = True
user_scim_id = generate_id()
group_scim_id = generate_id()
@ -61,7 +60,7 @@ class SCIMMembershipTests(TestCase):
with Mocker() as mocker:
mocker.get(
"https://localhost/ServiceProviderConfig",
json=config.model_dump(),
json=config.dict(),
)
mocker.post(
"https://localhost/Users",
@ -105,7 +104,7 @@ class SCIMMembershipTests(TestCase):
with Mocker() as mocker:
mocker.get(
"https://localhost/ServiceProviderConfig",
json=config.model_dump(),
json=config.dict(),
)
mocker.patch(
f"https://localhost/Groups/{group_scim_id}",
@ -132,7 +131,6 @@ class SCIMMembershipTests(TestCase):
def test_member_remove(self):
"""Test member remove"""
config = ServiceProviderConfiguration.default()
# pylint: disable=assigning-non-slot
config.patch.supported = True
user_scim_id = generate_id()
group_scim_id = generate_id()
@ -146,7 +144,7 @@ class SCIMMembershipTests(TestCase):
with Mocker() as mocker:
mocker.get(
"https://localhost/ServiceProviderConfig",
json=config.model_dump(),
json=config.dict(),
)
mocker.post(
"https://localhost/Users",
@ -190,7 +188,7 @@ class SCIMMembershipTests(TestCase):
with Mocker() as mocker:
mocker.get(
"https://localhost/ServiceProviderConfig",
json=config.model_dump(),
json=config.dict(),
)
mocker.patch(
f"https://localhost/Groups/{group_scim_id}",
@ -217,7 +215,7 @@ class SCIMMembershipTests(TestCase):
with Mocker() as mocker:
mocker.get(
"https://localhost/ServiceProviderConfig",
json=config.model_dump(),
json=config.dict(),
)
mocker.patch(
f"https://localhost/Groups/{group_scim_id}",

View File

@ -44,11 +44,7 @@ def config_loggers(*args, **kwargs):
def after_task_publish_hook(sender=None, headers=None, body=None, **kwargs):
"""Log task_id after it was published"""
info = headers if "task" in headers else body
LOGGER.info(
"Task published",
task_id=info.get("id", "").replace("-", ""),
task_name=info.get("task", ""),
)
LOGGER.info("Task published", task_id=info.get("id", ""), task_name=info.get("task", ""))
@task_prerun.connect
@ -63,9 +59,7 @@ def task_prerun_hook(task_id: str, task, *args, **kwargs):
def task_postrun_hook(task_id, task, *args, retval=None, state=None, **kwargs):
"""Log task_id on worker"""
CTX_TASK_ID.set(...)
LOGGER.info(
"Task finished", task_id=task_id.replace("-", ""), task_name=task.__name__, state=state
)
LOGGER.info("Task finished", task_id=task_id, task_name=task.__name__, state=state)
@task_failure.connect

View File

@ -2,7 +2,7 @@
from functools import lru_cache
from uuid import uuid4
from psycopg import connect
from psycopg2 import connect
from authentik.lib.config import CONFIG
@ -30,7 +30,7 @@ def get_install_id_raw():
user=CONFIG.get("postgresql.user"),
password=CONFIG.get("postgresql.password"),
host=CONFIG.get("postgresql.host"),
port=CONFIG.get_int("postgresql.port"),
port=int(CONFIG.get("postgresql.port")),
sslmode=CONFIG.get("postgresql.sslmode"),
sslrootcert=CONFIG.get("postgresql.sslrootcert"),
sslcert=CONFIG.get("postgresql.sslcert"),
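
The install-ID helper swaps its driver import between psycopg (version 3) and psycopg2; both packages expose a top-level `connect()` accepting the same keyword arguments used here. A minimal sketch with placeholder credentials:

    from psycopg import connect  # psycopg 3; psycopg2 offers an equivalent connect()

    with connect(
        dbname="authentik",
        user="authentik",
        password="example-password",  # placeholder
        host="localhost",
        port=5432,
    ) as conn:
        with conn.cursor() as cursor:
            cursor.execute("SELECT 1")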

View File

@ -10,8 +10,6 @@ from django.contrib.sessions.exceptions import SessionInterrupted
from django.contrib.sessions.middleware import SessionMiddleware as UpstreamSessionMiddleware
from django.http.request import HttpRequest
from django.http.response import HttpResponse
from django.middleware.csrf import CSRF_SESSION_KEY
from django.middleware.csrf import CsrfViewMiddleware as UpstreamCsrfViewMiddleware
from django.utils.cache import patch_vary_headers
from django.utils.http import http_date
from jwt import PyJWTError, decode, encode
@ -133,29 +131,6 @@ class SessionMiddleware(UpstreamSessionMiddleware):
return response
class CsrfViewMiddleware(UpstreamCsrfViewMiddleware):
"""Dynamically set the Secure flag depending on whether the upstream connection uses TLS"""
def _set_csrf_cookie(self, request: HttpRequest, response: HttpResponse):
if settings.CSRF_USE_SESSIONS:
if request.session.get(CSRF_SESSION_KEY) != request.META["CSRF_COOKIE"]:
request.session[CSRF_SESSION_KEY] = request.META["CSRF_COOKIE"]
else:
secure = SessionMiddleware.is_secure(request)
response.set_cookie(
settings.CSRF_COOKIE_NAME,
request.META["CSRF_COOKIE"],
max_age=settings.CSRF_COOKIE_AGE,
domain=settings.CSRF_COOKIE_DOMAIN,
path=settings.CSRF_COOKIE_PATH,
secure=secure,
httponly=settings.CSRF_COOKIE_HTTPONLY,
samesite=settings.CSRF_COOKIE_SAMESITE,
)
# Set the Vary header since content varies with the CSRF cookie.
patch_vary_headers(response, ("Cookie",))
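
The override removed here mirrors the SessionMiddleware above it: rather than trusting a static CSRF_COOKIE_SECURE setting, it marks the CSRF cookie Secure exactly when the upstream connection actually uses TLS. A minimal sketch of the idea, assuming an `is_secure(request)` helper like the `SessionMiddleware.is_secure` referenced above:

    from django.conf import settings

    def set_csrf_cookie(request, response):
        # the Secure flag follows the real connection instead of a static setting
        response.set_cookie(
            settings.CSRF_COOKIE_NAME,
            request.META["CSRF_COOKIE"],
            secure=is_secure(request),  # assumed helper, cf. SessionMiddleware.is_secure
            httponly=settings.CSRF_COOKIE_HTTPONLY,
            samesite=settings.CSRF_COOKIE_SAMESITE,
        )
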
class ChannelsLoggingMiddleware:
"""Logging middleware for channels"""

View File

@ -66,6 +66,7 @@ INSTALLED_APPS = [
"authentik.crypto",
"authentik.events",
"authentik.flows",
"authentik.lib",
"authentik.outposts",
"authentik.policies.dummy",
"authentik.policies.event_matcher",
@ -190,14 +191,14 @@ if CONFIG.get_bool("redis.tls", False):
_redis_url = (
f"{_redis_protocol_prefix}:"
f"{quote_plus(CONFIG.get('redis.password'))}@{quote_plus(CONFIG.get('redis.host'))}:"
f"{CONFIG.get_int('redis.port')}"
f"{int(CONFIG.get('redis.port'))}"
)
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": f"{_redis_url}/{CONFIG.get('redis.db')}",
"TIMEOUT": CONFIG.get_int("redis.cache_timeout", 300),
"TIMEOUT": int(CONFIG.get("redis.cache_timeout", 300)),
"OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
"KEY_PREFIX": "authentik_cache",
}
@ -225,7 +226,7 @@ MIDDLEWARE = [
"authentik.events.middleware.AuditMiddleware",
"django.middleware.security.SecurityMiddleware",
"django.middleware.common.CommonMiddleware",
"authentik.root.middleware.CsrfViewMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"authentik.core.middleware.ImpersonateMiddleware",
@ -274,7 +275,7 @@ DATABASES = {
"NAME": CONFIG.get("postgresql.name"),
"USER": CONFIG.get("postgresql.user"),
"PASSWORD": CONFIG.get("postgresql.password"),
"PORT": CONFIG.get_int("postgresql.port"),
"PORT": int(CONFIG.get("postgresql.port")),
"SSLMODE": CONFIG.get("postgresql.sslmode"),
"SSLROOTCERT": CONFIG.get("postgresql.sslrootcert"),
"SSLCERT": CONFIG.get("postgresql.sslcert"),
@ -293,12 +294,12 @@ if CONFIG.get_bool("postgresql.use_pgbouncer", False):
# loads the config directly from CONFIG
# See authentik/stages/email/models.py, line 105
EMAIL_HOST = CONFIG.get("email.host")
EMAIL_PORT = CONFIG.get_int("email.port")
EMAIL_PORT = int(CONFIG.get("email.port"))
EMAIL_HOST_USER = CONFIG.get("email.username")
EMAIL_HOST_PASSWORD = CONFIG.get("email.password")
EMAIL_USE_TLS = CONFIG.get_bool("email.use_tls", False)
EMAIL_USE_SSL = CONFIG.get_bool("email.use_ssl", False)
EMAIL_TIMEOUT = CONFIG.get_int("email.timeout")
EMAIL_TIMEOUT = int(CONFIG.get("email.timeout"))
DEFAULT_FROM_EMAIL = CONFIG.get("email.from")
SERVER_EMAIL = DEFAULT_FROM_EMAIL
EMAIL_SUBJECT_PREFIX = "[authentik] "
@ -402,7 +403,6 @@ LOG_PRE_CHAIN = [
structlog.stdlib.add_logger_name,
structlog.processors.TimeStamper(),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
]
LOGGING = {

View File

@ -44,11 +44,7 @@ class LDAPSourceSerializer(SourceSerializer):
sources = sources.exclude(pk=self.instance.pk)
if sources.exists():
raise ValidationError(
{
"sync_users_password": (
"Only a single LDAP Source with password synchronization is allowed"
)
}
"Only a single LDAP Source with password synchronization is allowed"
)
return super().validate(attrs)

View File

@ -3,10 +3,7 @@ from django.core.management.base import BaseCommand
from structlog.stdlib import get_logger
from authentik.sources.ldap.models import LDAPSource
from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer
from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer
from authentik.sources.ldap.sync.users import UserLDAPSynchronizer
from authentik.sources.ldap.tasks import ldap_sync_paginator
from authentik.sources.ldap.tasks import ldap_sync_single
LOGGER = get_logger()
@ -23,10 +20,4 @@ class Command(BaseCommand):
if not source:
LOGGER.warning("Source does not exist", slug=source_slug)
continue
tasks = (
ldap_sync_paginator(source, UserLDAPSynchronizer)
+ ldap_sync_paginator(source, GroupLDAPSynchronizer)
+ ldap_sync_paginator(source, MembershipLDAPSynchronizer)
)
for task in tasks:
task()
ldap_sync_single(source)

Some files were not shown because too many files have changed in this diff.