Compare commits


2 Commits

SHA1         Message                                             Date
b8c96c88f5   Update Makefile                                     2025-05-26 16:55:39 +02:00
             Signed-off-by: Teffen Ellis <592134+GirlBossRush@users.noreply.github.com>
16019b8585   core: Prep OpenAPI generators for NPM Workspaces.   2025-05-03 01:57:12 +02:00
771 changed files with 18686 additions and 46223 deletions

View File

@ -1,16 +1,16 @@
[bumpversion]
current_version = 2025.6.0
current_version = 2025.4.0
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
serialize =
serialize =
{major}.{minor}.{patch}-{rc_t}{rc_n}
{major}.{minor}.{patch}
message = release: {new_version}
tag_name = version/{new_version}
[bumpversion:part:rc_t]
values =
values =
rc
final
optional_value = final
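
For illustration, the two serialize templates above expand like this (a minimal Python sketch; the example values are arbitrary and not taken from this diff):

templates = [
    "{major}.{minor}.{patch}-{rc_t}{rc_n}",  # release candidates
    "{major}.{minor}.{patch}",               # final releases
]
print(templates[1].format(major=2025, minor=4, patch=0))                      # -> 2025.4.0
print(templates[0].format(major=2025, minor=4, patch=0, rc_t="rc", rc_n=1))   # -> 2025.4.0-rc1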

View File

@ -36,7 +36,7 @@ runs:
with:
go-version-file: "go.mod"
- name: Setup docker cache
uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
uses: ScribeMD/docker-cache@0.5.0
with:
key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
- name: Setup dependencies

View File

@ -23,13 +23,7 @@ updates:
- package-ecosystem: npm
directories:
- "/web"
- "/web/packages/sfe"
- "/web/packages/core"
- "/web/packages/esbuild-plugin-live-reload"
- "/packages/prettier-config"
- "/packages/tsconfig"
- "/packages/docusaurus-config"
- "/packages/eslint-config"
- "/web/sfe"
schedule:
interval: daily
time: "04:00"
@ -74,9 +68,6 @@ updates:
wdio:
patterns:
- "@wdio/*"
goauthentik:
patterns:
- "@goauthentik/*"
- package-ecosystem: npm
directory: "/website"
schedule:
@ -97,9 +88,6 @@ updates:
- "swc-*"
- "lightningcss*"
- "@rspack/binding*"
goauthentik:
patterns:
- "@goauthentik/*"
- package-ecosystem: npm
directory: "/lifecycle/aws"
schedule:

View File

@ -53,7 +53,6 @@ jobs:
signoff: true
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}

View File

@ -62,7 +62,6 @@ jobs:
psql:
- 15-alpine
- 16-alpine
- 17-alpine
run_id: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
@ -117,7 +116,6 @@ jobs:
psql:
- 15-alpine
- 16-alpine
- 17-alpine
run_id: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v4
@ -202,7 +200,7 @@ jobs:
uses: actions/cache@v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**') }}
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web
@ -210,7 +208,6 @@ jobs:
npm ci
make -C .. gen-client-ts
npm run build
npm run build:sfe
- name: run e2e
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}

View File

@ -29,7 +29,7 @@ jobs:
- name: Generate API
run: make gen-client-go
- name: golangci-lint
uses: golangci/golangci-lint-action@v8
uses: golangci/golangci-lint-action@v7
with:
version: latest
args: --timeout 5000s --verbose

View File

@ -37,7 +37,6 @@ jobs:
signoff: true
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}

View File

@ -53,7 +53,6 @@ jobs:
body: ${{ steps.compress.outputs.markdown }}
delete-branch: true
signoff: true
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@v3
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
with:

View File

@ -7,7 +7,6 @@ on:
- packages/eslint-config/**
- packages/prettier-config/**
- packages/tsconfig/**
- web/packages/esbuild-plugin-live-reload/**
workflow_dispatch:
jobs:
publish:
@ -17,28 +16,27 @@ jobs:
fail-fast: false
matrix:
package:
- packages/docusaurus-config
- packages/eslint-config
- packages/prettier-config
- packages/tsconfig
- web/packages/esbuild-plugin-live-reload
- docusaurus-config
- eslint-config
- prettier-config
- tsconfig
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2
- uses: actions/setup-node@v4
with:
node-version-file: ${{ matrix.package }}/package.json
node-version-file: packages/${{ matrix.package }}/package.json
registry-url: "https://registry.npmjs.org"
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
with:
files: |
${{ matrix.package }}/package.json
packages/${{ matrix.package }}/package.json
- name: Publish package
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ${{ matrix.package }}
working-directory: packages/${{ matrix.package}}
run: |
npm ci
npm run build

View File

@ -52,6 +52,3 @@ jobs:
body: "core, web: update translations"
delete-branch: true
signoff: true
labels: dependencies
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>

View File

@ -15,7 +15,6 @@ jobs:
runs-on: ubuntu-latest
if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
steps:
- uses: actions/checkout@v4
- id: generate_token
uses: tibdex/github-app-token@v2
with:
@ -26,13 +25,23 @@ jobs:
env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: |
title=$(gh pr view ${{ github.event.pull_request.number }} --json "title" -q ".title")
title=$(curl -q -L \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title)
echo "title=${title}" >> "$GITHUB_OUTPUT"
- name: Rename
env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: |
gh pr edit ${{ github.event.pull_request.number }} -t "translate: ${{ steps.title.outputs.title }}" --add-label dependencies
curl -L \
-X PATCH \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \
-d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}"
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}

View File

@ -1,7 +1,7 @@
# syntax=docker/dockerfile:1
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS website-builder
FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS website-builder
ENV NODE_ENV=production
@ -20,7 +20,7 @@ COPY ./SECURITY.md /work/
RUN npm run build-bundled
# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/library/node:24 AS web-builder
FROM --platform=${BUILDPLATFORM} docker.io/library/node:22 AS web-builder
ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
@ -40,8 +40,7 @@ COPY ./web /work/web/
COPY ./website /work/website/
COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
RUN npm run build && \
npm run build:sfe
RUN npm run build
# Stage 3: Build go proxy
FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.24-bookworm AS go-builder
@ -86,17 +85,18 @@ FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
ENV GEOIPUPDATE_VERBOSE="1"
ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"
USER root
RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
--mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
mkdir -p /usr/share/GeoIP && \
/bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
# Stage 5: Download uv
FROM ghcr.io/astral-sh/uv:0.7.8 AS uv
FROM ghcr.io/astral-sh/uv:0.7.2 AS uv
# Stage 6: Base python image
FROM ghcr.io/goauthentik/fips-python:3.13.3-slim-bookworm-fips AS python-base
FROM ghcr.io/goauthentik/fips-python:3.12.10-slim-bookworm-fips AS python-base
ENV VENV_PATH="/ak-root/.venv" \
PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \

View File

@ -121,7 +121,7 @@ gen-clean-ts: ## Remove generated API client for Typescript
rm -rf ${PWD}/${GEN_API_TS}/
rm -rf ${PWD}/web/node_modules/@goauthentik/api/
gen-clean-go: ## Remove generated API client for Go
gen-clean-go: ## Remove generated API client for Go
mkdir -p ${PWD}/${GEN_API_GO}
ifneq ($(wildcard ${PWD}/${GEN_API_GO}/.*),)
make -C ${PWD}/${GEN_API_GO} clean
@ -129,39 +129,23 @@ else
rm -rf ${PWD}/${GEN_API_GO}
endif
gen-clean-py: ## Remove generated API client for Python
gen-clean-py: ## Remove generated API client for Python
rm -rf ${PWD}/${GEN_API_PY}/
gen-clean: gen-clean-ts gen-clean-go gen-clean-py ## Remove generated API clients
gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescript into the authentik UI Application
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/${GEN_API_TS} \
-c /local/scripts/api-ts-config.yaml \
--additional-properties=npmVersion=${NPM_VERSION} \
--git-repo-id authentik \
--git-user-id goauthentik
mkdir -p web/node_modules/@goauthentik/api
cd ${PWD}/${GEN_API_TS} && npm i
\cp -rf ${PWD}/${GEN_API_TS}/* web/node_modules/@goauthentik/api
./scripts/gen-client-ts.mjs
npm i --prefix ${GEN_API_TS}
cd ./${GEN_API_TS} && npm link
cd ./web && npm link @goauthentik/api
gen-client-py: gen-clean-py ## Build and install the authentik API for Python
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
-i /local/schema.yml \
-g python \
-o /local/${GEN_API_PY} \
-c /local/scripts/api-py-config.yaml \
--additional-properties=packageVersion=${NPM_VERSION} \
--git-repo-id authentik \
--git-user-id goauthentik
./scripts/gen-client-py.mjs
pip install ./${GEN_API_PY}
gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
mkdir -p ${PWD}/${GEN_API_GO}
@ -263,14 +247,3 @@ ci-ruff: ci--meta-debug
ci-codespell: ci--meta-debug
uv run codespell -s
ci-bandit: ci--meta-debug
uv run bandit -r $(PY_SOURCES)
ci-pending-migrations: ci--meta-debug
uv run ak makemigrations --check
ci-test: ci--meta-debug
uv run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
uv run coverage report
uv run coverage xml

View File

@ -42,4 +42,4 @@ See [SECURITY.md](SECURITY.md)
## Adoption and Contributions
Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [contribution guide](https://docs.goauthentik.io/docs/developer-docs?utm_source=github).
Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).

View File

@ -2,7 +2,7 @@
from os import environ
__version__ = "2025.6.0"
__version__ = "2025.4.0"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@ -1,12 +1,9 @@
"""API Authentication"""
from hmac import compare_digest
from pathlib import Path
from tempfile import gettempdir
from typing import Any
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from drf_spectacular.extensions import OpenApiAuthenticationExtension
from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.exceptions import AuthenticationFailed
@ -14,17 +11,11 @@ from rest_framework.request import Request
from structlog.stdlib import get_logger
from authentik.core.middleware import CTX_AUTH_VIA
from authentik.core.models import Token, TokenIntents, User, UserTypes
from authentik.core.models import Token, TokenIntents, User
from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
LOGGER = get_logger()
_tmp = Path(gettempdir())
try:
with open(_tmp / "authentik-core-ipc.key") as _f:
ipc_key = _f.read()
except OSError:
ipc_key = None
def validate_auth(header: bytes) -> str | None:
@ -82,11 +73,6 @@ def auth_user_lookup(raw_header: bytes) -> User | None:
if user:
CTX_AUTH_VIA.set("secret_key")
return user
# then try to auth via secret key (for embedded outpost/etc)
user = token_ipc(auth_credentials)
if user:
CTX_AUTH_VIA.set("ipc")
return user
raise AuthenticationFailed("Token invalid/expired")
@ -104,43 +90,6 @@ def token_secret_key(value: str) -> User | None:
return outpost.user
class IPCUser(AnonymousUser):
"""'Virtual' user for IPC communication between authentik core and the authentik router"""
username = "authentik:system"
is_active = True
is_superuser = True
@property
def type(self):
return UserTypes.INTERNAL_SERVICE_ACCOUNT
def has_perm(self, perm, obj=None):
return True
def has_perms(self, perm_list, obj=None):
return True
def has_module_perms(self, module):
return True
@property
def is_anonymous(self):
return False
@property
def is_authenticated(self):
return True
def token_ipc(value: str) -> User | None:
"""Check if the token is the secret key
and return the service account for the managed outpost"""
if not ipc_key or not compare_digest(value, ipc_key):
return None
return IPCUser()
class TokenAuthentication(BaseAuthentication):
"""Token-based authentication using HTTP Bearer authentication"""

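For reference, a minimal sketch of how a local process could use the IPC key handled in the file above (an assumption based on the token_ipc and auth_user_lookup code shown; the HTTP client and endpoint below are placeholders, not part of authentik):

from pathlib import Path
from tempfile import gettempdir

import requests  # placeholder HTTP client

# Read the same temporary key file the module above opens.
ipc_key = (Path(gettempdir()) / "authentik-core-ipc.key").read_text()

# auth_user_lookup() treats the value as a normal Bearer credential.
response = requests.get(
    "http://localhost:9000/api/v3/root/config/",  # hypothetical local endpoint
    headers={"Authorization": f"Bearer {ipc_key}"},
)
print(response.status_code)
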
View File

@ -54,7 +54,7 @@ def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedCom
return component
def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): # noqa: W0613
"""Workaround to set a default response for endpoints.
Workaround suggested at
<https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357>

View File

@ -164,7 +164,9 @@ class BlueprintEntry:
"""Get the blueprint model, with yaml tags resolved if present"""
return str(self.tag_resolver(self.model, blueprint))
def get_permissions(self, blueprint: "Blueprint") -> Generator[BlueprintEntryPermission]:
def get_permissions(
self, blueprint: "Blueprint"
) -> Generator[BlueprintEntryPermission, None, None]:
"""Get permissions of this entry, with all yaml tags resolved"""
for perm in self.permissions:
yield BlueprintEntryPermission(

View File

@ -59,7 +59,6 @@ class BrandSerializer(ModelSerializer):
"flow_device_code",
"default_application",
"web_certificate",
"client_certificates",
"attributes",
]
extra_kwargs = {
@ -121,7 +120,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
"domain",
"branding_title",
"web_certificate__name",
"client_certificates__name",
]
filterset_fields = [
"brand_uuid",
@ -138,7 +136,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
"flow_user_settings",
"flow_device_code",
"web_certificate",
"client_certificates",
]
ordering = ["domain"]

View File

@ -1,37 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-19 15:09
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_brands", "0009_brand_branding_default_flow_background"),
("authentik_crypto", "0004_alter_certificatekeypair_name"),
]
operations = [
migrations.AddField(
model_name="brand",
name="client_certificates",
field=models.ManyToManyField(
blank=True,
default=None,
help_text="Certificates used for client authentication.",
to="authentik_crypto.certificatekeypair",
),
),
migrations.AlterField(
model_name="brand",
name="web_certificate",
field=models.ForeignKey(
default=None,
help_text="Web Certificate used by the authentik Core webserver.",
null=True,
on_delete=django.db.models.deletion.SET_DEFAULT,
related_name="+",
to="authentik_crypto.certificatekeypair",
),
),
]

View File

@ -73,13 +73,6 @@ class Brand(SerializerModel):
default=None,
on_delete=models.SET_DEFAULT,
help_text=_("Web Certificate used by the authentik Core webserver."),
related_name="+",
)
client_certificates = models.ManyToManyField(
CertificateKeyPair,
default=None,
blank=True,
help_text=_("Certificates used for client authentication."),
)
attributes = models.JSONField(default=dict, blank=True)

View File

@ -5,10 +5,10 @@ from typing import Any
from django.db.models import F, Q
from django.db.models import Value as V
from django.http.request import HttpRequest
from sentry_sdk import get_current_span
from authentik import get_full_version
from authentik.brands.models import Brand
from authentik.lib.sentry import get_http_meta
from authentik.tenants.models import Tenant
_q_default = Q(default=True)
@ -32,9 +32,13 @@ def context_processor(request: HttpRequest) -> dict[str, Any]:
"""Context Processor that injects brand object into every template"""
brand = getattr(request, "brand", DEFAULT_BRAND)
tenant = getattr(request, "tenant", Tenant())
trace = ""
span = get_current_span()
if span:
trace = span.to_traceparent()
return {
"brand": brand,
"footer_links": tenant.footer_links,
"html_meta": {**get_http_meta()},
"sentry_trace": trace,
"version": get_full_version(),
}

View File

@ -99,17 +99,18 @@ class GroupSerializer(ModelSerializer):
if superuser
else "authentik_core.disable_group_superuser"
)
if self.instance or superuser:
has_perm = user.has_perm(perm) or user.has_perm(perm, self.instance)
if not has_perm:
raise ValidationError(
_(
(
"User does not have permission to set "
"superuser status to {superuser_status}."
).format_map({"superuser_status": superuser})
)
has_perm = user.has_perm(perm)
if self.instance and not has_perm:
has_perm = user.has_perm(perm, self.instance)
if not has_perm:
raise ValidationError(
_(
(
"User does not have permission to set "
"superuser status to {superuser_status}."
).format_map({"superuser_status": superuser})
)
)
return superuser
class Meta:

View File

@ -84,7 +84,6 @@ from authentik.flows.views.executor import QS_KEY_TOKEN
from authentik.lib.avatars import get_avatar
from authentik.rbac.decorators import permission_required
from authentik.rbac.models import get_permission_choices
from authentik.stages.email.flow import pickle_flow_token_for_email
from authentik.stages.email.models import EmailStage
from authentik.stages.email.tasks import send_mails
from authentik.stages.email.utils import TemplateEmailMessage
@ -452,7 +451,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
def list(self, request, *args, **kwargs):
return super().list(request, *args, **kwargs)
def _create_recovery_link(self, for_email=False) -> tuple[str, Token]:
def _create_recovery_link(self) -> tuple[str, Token]:
"""Create a recovery link (when the current brand has a recovery flow set),
that can either be shown to an admin or sent to the user directly"""
brand: Brand = self.request._request.brand
@ -474,16 +473,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):
raise ValidationError(
{"non_field_errors": "Recovery flow not applicable to user"}
) from None
_plan = FlowToken.pickle(plan)
if for_email:
_plan = pickle_flow_token_for_email(plan)
token, __ = FlowToken.objects.update_or_create(
identifier=f"{user.uid}-password-reset",
defaults={
"user": user,
"flow": flow,
"_plan": _plan,
"revoke_on_execution": not for_email,
"_plan": FlowToken.pickle(plan),
},
)
querystring = urlencode({QS_KEY_TOKEN: token.key})
@ -653,7 +648,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
if for_user.email == "":
LOGGER.debug("User doesn't have an email address")
raise ValidationError({"non_field_errors": "User does not have an email address set."})
link, token = self._create_recovery_link(for_email=True)
link, token = self._create_recovery_link()
# Lookup the email stage to assure the current user can access it
stages = get_objects_for_user(
request.user, "authentik_stages_email.view_emailstage"

View File

@ -2,7 +2,6 @@
from django.apps import apps
from django.contrib.auth.management import create_permissions
from django.core.management import call_command
from django.core.management.base import BaseCommand, no_translations
from guardian.management import create_anonymous_user
@ -17,10 +16,6 @@ class Command(BaseCommand):
"""Check permissions for all apps"""
for tenant in Tenant.objects.filter(ready=True):
with tenant:
# See https://code.djangoproject.com/ticket/28417
# Remove potential lingering old permissions
call_command("remove_stale_contenttypes", "--no-input")
for app in apps.get_app_configs():
self.stdout.write(f"Checking app {app.name} ({app.label})\n")
create_permissions(app, verbosity=0)

View File

@ -31,10 +31,7 @@ class PickleSerializer:
def loads(self, data):
"""Unpickle data to be loaded from redis"""
try:
return pickle.loads(data) # nosec
except Exception:
return {}
return pickle.loads(data) # nosec
def _migrate_session(
@ -79,7 +76,6 @@ def _migrate_session(
AuthenticatedSession.objects.using(db_alias).create(
session=session,
user=old_auth_session.user,
uuid=old_auth_session.uuid,
)

View File

@ -1,103 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-14 11:15
from django.apps.registry import Apps, apps as global_apps
from django.db import migrations
from django.contrib.contenttypes.management import create_contenttypes
from django.contrib.auth.management import create_permissions
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def migrate_authenticated_session_permissions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
"""Migrate permissions from OldAuthenticatedSession to AuthenticatedSession"""
db_alias = schema_editor.connection.alias
# `apps` here is just an instance of `django.db.migrations.state.AppConfigStub`, we need the
# real config for creating permissions and content types
authentik_core_config = global_apps.get_app_config("authentik_core")
# These are only ran by django after all migrations, but we need them right now.
# `global_apps` is needed,
create_permissions(authentik_core_config, using=db_alias, verbosity=1)
create_contenttypes(authentik_core_config, using=db_alias, verbosity=1)
# But from now on, this is just a regular migration, so use `apps`
Permission = apps.get_model("auth", "Permission")
ContentType = apps.get_model("contenttypes", "ContentType")
try:
old_ct = ContentType.objects.using(db_alias).get(
app_label="authentik_core", model="oldauthenticatedsession"
)
new_ct = ContentType.objects.using(db_alias).get(
app_label="authentik_core", model="authenticatedsession"
)
except ContentType.DoesNotExist:
# This should exist at this point, but if not, let's cut our losses
return
# Get all permissions for the old content type
old_perms = Permission.objects.using(db_alias).filter(content_type=old_ct)
# Create equivalent permissions for the new content type
for old_perm in old_perms:
new_perm = (
Permission.objects.using(db_alias)
.filter(
content_type=new_ct,
codename=old_perm.codename,
)
.first()
)
if not new_perm:
# This should exist at this point, but if not, let's cut our losses
continue
# Global user permissions
User = apps.get_model("authentik_core", "User")
User.user_permissions.through.objects.using(db_alias).filter(
permission=old_perm
).all().update(permission=new_perm)
# Global role permissions
DjangoGroup = apps.get_model("auth", "Group")
DjangoGroup.permissions.through.objects.using(db_alias).filter(
permission=old_perm
).all().update(permission=new_perm)
# Object user permissions
UserObjectPermission = apps.get_model("guardian", "UserObjectPermission")
UserObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
permission=new_perm, content_type=new_ct
)
# Object role permissions
GroupObjectPermission = apps.get_model("guardian", "GroupObjectPermission")
GroupObjectPermission.objects.using(db_alias).filter(permission=old_perm).all().update(
permission=new_perm, content_type=new_ct
)
def remove_old_authenticated_session_content_type(
apps: Apps, schema_editor: BaseDatabaseSchemaEditor
):
db_alias = schema_editor.connection.alias
ContentType = apps.get_model("contenttypes", "ContentType")
ContentType.objects.using(db_alias).filter(model="oldauthenticatedsession").delete()
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0047_delete_oldauthenticatedsession"),
]
operations = [
migrations.RunPython(
code=migrate_authenticated_session_permissions,
reverse_code=migrations.RunPython.noop,
),
migrations.RunPython(
code=remove_old_authenticated_session_content_type,
reverse_code=migrations.RunPython.noop,
),
]

View File

@ -21,9 +21,7 @@
<script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script>
{% block head %}
{% endblock %}
{% for key, value in html_meta.items %}
<meta name="{{key}}" content="{{ value }}" />
{% endfor %}
<meta name="sentry-trace" content="{{ sentry_trace }}" />
</head>
<body>
{% block body %}

View File

@ -124,16 +124,6 @@ class TestGroupsAPI(APITestCase):
{"is_superuser": ["User does not have permission to set superuser status to True."]},
)
def test_superuser_no_perm_no_superuser(self):
"""Test creating a group without permission and without superuser flag"""
assign_perm("authentik_core.add_group", self.login_user)
self.client.force_login(self.login_user)
res = self.client.post(
reverse("authentik_api:group-list"),
data={"name": generate_id(), "is_superuser": False},
)
self.assertEqual(res.status_code, 201)
def test_superuser_update_no_perm(self):
"""Test updating a superuser group without permission"""
group = Group.objects.create(name=generate_id(), is_superuser=True)

View File

@ -30,7 +30,6 @@ from structlog.stdlib import get_logger
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.core.models import UserTypes
from authentik.crypto.apps import MANAGED_KEY
from authentik.crypto.builder import CertificateBuilder, PrivateKeyAlg
from authentik.crypto.models import CertificateKeyPair
@ -273,12 +272,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
def view_certificate(self, request: Request, pk: str) -> Response:
"""Return certificate-key pairs certificate and log access"""
certificate: CertificateKeyPair = self.get_object()
if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT:
Event.new( # noqa # nosec
EventAction.SECRET_VIEW,
secret=certificate,
type="certificate",
).from_http(request)
Event.new( # noqa # nosec
EventAction.SECRET_VIEW,
secret=certificate,
type="certificate",
).from_http(request)
if "download" in request.query_params:
# Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
response = HttpResponse(
@ -304,12 +302,11 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
def view_private_key(self, request: Request, pk: str) -> Response:
"""Return certificate-key pairs private key and log access"""
certificate: CertificateKeyPair = self.get_object()
if request.user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT:
Event.new( # noqa # nosec
EventAction.SECRET_VIEW,
secret=certificate,
type="private_key",
).from_http(request)
Event.new( # noqa # nosec
EventAction.SECRET_VIEW,
secret=certificate,
type="private_key",
).from_http(request)
if "download" in request.query_params:
# Mime type from https://pki-tutorial.readthedocs.io/en/latest/mime.html
response = HttpResponse(certificate.key_data, content_type="application/x-pem-file")

View File

@ -132,14 +132,13 @@ class LicenseKey:
"""Get a summarized version of all (not expired) licenses"""
total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0)
for lic in License.objects.all():
if lic.is_valid:
total.internal_users += lic.internal_users
total.external_users += lic.external_users
total.license_flags.extend(lic.status.license_flags)
total.internal_users += lic.internal_users
total.external_users += lic.external_users
exp_ts = int(mktime(lic.expiry.timetuple()))
if total.exp == 0:
total.exp = exp_ts
total.exp = max(total.exp, exp_ts)
total.license_flags.extend(lic.status.license_flags)
return total
@staticmethod

View File

@ -39,10 +39,6 @@ class License(SerializerModel):
internal_users = models.BigIntegerField()
external_users = models.BigIntegerField()
@property
def is_valid(self) -> bool:
return self.expiry >= now()
@property
def serializer(self) -> type[BaseSerializer]:
from authentik.enterprise.api import LicenseSerializer

View File

@ -25,7 +25,7 @@ class GoogleWorkspaceGroupClient(
"""Google client for groups"""
connection_type = GoogleWorkspaceProviderGroup
connection_attr = "googleworkspaceprovidergroup_set"
connection_type_query = "group"
can_discover = True
def __init__(self, provider: GoogleWorkspaceProvider) -> None:

View File

@ -20,7 +20,7 @@ class GoogleWorkspaceUserClient(GoogleWorkspaceSyncClient[User, GoogleWorkspaceP
"""Sync authentik users into google workspace"""
connection_type = GoogleWorkspaceProviderUser
connection_attr = "googleworkspaceprovideruser_set"
connection_type_query = "user"
can_discover = True
def __init__(self, provider: GoogleWorkspaceProvider) -> None:

View File

@ -132,11 +132,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
if type == User:
# Get queryset of all users with consistent ordering
# according to the provider's settings
base = (
User.objects.prefetch_related("googleworkspaceprovideruser_set")
.all()
.exclude_anonymous()
)
base = User.objects.all().exclude_anonymous()
if self.exclude_users_service_account:
base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
type=UserTypes.INTERNAL_SERVICE_ACCOUNT
@ -146,11 +142,7 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
return base.order_by("pk")
if type == Group:
# Get queryset of all groups with consistent ordering
return (
Group.objects.prefetch_related("googleworkspaceprovidergroup_set")
.all()
.order_by("pk")
)
return Group.objects.all().order_by("pk")
raise ValueError(f"Invalid type {type}")
def google_credentials(self):

View File

@ -29,7 +29,7 @@ class MicrosoftEntraGroupClient(
"""Microsoft client for groups"""
connection_type = MicrosoftEntraProviderGroup
connection_attr = "microsoftentraprovidergroup_set"
connection_type_query = "group"
can_discover = True
def __init__(self, provider: MicrosoftEntraProvider) -> None:

View File

@ -24,7 +24,7 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv
"""Sync authentik users into microsoft entra"""
connection_type = MicrosoftEntraProviderUser
connection_attr = "microsoftentraprovideruser_set"
connection_type_query = "user"
can_discover = True
def __init__(self, provider: MicrosoftEntraProvider) -> None:

View File

@ -121,11 +121,7 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider):
if type == User:
# Get queryset of all users with consistent ordering
# according to the provider's settings
base = (
User.objects.prefetch_related("microsoftentraprovideruser_set")
.all()
.exclude_anonymous()
)
base = User.objects.all().exclude_anonymous()
if self.exclude_users_service_account:
base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
type=UserTypes.INTERNAL_SERVICE_ACCOUNT
@ -135,11 +131,7 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider):
return base.order_by("pk")
if type == Group:
# Get queryset of all groups with consistent ordering
return (
Group.objects.prefetch_related("microsoftentraprovidergroup_set")
.all()
.order_by("pk")
)
return Group.objects.all().order_by("pk")
raise ValueError(f"Invalid type {type}")
def microsoft_credentials(self):

View File

@ -19,7 +19,6 @@ TENANT_APPS = [
"authentik.enterprise.providers.microsoft_entra",
"authentik.enterprise.providers.ssf",
"authentik.enterprise.stages.authenticator_endpoint_gdtc",
"authentik.enterprise.stages.mtls",
"authentik.enterprise.stages.source",
]

View File

@ -1,31 +0,0 @@
"""Mutual TLS Stage API Views"""
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.used_by import UsedByMixin
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.stages.mtls.models import MutualTLSStage
from authentik.flows.api.stages import StageSerializer
class MutualTLSStageSerializer(EnterpriseRequiredMixin, StageSerializer):
"""MutualTLSStage Serializer"""
class Meta:
model = MutualTLSStage
fields = StageSerializer.Meta.fields + [
"mode",
"certificate_authorities",
"cert_attribute",
"user_attribute",
]
class MutualTLSStageViewSet(UsedByMixin, ModelViewSet):
"""MutualTLSStage Viewset"""
queryset = MutualTLSStage.objects.all()
serializer_class = MutualTLSStageSerializer
filterset_fields = "__all__"
ordering = ["name"]
search_fields = ["name"]

View File

@ -1,12 +0,0 @@
"""authentik stage app config"""
from authentik.enterprise.apps import EnterpriseConfig
class AuthentikEnterpriseStageMTLSConfig(EnterpriseConfig):
"""authentik MTLS stage config"""
name = "authentik.enterprise.stages.mtls"
label = "authentik_stages_mtls"
verbose_name = "authentik Enterprise.Stages.MTLS"
default = True

View File

@ -1,68 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-19 18:29
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
("authentik_crypto", "0004_alter_certificatekeypair_name"),
("authentik_flows", "0027_auto_20231028_1424"),
]
operations = [
migrations.CreateModel(
name="MutualTLSStage",
fields=[
(
"stage_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="authentik_flows.stage",
),
),
(
"mode",
models.TextField(choices=[("optional", "Optional"), ("required", "Required")]),
),
(
"cert_attribute",
models.TextField(
choices=[
("subject", "Subject"),
("common_name", "Common Name"),
("email", "Email"),
]
),
),
(
"user_attribute",
models.TextField(choices=[("username", "Username"), ("email", "Email")]),
),
(
"certificate_authorities",
models.ManyToManyField(
blank=True,
default=None,
help_text="Configure certificate authorities to validate the certificate against. This option has a higher priority than the `client_certificate` option on `Brand`.",
to="authentik_crypto.certificatekeypair",
),
),
],
options={
"verbose_name": "Mutual TLS Stage",
"verbose_name_plural": "Mutual TLS Stages",
"permissions": [
("pass_outpost_certificate", "Permissions to pass Certificates for outposts.")
],
},
bases=("authentik_flows.stage",),
),
]

View File

@ -1,71 +0,0 @@
from django.db import models
from django.utils.translation import gettext_lazy as _
from rest_framework.serializers import Serializer
from authentik.crypto.models import CertificateKeyPair
from authentik.flows.models import Stage
from authentik.flows.stage import StageView
class TLSMode(models.TextChoices):
"""Modes the TLS Stage can operate in"""
OPTIONAL = "optional"
REQUIRED = "required"
class CertAttributes(models.TextChoices):
"""Certificate attribute used for user matching"""
SUBJECT = "subject"
COMMON_NAME = "common_name"
EMAIL = "email"
class UserAttributes(models.TextChoices):
"""User attribute for user matching"""
USERNAME = "username"
EMAIL = "email"
class MutualTLSStage(Stage):
"""Authenticate/enroll users using a client-certificate."""
mode = models.TextField(choices=TLSMode.choices)
certificate_authorities = models.ManyToManyField(
CertificateKeyPair,
default=None,
blank=True,
help_text=_(
"Configure certificate authorities to validate the certificate against. "
"This option has a higher priority than the `client_certificate` option on `Brand`."
),
)
cert_attribute = models.TextField(choices=CertAttributes.choices)
user_attribute = models.TextField(choices=UserAttributes.choices)
@property
def view(self) -> type[StageView]:
from authentik.enterprise.stages.mtls.stage import MTLSStageView
return MTLSStageView
@property
def serializer(self) -> type[Serializer]:
from authentik.enterprise.stages.mtls.api import MutualTLSStageSerializer
return MutualTLSStageSerializer
@property
def component(self) -> str:
return "ak-stage-mtls-form"
class Meta:
verbose_name = _("Mutual TLS Stage")
verbose_name_plural = _("Mutual TLS Stages")
permissions = [
("pass_outpost_certificate", _("Permissions to pass Certificates for outposts.")),
]

View File

@ -1,230 +0,0 @@
from binascii import hexlify
from urllib.parse import unquote_plus
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives import hashes
from cryptography.x509 import (
Certificate,
NameOID,
ObjectIdentifier,
UnsupportedGeneralNameType,
load_pem_x509_certificate,
)
from cryptography.x509.verification import PolicyBuilder, Store, VerificationError
from django.utils.translation import gettext_lazy as _
from authentik.brands.models import Brand
from authentik.core.models import User
from authentik.crypto.models import CertificateKeyPair
from authentik.enterprise.stages.mtls.models import (
CertAttributes,
MutualTLSStage,
TLSMode,
UserAttributes,
)
from authentik.flows.challenge import AccessDeniedChallenge
from authentik.flows.models import FlowDesignation
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
from authentik.flows.stage import ChallengeStageView
from authentik.root.middleware import ClientIPMiddleware
from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
# All of these headers must only be accepted from "trusted" reverse proxies
# See internal/web/proxy.go:39
HEADER_PROXY_FORWARDED = "X-Forwarded-Client-Cert"
HEADER_NGINX_FORWARDED = "SSL-Client-Cert"
HEADER_TRAEFIK_FORWARDED = "X-Forwarded-TLS-Client-Cert"
HEADER_OUTPOST_FORWARDED = "X-Authentik-Outpost-Certificate"
PLAN_CONTEXT_CERTIFICATE = "certificate"
class MTLSStageView(ChallengeStageView):
def __parse_single_cert(self, raw: str | None) -> list[Certificate]:
"""Helper to parse a single certificate"""
if not raw:
return []
try:
cert = load_pem_x509_certificate(unquote_plus(raw).encode())
return [cert]
except ValueError as exc:
self.logger.info("Failed to parse certificate", exc=exc)
return []
def _parse_cert_xfcc(self) -> list[Certificate]:
"""Parse certificates in the format given to us in
the format of the authentik router/envoy"""
xfcc_raw = self.request.headers.get(HEADER_PROXY_FORWARDED)
if not xfcc_raw:
return []
certs = []
for r_cert in xfcc_raw.split(","):
el = r_cert.split(";")
raw_cert = {k.split("=")[0]: k.split("=")[1] for k in el}
if "Cert" not in raw_cert:
continue
certs.extend(self.__parse_single_cert(raw_cert["Cert"]))
return certs
def _parse_cert_nginx(self) -> list[Certificate]:
"""Parse certificates in the format nginx-ingress gives to us"""
sslcc_raw = self.request.headers.get(HEADER_NGINX_FORWARDED)
return self.__parse_single_cert(sslcc_raw)
def _parse_cert_traefik(self) -> list[Certificate]:
"""Parse certificates in the format traefik gives to us"""
ftcc_raw = self.request.headers.get(HEADER_TRAEFIK_FORWARDED)
return self.__parse_single_cert(ftcc_raw)
def _parse_cert_outpost(self) -> list[Certificate]:
"""Parse certificates in the format outposts give to us. Also authenticates
the outpost to ensure it has the permission to do so"""
user = ClientIPMiddleware.get_outpost_user(self.request)
if not user:
return []
if not user.has_perm(
"pass_outpost_certificate", self.executor.current_stage
) and not user.has_perm("authentik_stages_mtls.pass_outpost_certificate"):
return []
outpost_raw = self.request.headers.get(HEADER_OUTPOST_FORWARDED)
return self.__parse_single_cert(outpost_raw)
def get_authorities(self) -> list[CertificateKeyPair] | None:
# We can't access `certificate_authorities` on `self.executor.current_stage`, as that would
# load the certificate into the directly referenced foreign key, which we have to pickle
# as part of the flow plan, and cryptography certs can't be pickled
stage: MutualTLSStage = (
MutualTLSStage.objects.filter(pk=self.executor.current_stage.pk)
.prefetch_related("certificate_authorities")
.first()
)
if stage.certificate_authorities.exists():
return stage.certificate_authorities.order_by("name")
brand: Brand = self.request.brand
if brand.client_certificates.exists():
return brand.client_certificates.order_by("name")
return None
def validate_cert(self, authorities: list[CertificateKeyPair], certs: list[Certificate]):
authorities_cert = [x.certificate for x in authorities]
for _cert in certs:
try:
PolicyBuilder().store(Store(authorities_cert)).build_client_verifier().verify(
_cert, []
)
return _cert
except (
InvalidSignature,
TypeError,
ValueError,
VerificationError,
UnsupportedGeneralNameType,
) as exc:
self.logger.warning("Discarding invalid certificate", cert=_cert, exc=exc)
continue
return None
def check_if_user(self, cert: Certificate):
stage: MutualTLSStage = self.executor.current_stage
cert_attr = None
user_attr = None
match stage.cert_attribute:
case CertAttributes.SUBJECT:
cert_attr = cert.subject.rfc4514_string()
case CertAttributes.COMMON_NAME:
cert_attr = self.get_cert_attribute(cert, NameOID.COMMON_NAME)
case CertAttributes.EMAIL:
cert_attr = self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS)
match stage.user_attribute:
case UserAttributes.USERNAME:
user_attr = "username"
case UserAttributes.EMAIL:
user_attr = "email"
if not user_attr or not cert_attr:
return None
return User.objects.filter(**{user_attr: cert_attr}).first()
def _cert_to_dict(self, cert: Certificate) -> dict:
"""Represent a certificate in a dictionary, as certificate objects cannot be pickled"""
return {
"serial_number": str(cert.serial_number),
"subject": cert.subject.rfc4514_string(),
"issuer": cert.issuer.rfc4514_string(),
"fingerprint_sha256": hexlify(cert.fingerprint(hashes.SHA256()), ":").decode("utf-8"),
"fingerprint_sha1": hexlify(cert.fingerprint(hashes.SHA1()), ":").decode( # nosec
"utf-8"
),
}
def auth_user(self, user: User, cert: Certificate):
self.executor.plan.context[PLAN_CONTEXT_PENDING_USER] = user
self.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD, "mtls")
self.executor.plan.context.setdefault(PLAN_CONTEXT_METHOD_ARGS, {})
self.executor.plan.context[PLAN_CONTEXT_METHOD_ARGS].update(
{"certificate": self._cert_to_dict(cert)}
)
def enroll_prepare_user(self, cert: Certificate):
self.executor.plan.context.setdefault(PLAN_CONTEXT_PROMPT, {})
self.executor.plan.context[PLAN_CONTEXT_PROMPT].update(
{
"email": self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS),
"name": self.get_cert_attribute(cert, NameOID.COMMON_NAME),
}
)
self.executor.plan.context[PLAN_CONTEXT_CERTIFICATE] = self._cert_to_dict(cert)
def get_cert_attribute(self, cert: Certificate, oid: ObjectIdentifier) -> str | None:
attr = cert.subject.get_attributes_for_oid(oid)
if len(attr) < 1:
return None
return str(attr[0].value)
def dispatch(self, request, *args, **kwargs):
stage: MutualTLSStage = self.executor.current_stage
certs = [
*self._parse_cert_xfcc(),
*self._parse_cert_nginx(),
*self._parse_cert_traefik(),
*self._parse_cert_outpost(),
]
authorities = self.get_authorities()
if not authorities:
self.logger.warning("No Certificate authority found")
if stage.mode == TLSMode.OPTIONAL:
return self.executor.stage_ok()
if stage.mode == TLSMode.REQUIRED:
return super().dispatch(request, *args, **kwargs)
cert = self.validate_cert(authorities, certs)
if not cert and stage.mode == TLSMode.REQUIRED:
self.logger.warning("Client certificate required but no certificates given")
return super().dispatch(
request,
*args,
error_message=_("Certificate required but no certificate was given."),
**kwargs,
)
if not cert and stage.mode == TLSMode.OPTIONAL:
self.logger.info("No certificate given, continuing")
return self.executor.stage_ok()
existing_user = self.check_if_user(cert)
if self.executor.flow.designation == FlowDesignation.ENROLLMENT:
self.enroll_prepare_user(cert)
elif existing_user:
self.auth_user(existing_user, cert)
else:
return super().dispatch(
request, *args, error_message=_("No user found for certificate."), **kwargs
)
return self.executor.stage_ok()
def get_challenge(self, *args, error_message: str | None = None, **kwargs):
return AccessDeniedChallenge(
data={
"component": "ak-stage-access-denied",
"error_message": str(error_message or "Unknown error"),
}
)
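
To illustrate the parser methods above, a minimal sketch of the headers each reverse proxy is expected to send (the certificate path is a placeholder; the URL-encoding matches __parse_single_cert and the tests later in this comparison):

from pathlib import Path
from urllib.parse import quote_plus

pem = Path("client.pem").read_text()  # placeholder PEM-encoded client certificate
headers = {
    # authentik router/envoy: "key=value" pairs joined by ";", certificates joined by ","
    "X-Forwarded-Client-Cert": f"Cert={quote_plus(pem)}",
    # nginx-ingress: the URL-encoded PEM directly
    "SSL-Client-Cert": quote_plus(pem),
    # traefik: the URL-encoded PEM directly
    "X-Forwarded-TLS-Client-Cert": quote_plus(pem),
    # outposts: only accepted when the outpost user holds the pass_outpost_certificate permission
    "X-Authentik-Outpost-Certificate": quote_plus(pem),
}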

View File

@ -1,31 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIFXDCCA0SgAwIBAgIUBmV7zREyC1SPr72/75/L9zpwV18wDQYJKoZIhvcNAQEL
BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl
bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNDI3MTgzMDUwWhcNMzUw
MzA3MTgzMDUwWjBGMRowGAYDVQQDDBFhdXRoZW50aWsgVGVzdCBDQTESMBAGA1UE
CgwJYXV0aGVudGlrMRQwEgYDVQQLDAtTZWxmLXNpZ25lZDCCAiIwDQYJKoZIhvcN
AQEBBQADggIPADCCAgoCggIBAMc0NxZj7j1mPu0aRToo8oMPdC3T99xgxnqdr18x
LV4pWyi/YLghgZHqNQY2xNP6JIlSeUZD6KFUYT2sPL4Av/zSg5zO8bl+/lf7ckje
O1/Bt5A8xtL0CpmpMDGiI6ibdDElaywM6AohisbxrV29pygSKGq2wugF/urqGtE+
5z4y5Kt6qMdKkd0iXT+WagbQTIUlykFKgB0+qqTLzDl01lVDa/DoLl8Hqp45mVx2
pqrGsSa3TCErLIv9hUlZklF7A8UV4ZB4JL20UKcP8dKzQClviNie17tpsUpOuy3A
SQ6+guWTHTLJNCSdLn1xIqc5q+f5wd2dIDf8zXCTHj+Xp0bJE3Vgaq5R31K9+b+1
2dDWz1KcNJaLEnw2+b0O8M64wTMLxhqOv7QfLUr6Pmg1ZymghjLcZ6bnU9e31Vza
hlPKhxjqYQUC4Kq+oaYF6qdUeJy+dsYf0iDv5tTC+eReZDWIjxTPrNpwA773ZwT7
WVmL7ULGpuP2g9rNvFBcZiN+i6d7CUoN+jd/iRdo79lrI0dfXiyy4bYgW/2HeZfF
HaOsc1xsoqnJdWbWkX/ooyaCjAfm07kS3HiOzz4q3QW4wgGrwV8lEraLPxYYeOQu
YcGMOM8NfnVkjc8gmyXUxedCje5Vz/Tu5fKrQEInnCmXxVsWbwr/LzEjMKAM/ivY
0TXxAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0G
A1UdDgQWBBTa+Ns6QzqlNvnTGszkouQQtZnVJDANBgkqhkiG9w0BAQsFAAOCAgEA
NpJEDMXjuEIzSzafkxSshvjnt5sMYmzmvjNoRlkxgN2YcWvPoxbalGAYzcpyggT2
6xZY8R4tvB1oNTCArqwf860kkofUoJCr88D/pU3Cv4JhjCWs4pmXTsvSqlBSlJbo
+jPBZwbn6it/6jcit6Be3rW2PtHe8tASd9Lf8/2r1ZvupXwPzcR84R4Z10ve2lqV
xxcWlMmBh51CaYI0b1/WTe9Ua+wgkCVkxbf9zNcDQXjxw2ICWK+nR/4ld4nmqVm2
C7nhvXwU8FAHl7ZgR2Z3PLrwPuhd+kd6NXQqNkS9A+n+1vSRLbRjmV8pwIPpdPEq
nslUAGJJBHDUBArxC3gOJSB+WtmaCfzDu2gepMf9Ng1H2ZhwSF/FH3v3fsJqZkzz
NBstT9KuNGQRYiCmAPJaoVAc9BoLa+BFML1govtWtpdmbFk8PZEcuUsP7iAZqFF1
uuldPyZ8huGpQSR6Oq2bILRHowfGY0npTZAyxg0Vs8UMy1HTwNOp9OuRtArMZmsJ
jFIx1QzRf9S1i6bYpOzOudoXj4ARkS1KmVExGjJFcIT0xlFSSERie2fEKSeEYOyG
G+PA2qRt/F51FGOMm1ZscjPXqk2kt3C4BFbz6Vvxsq7D3lmhvFLn4jVA8+OidsM0
YUrVMtWET/RkjEIbADbgRXxNUNo+jtQZDU9C1IiAdfk=
-----END CERTIFICATE-----

View File

@ -1,31 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIFWTCCA0GgAwIBAgIUDEnKCSmIXG/akySGes7bhOGrN/8wDQYJKoZIhvcNAQEL
BQAwRjEaMBgGA1UEAwwRYXV0aGVudGlrIFRlc3QgQ0ExEjAQBgNVBAoMCWF1dGhl
bnRpazEUMBIGA1UECwwLU2VsZi1zaWduZWQwHhcNMjUwNTE5MTIzODQ2WhcNMjYw
NTE1MTIzODQ2WjARMQ8wDQYDVQQDDAZjbGllbnQwggIiMA0GCSqGSIb3DQEBAQUA
A4ICDwAwggIKAoICAQCkPkS1V6l0gj0ulxMznkxkgrw4p9Tjd8teSsGZt02A2Eo6
7D8FbJ7pp3d5fYW/TWuEKVBLWTID6rijW5EGcdgTM5Jxf/QR+aZTEK6umQxUd4yO
mOtp+xVS3KlcsSej2dFpeE5h5VkZizHpvh5xkoAP8W5VtQLOVF0hIeumHnJmaeLj
+mhK9PBFpO7k9SFrYYhd/uLrYbIdANihbIO2Q74rNEJHewhFNM7oNSjjEWzRd/7S
qNdQij9JGrVG7u8YJJscEQHqyHMYFVCEMjxmsge5BO6Vx5OWmUE3wXPzb5TbyTS4
+yg88g9rYTUXrzz+poCyKpaur45qBsdw35lJ8nq69VJj2xJLGQDwoTgGSXRuPciC
3OilQI+Ma+j8qQGJxJ8WJxISlf1cuhp+V4ZUd1lawlM5hAXyXmHRlH4pun4y+g7O
O34+fE3pK25JjVCicMT/rC2A/sb95j/fHTzzJpbB70U0I50maTcIsOkyw6aiF//E
0ShTDz14x22SCMolUc6hxTDZvBB6yrcJHd7d9CCnFH2Sgo13QrtNJ/atXgm13HGh
wBzRwK38XUGl/J4pJaxAupTVCPriStUM3m0EYHNelRRUE91pbyeGT0rvOuv00uLw
Rj7K7hJZR8avTKWmKrVBVpq+gSojGW1DwBS0NiDNkZs0d/IjB1wkzczEgdZjXwID
AQABo3QwcjAfBgNVHSMEGDAWgBTa+Ns6QzqlNvnTGszkouQQtZnVJDAdBgNVHSUE
FjAUBggrBgEFBQcDAgYIKwYBBQUHAwEwEQYDVR0RBAowCIIGY2xpZW50MB0GA1Ud
DgQWBBT1xg5sXkypRBwvCxBuyfoanaiZ5jANBgkqhkiG9w0BAQsFAAOCAgEAvUAz
YwIjxY/0KHZDU8owdILVqKChzfLcy9OHNPyEI3TSOI8X6gNtBO+HE6r8aWGcC9vw
zzeIsNQ3UEjvRWi2r+vUVbiPTbFdZboNDSZv6ZmGHxwd85VsjXRGoXV6koCT/9zi
9/lCM1DwqwYSwBphMJdRVFRUMluSYk1oHflGeA18xgGuts4eFivJwhabGm1AdVVQ
/CYvqCuTxd/DCzWZBdyxYpDru64i/kyeJCt1pThKEFDWmpumFdBI4CxJ0OhxVSGp
dOXzK+Y6ULepxCvi6/OpSog52jQ6PnNd1ghiYtq7yO1T4GQz65M1vtHHVvQ3gfBE
AuKYQp6io7ypitRx+LpjsBQenyP4FFGfrq7pm90nLluOBOArfSdF0N+CP2wo/YFV
9BGf89OtvRi3BXCm2NXkE/Sc4We26tY8x7xNLOmNs8YOT0O3r/EQ690W9GIwRMx0
m0r/RXWn5V3o4Jib9r8eH9NzaDstD8g9dECcGfM4fHoM/DAGFaRrNcjMsS1APP3L
jp7+BfBSXtrz9V6rVJ3CBLXlLK0AuSm7bqd1MJsGA9uMLpsVZIUA+KawcmPGdPU+
NxdpBCtzyurQSUyaTLtVqSeP35gMAwaNzUDph8Uh+vHz+kRwgXS19OQvTaud5LJu
nQe4JNS+u5e2VDEBWUxt8NTpu6eShDN0iIEHtxA=
-----END CERTIFICATE-----

View File

@ -1,228 +0,0 @@
from unittest.mock import MagicMock, patch
from urllib.parse import quote_plus
from django.urls import reverse
from guardian.shortcuts import assign_perm
from authentik.core.models import User
from authentik.core.tests.utils import (
create_test_brand,
create_test_cert,
create_test_flow,
create_test_user,
)
from authentik.crypto.models import CertificateKeyPair
from authentik.enterprise.stages.mtls.models import (
CertAttributes,
MutualTLSStage,
TLSMode,
UserAttributes,
)
from authentik.enterprise.stages.mtls.stage import PLAN_CONTEXT_CERTIFICATE
from authentik.flows.models import FlowDesignation, FlowStageBinding
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
from authentik.flows.tests import FlowTestCase
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
from authentik.outposts.models import Outpost, OutpostType
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
class MTLSStageTests(FlowTestCase):
def setUp(self):
super().setUp()
self.flow = create_test_flow(FlowDesignation.AUTHENTICATION)
self.ca = CertificateKeyPair.objects.create(
name=generate_id(),
certificate_data=load_fixture("fixtures/ca.pem"),
)
self.stage = MutualTLSStage.objects.create(
name=generate_id(),
mode=TLSMode.REQUIRED,
cert_attribute=CertAttributes.COMMON_NAME,
user_attribute=UserAttributes.USERNAME,
)
self.stage.certificate_authorities.add(self.ca)
self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=0)
self.client_cert = load_fixture("fixtures/cert_client.pem")
# User matching the certificate
User.objects.filter(username="client").delete()
self.cert_user = create_test_user(username="client")
def test_parse_xfcc(self):
"""Test authentik Proxy/Envoy's XFCC format"""
with self.assertFlowFinishes() as plan:
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
headers={"X-Forwarded-Client-Cert": f"Cert={quote_plus(self.client_cert)}"},
)
self.assertEqual(res.status_code, 200)
self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)
def test_parse_nginx(self):
"""Test nginx's format"""
with self.assertFlowFinishes() as plan:
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
headers={"SSL-Client-Cert": quote_plus(self.client_cert)},
)
self.assertEqual(res.status_code, 200)
self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)
def test_parse_traefik(self):
"""Test traefik's format"""
with self.assertFlowFinishes() as plan:
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
)
self.assertEqual(res.status_code, 200)
self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)
def test_parse_outpost_object(self):
"""Test outposts's format"""
outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY)
assign_perm("pass_outpost_certificate", outpost.user, self.stage)
with patch(
"authentik.root.middleware.ClientIPMiddleware.get_outpost_user",
MagicMock(return_value=outpost.user),
):
with self.assertFlowFinishes() as plan:
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)},
)
self.assertEqual(res.status_code, 200)
self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)
def test_parse_outpost_global(self):
"""Test outposts's format"""
outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY)
assign_perm("authentik_stages_mtls.pass_outpost_certificate", outpost.user)
with patch(
"authentik.root.middleware.ClientIPMiddleware.get_outpost_user",
MagicMock(return_value=outpost.user),
):
with self.assertFlowFinishes() as plan:
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)},
)
self.assertEqual(res.status_code, 200)
self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)
def test_parse_outpost_no_perm(self):
"""Test outposts's format"""
outpost = Outpost.objects.create(name=generate_id(), type=OutpostType.PROXY)
with patch(
"authentik.root.middleware.ClientIPMiddleware.get_outpost_user",
MagicMock(return_value=outpost.user),
):
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
headers={"X-Authentik-Outpost-Certificate": quote_plus(self.client_cert)},
)
self.assertEqual(res.status_code, 200)
self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")
def test_invalid_cert(self):
"""Test invalid certificate"""
cert = create_test_cert()
with self.assertFlowFinishes() as plan:
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
headers={"X-Forwarded-TLS-Client-Cert": quote_plus(cert.certificate_data)},
)
self.assertEqual(res.status_code, 200)
self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")
self.assertNotIn(PLAN_CONTEXT_PENDING_USER, plan().context)
def test_auth_no_user(self):
"""Test auth with no user"""
User.objects.filter(username="client").delete()
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
)
self.assertEqual(res.status_code, 200)
self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")
def test_brand_ca(self):
"""Test using a CA from the brand"""
self.stage.certificate_authorities.clear()
brand = create_test_brand()
brand.client_certificates.add(self.ca)
with self.assertFlowFinishes() as plan:
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
)
self.assertEqual(res.status_code, 200)
self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
self.assertEqual(plan().context[PLAN_CONTEXT_PENDING_USER], self.cert_user)
def test_no_ca_optional(self):
"""Test using no CA Set"""
self.stage.mode = TLSMode.OPTIONAL
self.stage.certificate_authorities.clear()
self.stage.save()
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
)
self.assertEqual(res.status_code, 200)
self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
def test_no_ca_required(self):
"""Test using no CA Set"""
self.stage.certificate_authorities.clear()
self.stage.save()
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
)
self.assertEqual(res.status_code, 200)
self.assertStageResponse(res, self.flow, component="ak-stage-access-denied")
def test_no_cert_optional(self):
"""Test using no cert Set"""
self.stage.mode = TLSMode.OPTIONAL
self.stage.save()
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
)
self.assertEqual(res.status_code, 200)
self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
def test_enroll(self):
"""Test Enrollment flow"""
self.flow.designation = FlowDesignation.ENROLLMENT
self.flow.save()
with self.assertFlowFinishes() as plan:
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
headers={"X-Forwarded-TLS-Client-Cert": quote_plus(self.client_cert)},
)
self.assertEqual(res.status_code, 200)
self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
self.assertEqual(plan().context[PLAN_CONTEXT_PROMPT], {"email": None, "name": "client"})
self.assertEqual(
plan().context[PLAN_CONTEXT_CERTIFICATE],
{
"fingerprint_sha1": "52:39:ca:1e:3a:1f:78:3a:9f:26:3b:c2:84:99:48:68:99:99:81:8a",
"fingerprint_sha256": (
"c1:07:8b:7c:e9:02:57:87:1e:92:e5:81:83:21:bc:92:c7:47:65:e3:97:fb:05:97:6f:36:9e:b5:31:77:98:b7"
),
"issuer": "OU=Self-signed,O=authentik,CN=authentik Test CA",
"serial_number": "70153443448884702681996102271549704759327537151",
"subject": "CN=client",
},
)
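For readers checking the fingerprint values asserted in the enrollment test above: the same fields can be reproduced from any PEM certificate with the cryptography package. A minimal sketch, not authentik's implementation; "client.pem" is a placeholder path:

from pathlib import Path

from cryptography import x509
from cryptography.hazmat.primitives import hashes

# Load a PEM-encoded client certificate (placeholder path).
cert = x509.load_pem_x509_certificate(Path("client.pem").read_bytes())

# Colon-separated hex digests, matching the format asserted in the test.
print("fingerprint_sha1:  ", cert.fingerprint(hashes.SHA1()).hex(":"))
print("fingerprint_sha256:", cert.fingerprint(hashes.SHA256()).hex(":"))
print("issuer:            ", cert.issuer.rfc4514_string())
print("serial_number:     ", cert.serial_number)
print("subject:           ", cert.subject.rfc4514_string())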

View File

@ -1,5 +0,0 @@
"""API URLs"""
from authentik.enterprise.stages.mtls.api import MutualTLSStageViewSet
api_urlpatterns = [("stages/mtls", MutualTLSStageViewSet)]

View File

@ -8,7 +8,6 @@ from django.test import TestCase
from django.utils.timezone import now
from rest_framework.exceptions import ValidationError
from authentik.core.models import User
from authentik.enterprise.license import LicenseKey
from authentik.enterprise.models import (
THRESHOLD_READ_ONLY_WEEKS,
@ -72,9 +71,9 @@ class TestEnterpriseLicense(TestCase):
)
def test_valid_multiple(self):
"""Check license verification"""
lic = License.objects.create(key=generate_id(), expiry=expiry_valid)
lic = License.objects.create(key=generate_id())
self.assertTrue(lic.status.status().is_valid)
lic2 = License.objects.create(key=generate_id(), expiry=expiry_valid)
lic2 = License.objects.create(key=generate_id())
self.assertTrue(lic2.status.status().is_valid)
total = LicenseKey.get_total()
self.assertEqual(total.internal_users, 200)
@ -233,9 +232,7 @@ class TestEnterpriseLicense(TestCase):
)
def test_expiry_expired(self):
"""Check license verification"""
User.objects.all().delete()
License.objects.all().delete()
License.objects.create(key=generate_id(), expiry=expiry_expired)
License.objects.create(key=generate_id())
self.assertEqual(LicenseKey.get_total().summary().status, LicenseUsageStatus.EXPIRED)
@patch(

View File

@ -57,7 +57,7 @@ class LogEventSerializer(PassiveSerializer):
@contextmanager
def capture_logs(log_default_output=True) -> Generator[list[LogEvent]]:
def capture_logs(log_default_output=True) -> Generator[list[LogEvent], None, None]:
"""Capture log entries created"""
logs = []
cap = LogCapture()

View File

@ -1,18 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-27 12:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_flows", "0027_auto_20231028_1424"),
]
operations = [
migrations.AddField(
model_name="flowtoken",
name="revoke_on_execution",
field=models.BooleanField(default=True),
),
]

View File

@ -303,10 +303,9 @@ class FlowToken(Token):
flow = models.ForeignKey(Flow, on_delete=models.CASCADE)
_plan = models.TextField()
revoke_on_execution = models.BooleanField(default=True)
@staticmethod
def pickle(plan: "FlowPlan") -> str:
def pickle(plan) -> str:
"""Pickle into string"""
data = dumps(plan)
return b64encode(data).decode()

View File

@ -99,10 +99,9 @@ class ChallengeStageView(StageView):
self.logger.debug("Got StageInvalidException", exc=exc)
return self.executor.stage_invalid()
if not challenge.is_valid():
self.logger.error(
self.logger.warning(
"f(ch): Invalid challenge",
errors=challenge.errors,
challenge=challenge.data,
)
return HttpChallengeResponse(challenge)

View File

@ -15,7 +15,6 @@
{% endblock %}
<link rel="stylesheet" type="text/css" href="{% static 'dist/sfe/bootstrap.min.css' %}">
<meta name="sentry-trace" content="{{ sentry_trace }}" />
<link rel="prefetch" href="{{ flow_background_url }}" />
{% include "base/header_js.html" %}
<style>
html,
@ -23,7 +22,7 @@
height: 100%;
}
body {
background-image: url("{{ flow_background_url }}");
background-image: url("{{ flow.background_url }}");
background-repeat: no-repeat;
background-size: cover;
}

View File

@ -5,9 +5,9 @@
{% block head_before %}
{{ block.super }}
<link rel="prefetch" href="{{ flow_background_url }}" />
<link rel="prefetch" href="{{ flow.background_url }}" />
{% if flow.compatibility_mode and not inspector %}
<script>ShadyDOM = { force: true };</script>
<script>ShadyDOM = { force: !navigator.webdriver };</script>
{% endif %}
{% include "base/header_js.html" %}
<script>
@ -21,7 +21,7 @@ window.authentik.flow = {
<script src="{% versioned_script 'dist/flow/FlowInterface-%v.js' %}" type="module"></script>
<style>
:root {
--ak-flow-background: url("{{ flow_background_url }}");
--ak-flow-background: url("{{ flow.background_url }}");
}
</style>
{% endblock %}

View File

@ -1,10 +1,7 @@
"""Test helpers"""
from collections.abc import Callable, Generator
from contextlib import contextmanager
from json import loads
from typing import Any
from unittest.mock import MagicMock, patch
from django.http.response import HttpResponse
from django.urls.base import reverse
@ -12,8 +9,6 @@ from rest_framework.test import APITestCase
from authentik.core.models import User
from authentik.flows.models import Flow
from authentik.flows.planner import FlowPlan
from authentik.flows.views.executor import SESSION_KEY_PLAN
class FlowTestCase(APITestCase):
@ -49,12 +44,3 @@ class FlowTestCase(APITestCase):
def assertStageRedirects(self, response: HttpResponse, to: str) -> dict[str, Any]:
"""Wrapper around assertStageResponse that checks for a redirect"""
return self.assertStageResponse(response, component="xak-flow-redirect", to=to)
@contextmanager
def assertFlowFinishes(self) -> Generator[Callable[[], FlowPlan]]:
"""Capture the flow plan before the flow finishes and return it"""
try:
with patch("authentik.flows.views.executor.FlowExecutorView.cancel", MagicMock()):
yield lambda: self.client.session.get(SESSION_KEY_PLAN)
finally:
pass

View File

@ -146,8 +146,7 @@ class FlowExecutorView(APIView):
except (AttributeError, EOFError, ImportError, IndexError) as exc:
LOGGER.warning("f(exec): Failed to restore token plan", exc=exc)
finally:
if token.revoke_on_execution:
token.delete()
token.delete()
if not isinstance(plan, FlowPlan):
return None
plan.context[PLAN_CONTEXT_IS_RESTORED] = token

View File

@ -13,9 +13,7 @@ class FlowInterfaceView(InterfaceView):
"""Flow interface"""
def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
flow = get_object_or_404(Flow, slug=self.kwargs.get("flow_slug"))
kwargs["flow"] = flow
kwargs["flow_background_url"] = flow.background_url(self.request)
kwargs["flow"] = get_object_or_404(Flow, slug=self.kwargs.get("flow_slug"))
kwargs["inspector"] = "inspector" in self.request.GET
return super().get_context_data(**kwargs)

View File

@ -363,9 +363,6 @@ def django_db_config(config: ConfigLoader | None = None) -> dict:
pool_options = config.get_dict_from_b64_json("postgresql.pool_options", True)
if not pool_options:
pool_options = True
# FIXME: Temporarily force pool to be deactivated.
# See https://github.com/goauthentik/authentik/issues/14320
pool_options = False
db = {
"default": {

View File

@ -81,6 +81,7 @@ debugger: false
log_level: info
session_storage: cache
sessions:
unauthenticated_age: days=1

View File

@ -17,7 +17,7 @@ from ldap3.core.exceptions import LDAPException
from redis.exceptions import ConnectionError as RedisConnectionError
from redis.exceptions import RedisError, ResponseError
from rest_framework.exceptions import APIException
from sentry_sdk import HttpTransport, get_current_scope
from sentry_sdk import HttpTransport
from sentry_sdk import init as sentry_sdk_init
from sentry_sdk.api import set_tag
from sentry_sdk.integrations.argv import ArgvIntegration
@ -27,7 +27,6 @@ from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations.socket import SocketIntegration
from sentry_sdk.integrations.stdlib import StdlibIntegration
from sentry_sdk.integrations.threading import ThreadingIntegration
from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME
from structlog.stdlib import get_logger
from websockets.exceptions import WebSocketException
@ -96,8 +95,6 @@ def traces_sampler(sampling_context: dict) -> float:
return 0
if _type == "websocket":
return 0
if CONFIG.get_bool("debug"):
return 1
return float(CONFIG.get("error_reporting.sample_rate", 0.1))
@ -170,14 +167,3 @@ def before_send(event: dict, hint: dict) -> dict | None:
if settings.DEBUG:
return None
return event
def get_http_meta():
"""Get sentry-related meta key-values"""
scope = get_current_scope()
meta = {
SENTRY_TRACE_HEADER_NAME: scope.get_traceparent() or "",
}
if bag := scope.get_baggage():
meta[BAGGAGE_HEADER_NAME] = bag.serialize()
return meta
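The removed get_http_meta helper above only gathers the sentry-trace and baggage header values; rendering such a dict into the <meta> tags used by the base templates is a one-liner. A stdlib-only sketch (render_meta_tags and the sample value are illustrative, not authentik code):

from html import escape


def render_meta_tags(meta: dict[str, str]) -> str:
    """Render header name/value pairs as HTML <meta> tags."""
    return "\n".join(
        f'<meta name="{escape(name)}" content="{escape(value)}" />'
        for name, value in meta.items()
    )


print(render_meta_tags({"sentry-trace": "00000000000000000000000000000000-0000000000000000-1"}))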

View File

@ -59,7 +59,7 @@ class PropertyMappingManager:
request: HttpRequest | None,
return_mapping: bool = False,
**kwargs,
) -> Generator[tuple[dict, PropertyMapping]]:
) -> Generator[tuple[dict, PropertyMapping], None]:
"""Iterate over all mappings that were pre-compiled and
execute all of them with the given context"""
if not self.__has_compiled:

View File

@ -23,6 +23,7 @@ if TYPE_CHECKING:
class Direction(StrEnum):
add = "add"
remove = "remove"
@ -36,16 +37,13 @@ SAFE_METHODS = [
class BaseOutgoingSyncClient[
TModel: "Model",
TConnection: "Model",
TSchema: dict,
TProvider: "OutgoingSyncProvider",
TModel: "Model", TConnection: "Model", TSchema: dict, TProvider: "OutgoingSyncProvider"
]:
"""Basic Outgoing sync client Client"""
provider: TProvider
connection_type: type[TConnection]
connection_attr: str
connection_type_query: str
mapper: PropertyMappingManager
can_discover = False
@ -65,7 +63,9 @@ class BaseOutgoingSyncClient[
def write(self, obj: TModel) -> tuple[TConnection, bool]:
"""Write object to destination. Uses self.create and self.update, but
can be overwritten for further logic"""
connection = getattr(obj, self.connection_attr).filter(provider=self.provider).first()
connection = self.connection_type.objects.filter(
provider=self.provider, **{self.connection_type_query: obj}
).first()
try:
if not connection:
connection = self.create(obj)

View File

@ -1,7 +1,6 @@
from collections.abc import Callable
from dataclasses import asdict
from celery import group
from celery.exceptions import Retry
from celery.result import allow_join_result
from django.core.paginator import Paginator
@ -83,41 +82,21 @@ class SyncTasks:
self.logger.debug("Failed to acquire sync lock, skipping", provider=provider.name)
return
try:
messages.append(_("Syncing users"))
user_results = (
group(
[
sync_objects.signature(
args=(class_to_path(User), page, provider_pk),
time_limit=PAGE_TIMEOUT,
soft_time_limit=PAGE_TIMEOUT,
)
for page in users_paginator.page_range
]
)
.apply_async()
.get()
)
for result in user_results:
for msg in result:
for page in users_paginator.page_range:
messages.append(_("Syncing page {page} of users".format(page=page)))
for msg in sync_objects.apply_async(
args=(class_to_path(User), page, provider_pk),
time_limit=PAGE_TIMEOUT,
soft_time_limit=PAGE_TIMEOUT,
).get():
messages.append(LogEvent(**msg))
messages.append(_("Syncing groups"))
group_results = (
group(
[
sync_objects.signature(
args=(class_to_path(Group), page, provider_pk),
time_limit=PAGE_TIMEOUT,
soft_time_limit=PAGE_TIMEOUT,
)
for page in groups_paginator.page_range
]
)
.apply_async()
.get()
)
for result in group_results:
for msg in result:
for page in groups_paginator.page_range:
messages.append(_("Syncing page {page} of groups".format(page=page)))
for msg in sync_objects.apply_async(
args=(class_to_path(Group), page, provider_pk),
time_limit=PAGE_TIMEOUT,
soft_time_limit=PAGE_TIMEOUT,
).get():
messages.append(LogEvent(**msg))
except TransientSyncException as exc:
self.logger.warning("transient sync exception", exc=exc)
@ -130,7 +109,7 @@ class SyncTasks:
def sync_objects(
self, object_type: str, page: int, provider_pk: int, override_dry_run=False, **filter
):
_object_type: type[Model] = path_to_class(object_type)
_object_type = path_to_class(object_type)
self.logger = get_logger().bind(
provider_type=class_to_path(self._provider_model),
provider_pk=provider_pk,
@ -153,19 +132,6 @@ class SyncTasks:
self.logger.debug("starting discover")
client.discover()
self.logger.debug("starting sync for page", page=page)
messages.append(
asdict(
LogEvent(
_(
"Syncing page {page} of {object_type}".format(
page=page, object_type=_object_type._meta.verbose_name_plural
)
),
log_level="info",
logger=f"{provider._meta.verbose_name}@{object_type}",
)
)
)
for obj in paginator.page(page).object_list:
obj: Model
try:

View File

@ -494,88 +494,86 @@ class TestConfig(TestCase):
},
)
# FIXME: Temporarily force pool to be deactivated.
# See https://github.com/goauthentik/authentik/issues/14320
# def test_db_pool(self):
# """Test DB Config with pool"""
# config = ConfigLoader()
# config.set("postgresql.host", "foo")
# config.set("postgresql.name", "foo")
# config.set("postgresql.user", "foo")
# config.set("postgresql.password", "foo")
# config.set("postgresql.port", "foo")
# config.set("postgresql.test.name", "foo")
# config.set("postgresql.use_pool", True)
# conf = django_db_config(config)
# self.assertEqual(
# conf,
# {
# "default": {
# "ENGINE": "authentik.root.db",
# "HOST": "foo",
# "NAME": "foo",
# "OPTIONS": {
# "pool": True,
# "sslcert": None,
# "sslkey": None,
# "sslmode": None,
# "sslrootcert": None,
# },
# "PASSWORD": "foo",
# "PORT": "foo",
# "TEST": {"NAME": "foo"},
# "USER": "foo",
# "CONN_MAX_AGE": 0,
# "CONN_HEALTH_CHECKS": False,
# "DISABLE_SERVER_SIDE_CURSORS": False,
# }
# },
# )
def test_db_pool(self):
"""Test DB Config with pool"""
config = ConfigLoader()
config.set("postgresql.host", "foo")
config.set("postgresql.name", "foo")
config.set("postgresql.user", "foo")
config.set("postgresql.password", "foo")
config.set("postgresql.port", "foo")
config.set("postgresql.test.name", "foo")
config.set("postgresql.use_pool", True)
conf = django_db_config(config)
self.assertEqual(
conf,
{
"default": {
"ENGINE": "authentik.root.db",
"HOST": "foo",
"NAME": "foo",
"OPTIONS": {
"pool": True,
"sslcert": None,
"sslkey": None,
"sslmode": None,
"sslrootcert": None,
},
"PASSWORD": "foo",
"PORT": "foo",
"TEST": {"NAME": "foo"},
"USER": "foo",
"CONN_MAX_AGE": 0,
"CONN_HEALTH_CHECKS": False,
"DISABLE_SERVER_SIDE_CURSORS": False,
}
},
)
# def test_db_pool_options(self):
# """Test DB Config with pool"""
# config = ConfigLoader()
# config.set("postgresql.host", "foo")
# config.set("postgresql.name", "foo")
# config.set("postgresql.user", "foo")
# config.set("postgresql.password", "foo")
# config.set("postgresql.port", "foo")
# config.set("postgresql.test.name", "foo")
# config.set("postgresql.use_pool", True)
# config.set(
# "postgresql.pool_options",
# base64.b64encode(
# dumps(
# {
# "max_size": 15,
# }
# ).encode()
# ).decode(),
# )
# conf = django_db_config(config)
# self.assertEqual(
# conf,
# {
# "default": {
# "ENGINE": "authentik.root.db",
# "HOST": "foo",
# "NAME": "foo",
# "OPTIONS": {
# "pool": {
# "max_size": 15,
# },
# "sslcert": None,
# "sslkey": None,
# "sslmode": None,
# "sslrootcert": None,
# },
# "PASSWORD": "foo",
# "PORT": "foo",
# "TEST": {"NAME": "foo"},
# "USER": "foo",
# "CONN_MAX_AGE": 0,
# "CONN_HEALTH_CHECKS": False,
# "DISABLE_SERVER_SIDE_CURSORS": False,
# }
# },
# )
def test_db_pool_options(self):
"""Test DB Config with pool"""
config = ConfigLoader()
config.set("postgresql.host", "foo")
config.set("postgresql.name", "foo")
config.set("postgresql.user", "foo")
config.set("postgresql.password", "foo")
config.set("postgresql.port", "foo")
config.set("postgresql.test.name", "foo")
config.set("postgresql.use_pool", True)
config.set(
"postgresql.pool_options",
base64.b64encode(
dumps(
{
"max_size": 15,
}
).encode()
).decode(),
)
conf = django_db_config(config)
self.assertEqual(
conf,
{
"default": {
"ENGINE": "authentik.root.db",
"HOST": "foo",
"NAME": "foo",
"OPTIONS": {
"pool": {
"max_size": 15,
},
"sslcert": None,
"sslkey": None,
"sslmode": None,
"sslrootcert": None,
},
"PASSWORD": "foo",
"PORT": "foo",
"TEST": {"NAME": "foo"},
"USER": "foo",
"CONN_MAX_AGE": 0,
"CONN_HEALTH_CHECKS": False,
"DISABLE_SERVER_SIDE_CURSORS": False,
}
},
)
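The pool-options test above feeds a base64-encoded JSON document into config.set; the same value is what an operator would supply for the postgresql.pool_options key. A short sketch of producing it (the AUTHENTIK_POSTGRESQL__POOL_OPTIONS variable name is an assumption based on authentik's usual env-var nesting, not taken from this diff):

import base64
from json import dumps

# Encode psycopg pool options as base64 JSON, as expected by postgresql.pool_options.
pool_options = {"max_size": 15}
encoded = base64.b64encode(dumps(pool_options).encode()).decode()

print(encoded)  # eyJtYXhfc2l6ZSI6IDE1fQ==
print(f"AUTHENTIK_POSTGRESQL__POOL_OPTIONS={encoded}")  # assumed env-var spelling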

View File

@ -1,11 +1,9 @@
"""Websocket tests"""
from dataclasses import asdict
from unittest.mock import patch
from channels.routing import URLRouter
from channels.testing import WebsocketCommunicator
from django.contrib.contenttypes.models import ContentType
from django.test import TransactionTestCase
from authentik import __version__
@ -16,12 +14,6 @@ from authentik.providers.proxy.models import ProxyProvider
from authentik.root import websocket
def patched__get_ct_cached(app_label, codename):
"""Caches `ContentType` instances like its `QuerySet` does."""
return ContentType.objects.get(app_label=app_label, permission__codename=codename)
@patch("guardian.shortcuts._get_ct_cached", patched__get_ct_cached)
class TestOutpostWS(TransactionTestCase):
"""Websocket tests"""
@ -46,7 +38,6 @@ class TestOutpostWS(TransactionTestCase):
)
connected, _ = await communicator.connect()
self.assertFalse(connected)
await communicator.disconnect()
async def test_auth_valid(self):
"""Test auth with token"""
@ -57,7 +48,6 @@ class TestOutpostWS(TransactionTestCase):
)
connected, _ = await communicator.connect()
self.assertTrue(connected)
await communicator.disconnect()
async def test_send(self):
"""Test sending of Hello"""

View File

@ -7,8 +7,10 @@ from django.db import migrations
def migrate_search_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
from authentik.core.models import User
from django.apps import apps as real_apps
from django.contrib.auth.management import create_permissions
from guardian.shortcuts import UserObjectPermission
db_alias = schema_editor.connection.alias

View File

@ -50,4 +50,3 @@ AMR_PASSWORD = "pwd" # nosec
AMR_MFA = "mfa"
AMR_OTP = "otp"
AMR_WEBAUTHN = "user"
AMR_SMART_CARD = "sc"

View File

@ -16,7 +16,6 @@ from authentik.providers.oauth2.constants import (
ACR_AUTHENTIK_DEFAULT,
AMR_MFA,
AMR_PASSWORD,
AMR_SMART_CARD,
AMR_WEBAUTHN,
)
from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS
@ -140,10 +139,9 @@ class IDToken:
amr.append(AMR_PASSWORD)
if method == "auth_webauthn_pwl":
amr.append(AMR_WEBAUTHN)
if "certificate" in method_args:
amr.append(AMR_SMART_CARD)
if "mfa_devices" in method_args:
amr.append(AMR_MFA)
if len(amr) > 0:
amr.append(AMR_MFA)
if amr:
id_token.amr = amr

View File

@ -47,8 +47,6 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]):
def reconcile(self, current: V1Ingress, reference: V1Ingress):
super().reconcile(current, reference)
self._check_annotations(current, reference)
if current.spec.ingress_class_name != reference.spec.ingress_class_name:
raise NeedsUpdate()
# Create a list of all expected host and tls hosts
expected_hosts = []
expected_hosts_tls = []

View File

@ -166,6 +166,7 @@ class ConnectionToken(ExpiringModel):
always_merger.merge(settings, default_settings)
always_merger.merge(settings, self.endpoint.provider.settings)
always_merger.merge(settings, self.endpoint.settings)
always_merger.merge(settings, self.settings)
def mapping_evaluator(mappings: QuerySet):
for mapping in mappings:
@ -190,7 +191,6 @@ class ConnectionToken(ExpiringModel):
mapping_evaluator(
RACPropertyMapping.objects.filter(endpoint__in=[self.endpoint]).order_by("name")
)
always_merger.merge(settings, self.settings)
settings["drive-path"] = f"/tmp/connection/{self.token}" # nosec
settings["create-drive-path"] = "true"

View File

@ -90,6 +90,23 @@ class TestModels(TransactionTestCase):
"resize-method": "display-update",
},
)
# Set settings in token
token.settings = {
"level": "token",
}
token.save()
self.assertEqual(
token.get_settings(),
{
"hostname": self.endpoint.host.split(":")[0],
"port": "1324",
"client-name": f"authentik - {self.user}",
"drive-path": path,
"create-drive-path": "true",
"level": "token",
"resize-method": "display-update",
},
)
# Set settings in property mapping (provider)
mapping = RACPropertyMapping.objects.create(
name=generate_id(),
@ -134,22 +151,3 @@ class TestModels(TransactionTestCase):
"resize-method": "display-update",
},
)
# Set settings in token
token.settings = {
"level": "token",
}
token.save()
self.assertEqual(
token.get_settings(),
{
"hostname": self.endpoint.host.split(":")[0],
"port": "1324",
"client-name": f"authentik - {self.user}",
"drive-path": path,
"create-drive-path": "true",
"foo": "true",
"bar": "6",
"resize-method": "display-update",
"level": "token",
},
)

View File

@ -34,7 +34,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
"""SCIM client for groups"""
connection_type = SCIMProviderGroup
connection_attr = "scimprovidergroup_set"
connection_type_query = "group"
mapper: PropertyMappingManager
def __init__(self, provider: SCIMProvider):
@ -47,16 +47,15 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
def to_schema(self, obj: Group, connection: SCIMProviderGroup) -> SCIMGroupSchema:
"""Convert authentik user into SCIM"""
raw_scim_group = super().to_schema(obj, connection)
raw_scim_group = super().to_schema(
obj,
connection,
schemas=(SCIM_GROUP_SCHEMA,),
)
try:
scim_group = SCIMGroupSchema.model_validate(delete_none_values(raw_scim_group))
except ValidationError as exc:
raise StopSync(exc, obj) from exc
if SCIM_GROUP_SCHEMA not in scim_group.schemas:
scim_group.schemas.insert(0, SCIM_GROUP_SCHEMA)
# As this might be unset, we need to tell pydantic it's set to ensure the schemas
# are included, even if it's just the defaults
scim_group.schemas = list(scim_group.schemas)
if not scim_group.externalId:
scim_group.externalId = str(obj.pk)
@ -200,7 +199,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
chunk_size = len(ops)
if len(ops) < 1:
return
for chunk in batched(ops, chunk_size, strict=False):
for chunk in batched(ops, chunk_size):
req = PatchRequest(Operations=list(chunk))
self._request(
"PATCH",

View File

@ -18,7 +18,7 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]):
"""SCIM client for users"""
connection_type = SCIMProviderUser
connection_attr = "scimprovideruser_set"
connection_type_query = "user"
mapper: PropertyMappingManager
def __init__(self, provider: SCIMProvider):
@ -31,16 +31,15 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]):
def to_schema(self, obj: User, connection: SCIMProviderUser) -> SCIMUserSchema:
"""Convert authentik user into SCIM"""
raw_scim_user = super().to_schema(obj, connection)
raw_scim_user = super().to_schema(
obj,
connection,
schemas=(SCIM_USER_SCHEMA,),
)
try:
scim_user = SCIMUserSchema.model_validate(delete_none_values(raw_scim_user))
except ValidationError as exc:
raise StopSync(exc, obj) from exc
if SCIM_USER_SCHEMA not in scim_user.schemas:
scim_user.schemas.insert(0, SCIM_USER_SCHEMA)
# As this might be unset, we need to tell pydantic it's set to ensure the schemas
# are included, even if it's just the defaults
scim_user.schemas = list(scim_user.schemas)
if not scim_user.externalId:
scim_user.externalId = str(obj.uid)
return scim_user
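The comment about telling pydantic a field is "set" (in both SCIM client hunks above) refers to pydantic's exclude_unset behaviour: default values are skipped during serialization until the field is explicitly assigned. A toy illustration, assuming pydantic v2; Example is not an authentik class:

from pydantic import BaseModel


class Example(BaseModel):
    schemas: list[str] = ["urn:example:default"]


m = Example()
print(m.model_dump(exclude_unset=True))  # {} - the default counts as unset
m.schemas = list(m.schemas)  # re-assigning marks the field as explicitly set
print(m.model_dump(exclude_unset=True))  # {'schemas': ['urn:example:default']}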

View File

@ -116,7 +116,7 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider):
if type == User:
# Get queryset of all users with consistent ordering
# according to the provider's settings
base = User.objects.prefetch_related("scimprovideruser_set").all().exclude_anonymous()
base = User.objects.all().exclude_anonymous()
if self.exclude_users_service_account:
base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
type=UserTypes.INTERNAL_SERVICE_ACCOUNT
@ -126,7 +126,7 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider):
return base.order_by("pk")
if type == Group:
# Get queryset of all groups with consistent ordering
return Group.objects.prefetch_related("scimprovidergroup_set").all().order_by("pk")
return Group.objects.all().order_by("pk")
raise ValueError(f"Invalid type {type}")
@property

View File

@ -91,57 +91,6 @@ class SCIMUserTests(TestCase):
},
)
@Mocker()
def test_user_create_custom_schema(self, mock: Mocker):
"""Test user creation with custom schema"""
schema = SCIMMapping.objects.create(
name="custom_schema",
expression="""return {"schemas": ["foo"]}""",
)
self.provider.property_mappings.add(schema)
scim_id = generate_id()
mock.get(
"https://localhost/ServiceProviderConfig",
json={},
)
mock.post(
"https://localhost/Users",
json={
"id": scim_id,
},
)
uid = generate_id()
user = User.objects.create(
username=uid,
name=f"{uid} {uid}",
email=f"{uid}@goauthentik.io",
)
self.assertEqual(mock.call_count, 2)
self.assertEqual(mock.request_history[0].method, "GET")
self.assertEqual(mock.request_history[1].method, "POST")
self.assertJSONEqual(
mock.request_history[1].body,
{
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:User", "foo"],
"active": True,
"emails": [
{
"primary": True,
"type": "other",
"value": f"{uid}@goauthentik.io",
}
],
"externalId": user.uid,
"name": {
"familyName": uid,
"formatted": f"{uid} {uid}",
"givenName": uid,
},
"displayName": f"{uid} {uid}",
"userName": uid,
},
)
@Mocker()
def test_user_create_different_provider_same_id(self, mock: Mocker):
"""Test user creation with multiple providers that happen
@ -435,7 +384,7 @@ class SCIMUserTests(TestCase):
self.assertIn(request.method, SAFE_METHODS)
task = SystemTask.objects.filter(uid=slugify(self.provider.name)).first()
self.assertIsNotNone(task)
drop_msg = task.messages[3]
drop_msg = task.messages[2]
self.assertEqual(drop_msg["event"], "Dropping mutating request due to dry run")
self.assertIsNotNone(drop_msg["attributes"]["url"])
self.assertIsNotNone(drop_msg["attributes"]["body"])

View File

@ -132,7 +132,7 @@ TENANT_CREATION_FAKES_MIGRATIONS = True
TENANT_BASE_SCHEMA = "template"
PUBLIC_SCHEMA_NAME = CONFIG.get("postgresql.default_schema")
GUARDIAN_MONKEY_PATCH_USER = False
GUARDIAN_MONKEY_PATCH = False
SPECTACULAR_SETTINGS = {
"TITLE": "authentik",
@ -424,7 +424,7 @@ else:
"BACKEND": "authentik.root.storages.FileStorage",
"OPTIONS": {
"location": Path(CONFIG.get("storage.media.file.path")),
"base_url": CONFIG.get("web.path", "/") + "media/",
"base_url": "/media/",
},
}
# Compatibility for apps not supporting top-level STORAGES

View File

@ -31,8 +31,6 @@ class PytestTestRunner(DiscoverRunner): # pragma: no cover
if kwargs.get("randomly_seed", None):
self.args.append(f"--randomly-seed={kwargs['randomly_seed']}")
if kwargs.get("no_capture", False):
self.args.append("--capture=no")
settings.TEST = True
settings.CELERY["task_always_eager"] = True
@ -66,11 +64,6 @@ class PytestTestRunner(DiscoverRunner): # pragma: no cover
"Default behaviour: use random.Random().getrandbits(32), so the seed is"
"different on each run.",
)
parser.add_argument(
"--no-capture",
action="store_true",
help="Disable any capturing of stdout/stderr during tests.",
)
def run_tests(self, test_labels, extra_tests=None, **kwargs):
"""Run pytest and return the exitcode.

View File

@ -317,7 +317,7 @@ class KerberosSource(Source):
usage="accept", name=name, store=self.get_gssapi_store()
)
except gssapi.exceptions.GSSError as exc:
LOGGER.warning("GSSAPI credentials failure", exc=exc)
LOGGER.warn("GSSAPI credentials failure", exc=exc)
return None

View File

@ -103,7 +103,6 @@ class LDAPSourceSerializer(SourceSerializer):
"user_object_filter",
"group_object_filter",
"group_membership_field",
"user_membership_attribute",
"object_uniqueness_field",
"password_login_update_internal_password",
"sync_users",
@ -112,7 +111,6 @@ class LDAPSourceSerializer(SourceSerializer):
"sync_parent_group",
"connectivity",
"lookup_groups_from_user",
"delete_not_found_objects",
]
extra_kwargs = {"bind_password": {"write_only": True}}
@ -140,7 +138,6 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
"user_object_filter",
"group_object_filter",
"group_membership_field",
"user_membership_attribute",
"object_uniqueness_field",
"password_login_update_internal_password",
"sync_users",
@ -150,7 +147,6 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
"user_property_mappings",
"group_property_mappings",
"lookup_groups_from_user",
"delete_not_found_objects",
]
search_fields = ["name", "slug"]
ordering = ["name"]

View File

@ -1,48 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-28 08:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0048_delete_oldauthenticatedsession_content_type"),
("authentik_sources_ldap", "0008_groupldapsourceconnection_userldapsourceconnection"),
]
operations = [
migrations.AddField(
model_name="groupldapsourceconnection",
name="validated_by",
field=models.UUIDField(
blank=True,
help_text="Unique ID used while checking if this object still exists in the directory.",
null=True,
),
),
migrations.AddField(
model_name="ldapsource",
name="delete_not_found_objects",
field=models.BooleanField(
default=False,
help_text="Delete authentik users and groups which were previously supplied by this source, but are now missing from it.",
),
),
migrations.AddField(
model_name="userldapsourceconnection",
name="validated_by",
field=models.UUIDField(
blank=True,
help_text="Unique ID used while checking if this object still exists in the directory.",
null=True,
),
),
migrations.AddIndex(
model_name="groupldapsourceconnection",
index=models.Index(fields=["validated_by"], name="authentik_s_validat_b70447_idx"),
),
migrations.AddIndex(
model_name="userldapsourceconnection",
index=models.Index(fields=["validated_by"], name="authentik_s_validat_ff2ebc_idx"),
),
]

View File

@ -1,32 +0,0 @@
# Generated by Django 5.1.9 on 2025-05-29 11:22
from django.apps.registry import Apps
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def set_user_membership_attribute(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
LDAPSource = apps.get_model("authentik_sources_ldap", "LDAPSource")
db_alias = schema_editor.connection.alias
LDAPSource.objects.using(db_alias).filter(group_membership_field="memberUid").all().update(
user_membership_attribute="ldap_uniq"
)
class Migration(migrations.Migration):
dependencies = [
("authentik_sources_ldap", "0009_groupldapsourceconnection_validated_by_and_more"),
]
operations = [
migrations.AddField(
model_name="ldapsource",
name="user_membership_attribute",
field=models.TextField(
default="distinguishedName",
help_text="Attribute which matches the value of `group_membership_field`.",
),
),
migrations.RunPython(set_user_membership_attribute, migrations.RunPython.noop),
]

View File

@ -100,10 +100,6 @@ class LDAPSource(Source):
default="(objectClass=person)",
help_text=_("Consider Objects matching this filter to be Users."),
)
user_membership_attribute = models.TextField(
default=LDAP_DISTINGUISHED_NAME,
help_text=_("Attribute which matches the value of `group_membership_field`."),
)
group_membership_field = models.TextField(
default="member", help_text=_("Field which contains members of a group.")
)
@ -141,14 +137,6 @@ class LDAPSource(Source):
),
)
delete_not_found_objects = models.BooleanField(
default=False,
help_text=_(
"Delete authentik users and groups which were previously supplied by this source, "
"but are now missing from it."
),
)
@property
def component(self) -> str:
return "ak-source-ldap-form"
@ -333,12 +321,6 @@ class LDAPSourcePropertyMapping(PropertyMapping):
class UserLDAPSourceConnection(UserSourceConnection):
validated_by = models.UUIDField(
null=True,
blank=True,
help_text=_("Unique ID used while checking if this object still exists in the directory."),
)
@property
def serializer(self) -> type[Serializer]:
from authentik.sources.ldap.api import (
@ -350,18 +332,9 @@ class UserLDAPSourceConnection(UserSourceConnection):
class Meta:
verbose_name = _("User LDAP Source Connection")
verbose_name_plural = _("User LDAP Source Connections")
indexes = [
models.Index(fields=["validated_by"]),
]
class GroupLDAPSourceConnection(GroupSourceConnection):
validated_by = models.UUIDField(
null=True,
blank=True,
help_text=_("Unique ID used while checking if this object still exists in the directory."),
)
@property
def serializer(self) -> type[Serializer]:
from authentik.sources.ldap.api import (
@ -373,6 +346,3 @@ class GroupLDAPSourceConnection(GroupSourceConnection):
class Meta:
verbose_name = _("Group LDAP Source Connection")
verbose_name_plural = _("Group LDAP Source Connections")
indexes = [
models.Index(fields=["validated_by"]),
]

View File

@ -9,7 +9,7 @@ from structlog.stdlib import BoundLogger, get_logger
from authentik.core.sources.mapper import SourceMapper
from authentik.lib.config import CONFIG
from authentik.lib.sync.mapper import PropertyMappingManager
from authentik.sources.ldap.models import LDAPSource, flatten
from authentik.sources.ldap.models import LDAPSource
class BaseLDAPSynchronizer:
@ -77,16 +77,6 @@ class BaseLDAPSynchronizer:
"""Get objects from LDAP, implemented in subclass"""
raise NotImplementedError()
def get_attributes(self, object):
if "attributes" not in object:
return
return object.get("attributes", {})
def get_identifier(self, attributes: dict):
if not attributes.get(self._source.object_uniqueness_field):
return
return flatten(attributes[self._source.object_uniqueness_field])
def search_paginator( # noqa: PLR0913
self,
search_base,

View File

@ -1,61 +0,0 @@
from collections.abc import Generator
from itertools import batched
from uuid import uuid4
from ldap3 import SUBTREE
from authentik.core.models import Group
from authentik.sources.ldap.models import GroupLDAPSourceConnection
from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer
from authentik.sources.ldap.sync.forward_delete_users import DELETE_CHUNK_SIZE, UPDATE_CHUNK_SIZE
class GroupLDAPForwardDeletion(BaseLDAPSynchronizer):
"""Delete LDAP Groups from authentik"""
@staticmethod
def name() -> str:
return "group_deletions"
def get_objects(self, **kwargs) -> Generator:
if not self._source.sync_groups or not self._source.delete_not_found_objects:
self.message("Group syncing is disabled for this Source")
return iter(())
uuid = uuid4()
groups = self._source.connection().extend.standard.paged_search(
search_base=self.base_dn_groups,
search_filter=self._source.group_object_filter,
search_scope=SUBTREE,
attributes=[self._source.object_uniqueness_field],
generator=True,
**kwargs,
)
for batch in batched(groups, UPDATE_CHUNK_SIZE, strict=False):
identifiers = []
for group in batch:
if not (attributes := self.get_attributes(group)):
continue
if identifier := self.get_identifier(attributes):
identifiers.append(identifier)
GroupLDAPSourceConnection.objects.filter(identifier__in=identifiers).update(
validated_by=uuid
)
return batched(
GroupLDAPSourceConnection.objects.filter(source=self._source)
.exclude(validated_by=uuid)
.values_list("group", flat=True)
.iterator(chunk_size=DELETE_CHUNK_SIZE),
DELETE_CHUNK_SIZE,
strict=False,
)
def sync(self, group_pks: tuple) -> int:
"""Delete authentik groups"""
if not self._source.sync_groups or not self._source.delete_not_found_objects:
self.message("Group syncing is disabled for this Source")
return -1
self._logger.debug("Deleting groups", group_pks=group_pks)
_, deleted_per_type = Group.objects.filter(pk__in=group_pks).delete()
return deleted_per_type.get(Group._meta.label, 0)

View File

@ -1,63 +0,0 @@
from collections.abc import Generator
from itertools import batched
from uuid import uuid4
from ldap3 import SUBTREE
from authentik.core.models import User
from authentik.sources.ldap.models import UserLDAPSourceConnection
from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer
UPDATE_CHUNK_SIZE = 10_000
DELETE_CHUNK_SIZE = 50
class UserLDAPForwardDeletion(BaseLDAPSynchronizer):
"""Delete LDAP Users from authentik"""
@staticmethod
def name() -> str:
return "user_deletions"
def get_objects(self, **kwargs) -> Generator:
if not self._source.sync_users or not self._source.delete_not_found_objects:
self.message("User syncing is disabled for this Source")
return iter(())
uuid = uuid4()
users = self._source.connection().extend.standard.paged_search(
search_base=self.base_dn_users,
search_filter=self._source.user_object_filter,
search_scope=SUBTREE,
attributes=[self._source.object_uniqueness_field],
generator=True,
**kwargs,
)
for batch in batched(users, UPDATE_CHUNK_SIZE, strict=False):
identifiers = []
for user in batch:
if not (attributes := self.get_attributes(user)):
continue
if identifier := self.get_identifier(attributes):
identifiers.append(identifier)
UserLDAPSourceConnection.objects.filter(identifier__in=identifiers).update(
validated_by=uuid
)
return batched(
UserLDAPSourceConnection.objects.filter(source=self._source)
.exclude(validated_by=uuid)
.values_list("user", flat=True)
.iterator(chunk_size=DELETE_CHUNK_SIZE),
DELETE_CHUNK_SIZE,
strict=False,
)
def sync(self, user_pks: tuple) -> int:
"""Delete authentik users"""
if not self._source.sync_users or not self._source.delete_not_found_objects:
self.message("User syncing is disabled for this Source")
return -1
self._logger.debug("Deleting users", user_pks=user_pks)
_, deleted_per_type = User.objects.filter(pk__in=user_pks).delete()
return deleted_per_type.get(User._meta.label, 0)

View File

@ -58,16 +58,18 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
return -1
group_count = 0
for group in page_data:
if (attributes := self.get_attributes(group)) is None:
if "attributes" not in group:
continue
attributes = group.get("attributes", {})
group_dn = flatten(flatten(group.get("entryDN", group.get("dn"))))
if not (uniq := self.get_identifier(attributes)):
if not attributes.get(self._source.object_uniqueness_field):
self.message(
f"Uniqueness field not found/not set in attributes: '{group_dn}'",
attributes=attributes.keys(),
dn=group_dn,
)
continue
uniq = flatten(attributes[self._source.object_uniqueness_field])
try:
defaults = {
k: flatten(v)

View File

@ -63,19 +63,25 @@ class MembershipLDAPSynchronizer(BaseLDAPSynchronizer):
group_member_dn = group_member.get("dn", {})
members.append(group_member_dn)
else:
if (attributes := self.get_attributes(group)) is None:
if "attributes" not in group:
continue
members = attributes.get(self._source.group_membership_field, [])
members = group.get("attributes", {}).get(self._source.group_membership_field, [])
ak_group = self.get_group(group)
if not ak_group:
continue
membership_mapping_attribute = LDAP_DISTINGUISHED_NAME
if self._source.group_membership_field == "memberUid":
# If memberships are based on the posixGroup's 'memberUid'
# attribute we use the RDN instead of the FDN to lookup members.
membership_mapping_attribute = LDAP_UNIQUENESS
users = User.objects.filter(
Q(**{f"attributes__{self._source.user_membership_attribute}__in": members})
Q(**{f"attributes__{membership_mapping_attribute}__in": members})
| Q(
**{
f"attributes__{self._source.user_membership_attribute}__isnull": True,
f"attributes__{membership_mapping_attribute}__isnull": True,
"ak_groups__in": [ak_group],
}
)
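For the membership hunk above: groupOfNames entries list full member DNs while posixGroup entries only carry bare memberUid values, which is why the user lookup attribute switches for memberUid sources. A plain-Python illustration; the example entries are made up, while "ldap_uniq" and "distinguishedName" are the attribute names appearing elsewhere in this diff:

# groupOfNames entries store full member DNs, posixGroup entries store bare uids.
examples = {
    "member": ["cn=alice,ou=users,dc=goauthentik,dc=io"],  # groupOfNames style
    "memberUid": ["alice"],  # posixGroup style
}


def lookup_attribute(group_membership_field: str) -> str:
    # Mirrors the branch above: memberUid groups are matched via the uniqueness
    # attribute, everything else via the distinguished name.
    if group_membership_field == "memberUid":
        return "ldap_uniq"
    return "distinguishedName"


for field, members in examples.items():
    print(field, members, "->", lookup_attribute(field))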

View File

@ -60,16 +60,18 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
return -1
user_count = 0
for user in page_data:
if (attributes := self.get_attributes(user)) is None:
if "attributes" not in user:
continue
attributes = user.get("attributes", {})
user_dn = flatten(user.get("entryDN", user.get("dn")))
if not (uniq := self.get_identifier(attributes)):
if not attributes.get(self._source.object_uniqueness_field):
self.message(
f"Uniqueness field not found/not set in attributes: '{user_dn}'",
attributes=attributes.keys(),
dn=user_dn,
)
continue
uniq = flatten(attributes[self._source.object_uniqueness_field])
try:
defaults = {
k: flatten(v)

View File

@ -17,8 +17,6 @@ from authentik.lib.utils.reflection import class_to_path, path_to_class
from authentik.root.celery import CELERY_APP
from authentik.sources.ldap.models import LDAPSource
from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer
from authentik.sources.ldap.sync.forward_delete_groups import GroupLDAPForwardDeletion
from authentik.sources.ldap.sync.forward_delete_users import UserLDAPForwardDeletion
from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer
from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer
from authentik.sources.ldap.sync.users import UserLDAPSynchronizer
@ -54,11 +52,11 @@ def ldap_connectivity_check(pk: str | None = None):
@CELERY_APP.task(
# We multiply the configured hours timeout by 3.5 as we run user and
# group in parallel and then membership, then deletions, so 3x is to cover the serial tasks,
# We multiply the configured hours timeout by 2.5 as we run user and
# group in parallel and then membership, so 2x is to cover the serial tasks,
# and 0.5x on top of that to give some more leeway
soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3.5,
task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3.5,
soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5,
task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5,
)
def ldap_sync_single(source_pk: str):
"""Sync a single source"""
@ -81,25 +79,6 @@ def ldap_sync_single(source_pk: str):
group(
ldap_sync_paginator(source, MembershipLDAPSynchronizer),
),
# Finally, deletions. What we'd really like to do here is something like
# ```
# user_identifiers = <ldap query>
# User.objects.exclude(
# usersourceconnection__identifier__in=user_uniqueness_identifiers,
# ).delete()
# ```
# This runs into performance issues in large installations. So instead we spread the
# work out into three steps:
# 1. Get every object from the LDAP source.
# 2. Mark every object as "safe" in the database. This is quick, but any error could
# mean deleting users which should not be deleted, so we do it immediately, in
# large chunks, and only queue the deletion step afterwards.
# 3. Delete every unmarked item. This is slow, so we spread it over many tasks in
# small chunks.
group(
ldap_sync_paginator(source, UserLDAPForwardDeletion)
+ ldap_sync_paginator(source, GroupLDAPForwardDeletion),
),
)
task()
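The removed comment block above describes the mark-and-sweep approach behind the forward-deletion synchronizers: mark every connection still present in LDAP with a per-run UUID, then delete whatever was not marked, in small chunks. A toy, in-memory sketch of the same idea (assumes Python 3.12+ for itertools.batched; names and chunk size are illustrative):

from itertools import batched
from uuid import uuid4

# identifier -> connection record; "ghost" no longer exists in the directory.
connections = {
    "alice": {"validated_by": None},
    "bob": {"validated_by": None},
    "ghost": {"validated_by": None},
}
run_id = uuid4()

# Step 1 + 2: walk the directory and mark everything that still exists.
for identifier in ("alice", "bob"):  # pretend these came from a paged LDAP search
    connections[identifier]["validated_by"] = run_id

# Step 3: delete unmarked objects in small chunks.
stale = [ident for ident, conn in connections.items() if conn["validated_by"] != run_id]
for chunk in batched(stale, 50):
    for ident in chunk:
        del connections[ident]

print(sorted(connections))  # ['alice', 'bob']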

View File

@ -2,33 +2,6 @@
from ldap3 import MOCK_SYNC, OFFLINE_SLAPD_2_4, Connection, Server
# The mock modifies these in place, so we have to define them per string
user_in_slapd_dn = "cn=user_in_slapd_cn,ou=users,dc=goauthentik,dc=io"
user_in_slapd_cn = "user_in_slapd_cn"
user_in_slapd_uid = "user_in_slapd_uid"
user_in_slapd_object_class = "person"
user_in_slapd = {
"dn": user_in_slapd_dn,
"attributes": {
"cn": user_in_slapd_cn,
"uid": user_in_slapd_uid,
"objectClass": user_in_slapd_object_class,
},
}
group_in_slapd_dn = "cn=user_in_slapd_cn,ou=groups,dc=goauthentik,dc=io"
group_in_slapd_cn = "group_in_slapd_cn"
group_in_slapd_uid = "group_in_slapd_uid"
group_in_slapd_object_class = "groupOfNames"
group_in_slapd = {
"dn": group_in_slapd_dn,
"attributes": {
"cn": group_in_slapd_cn,
"uid": group_in_slapd_uid,
"objectClass": group_in_slapd_object_class,
"member": [user_in_slapd["dn"]],
},
}
def mock_slapd_connection(password: str) -> Connection:
"""Create mock SLAPD connection"""
@ -123,14 +96,5 @@ def mock_slapd_connection(password: str) -> Connection:
"objectClass": "posixAccount",
},
)
# Known user and group
connection.strategy.add_entry(
user_in_slapd["dn"],
user_in_slapd["attributes"],
)
connection.strategy.add_entry(
group_in_slapd["dn"],
group_in_slapd["attributes"],
)
connection.bind()
return connection

View File

@ -13,26 +13,14 @@ from authentik.events.system_tasks import TaskStatus
from authentik.lib.generators import generate_id, generate_key
from authentik.lib.sync.outgoing.exceptions import StopSync
from authentik.lib.utils.reflection import class_to_path
from authentik.sources.ldap.models import (
GroupLDAPSourceConnection,
LDAPSource,
LDAPSourcePropertyMapping,
UserLDAPSourceConnection,
)
from authentik.sources.ldap.sync.forward_delete_users import DELETE_CHUNK_SIZE
from authentik.sources.ldap.models import LDAPSource, LDAPSourcePropertyMapping
from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer
from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer
from authentik.sources.ldap.sync.users import UserLDAPSynchronizer
from authentik.sources.ldap.tasks import ldap_sync, ldap_sync_all
from authentik.sources.ldap.tests.mock_ad import mock_ad_connection
from authentik.sources.ldap.tests.mock_freeipa import mock_freeipa_connection
from authentik.sources.ldap.tests.mock_slapd import (
group_in_slapd_cn,
group_in_slapd_uid,
mock_slapd_connection,
user_in_slapd_cn,
user_in_slapd_uid,
)
from authentik.sources.ldap.tests.mock_slapd import mock_slapd_connection
LDAP_PASSWORD = generate_key()
@ -269,56 +257,12 @@ class LDAPSyncTests(TestCase):
self.source.group_membership_field = "memberUid"
self.source.user_object_filter = "(objectClass=posixAccount)"
self.source.group_object_filter = "(objectClass=posixGroup)"
self.source.user_membership_attribute = "uid"
self.source.user_property_mappings.set(
[
*LDAPSourcePropertyMapping.objects.filter(
Q(managed__startswith="goauthentik.io/sources/ldap/default")
| Q(managed__startswith="goauthentik.io/sources/ldap/openldap")
).all(),
LDAPSourcePropertyMapping.objects.create(
name="name",
expression='return {"attributes": {"uid": list_flatten(ldap.get("uid"))}}',
),
]
)
self.source.group_property_mappings.set(
LDAPSourcePropertyMapping.objects.filter(
managed="goauthentik.io/sources/ldap/openldap-cn"
Q(managed__startswith="goauthentik.io/sources/ldap/default")
| Q(managed__startswith="goauthentik.io/sources/ldap/openldap")
)
)
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
self.source.save()
user_sync = UserLDAPSynchronizer(self.source)
user_sync.sync_full()
group_sync = GroupLDAPSynchronizer(self.source)
group_sync.sync_full()
membership_sync = MembershipLDAPSynchronizer(self.source)
membership_sync.sync_full()
# Test if membership mapping based on memberUid works.
posix_group = Group.objects.filter(name="group-posix").first()
self.assertTrue(posix_group.users.filter(name="user-posix").exists())
def test_sync_groups_openldap_posix_group_nonstandard_membership_attribute(self):
"""Test posix group sync"""
self.source.object_uniqueness_field = "cn"
self.source.group_membership_field = "memberUid"
self.source.user_object_filter = "(objectClass=posixAccount)"
self.source.group_object_filter = "(objectClass=posixGroup)"
self.source.user_membership_attribute = "cn"
self.source.user_property_mappings.set(
[
*LDAPSourcePropertyMapping.objects.filter(
Q(managed__startswith="goauthentik.io/sources/ldap/default")
| Q(managed__startswith="goauthentik.io/sources/ldap/openldap")
).all(),
LDAPSourcePropertyMapping.objects.create(
name="name",
expression='return {"attributes": {"cn": list_flatten(ldap.get("cn"))}}',
),
]
)
self.source.group_property_mappings.set(
LDAPSourcePropertyMapping.objects.filter(
managed="goauthentik.io/sources/ldap/openldap-cn"
@ -364,160 +308,3 @@ class LDAPSyncTests(TestCase):
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
def test_user_deletion(self):
"""Test user deletion"""
user = User.objects.create_user(username="not-in-the-source")
UserLDAPSourceConnection.objects.create(
user=user, source=self.source, identifier="not-in-the-source"
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.delete_not_found_objects = True
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertFalse(User.objects.filter(username="not-in-the-source").exists())
def test_user_deletion_still_in_source(self):
"""Test that user is not deleted if it's still in the source"""
username = user_in_slapd_cn
identifier = user_in_slapd_uid
user = User.objects.create_user(username=username)
UserLDAPSourceConnection.objects.create(
user=user, source=self.source, identifier=identifier
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.delete_not_found_objects = True
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertTrue(User.objects.filter(username=username).exists())
def test_user_deletion_no_sync(self):
"""Test that user is not deleted if sync_users is False"""
user = User.objects.create_user(username="not-in-the-source")
UserLDAPSourceConnection.objects.create(
user=user, source=self.source, identifier="not-in-the-source"
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.delete_not_found_objects = True
self.source.sync_users = False
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertTrue(User.objects.filter(username="not-in-the-source").exists())
def test_user_deletion_no_delete(self):
"""Test that user is not deleted if delete_not_found_objects is False"""
user = User.objects.create_user(username="not-in-the-source")
UserLDAPSourceConnection.objects.create(
user=user, source=self.source, identifier="not-in-the-source"
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertTrue(User.objects.filter(username="not-in-the-source").exists())
def test_group_deletion(self):
"""Test group deletion"""
group = Group.objects.create(name="not-in-the-source")
GroupLDAPSourceConnection.objects.create(
group=group, source=self.source, identifier="not-in-the-source"
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.delete_not_found_objects = True
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertFalse(Group.objects.filter(name="not-in-the-source").exists())
def test_group_deletion_still_in_source(self):
"""Test that group is not deleted if it's still in the source"""
groupname = group_in_slapd_cn
identifier = group_in_slapd_uid
group = Group.objects.create(name=groupname)
GroupLDAPSourceConnection.objects.create(
group=group, source=self.source, identifier=identifier
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.delete_not_found_objects = True
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertTrue(Group.objects.filter(name=groupname).exists())
def test_group_deletion_no_sync(self):
"""Test that group is not deleted if sync_groups is False"""
group = Group.objects.create(name="not-in-the-source")
GroupLDAPSourceConnection.objects.create(
group=group, source=self.source, identifier="not-in-the-source"
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.delete_not_found_objects = True
self.source.sync_groups = False
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertTrue(Group.objects.filter(name="not-in-the-source").exists())
def test_group_deletion_no_delete(self):
"""Test that group is not deleted if delete_not_found_objects is False"""
group = Group.objects.create(name="not-in-the-source")
GroupLDAPSourceConnection.objects.create(
group=group, source=self.source, identifier="not-in-the-source"
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertTrue(Group.objects.filter(name="not-in-the-source").exists())
def test_batch_deletion(self):
"""Test batch deletion"""
BATCH_SIZE = DELETE_CHUNK_SIZE + 1
for i in range(BATCH_SIZE):
user = User.objects.create_user(username=f"not-in-the-source-{i}")
group = Group.objects.create(name=f"not-in-the-source-{i}")
group.users.add(user)
UserLDAPSourceConnection.objects.create(
user=user, source=self.source, identifier=f"not-in-the-source-{i}-user"
)
GroupLDAPSourceConnection.objects.create(
group=group, source=self.source, identifier=f"not-in-the-source-{i}-group"
)
self.source.object_uniqueness_field = "uid"
self.source.group_object_filter = "(objectClass=groupOfNames)"
self.source.delete_not_found_objects = True
self.source.save()
connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_all.delay().get()
self.assertFalse(User.objects.filter(username__startswith="not-in-the-source").exists())
self.assertFalse(Group.objects.filter(name__startswith="not-in-the-source").exists())

View File

@ -9,7 +9,6 @@ from django.http.response import HttpResponseBadRequest
from django.shortcuts import get_object_or_404, redirect
from django.utils.decorators import method_decorator
from django.utils.http import urlencode
from django.utils.translation import gettext as _
from django.views import View
from django.views.decorators.csrf import csrf_exempt
from structlog.stdlib import get_logger
@ -129,9 +128,7 @@ class InitiateView(View):
# otherwise we default to POST_AUTO, with direct redirect
if source.binding_type == SAMLBindingTypes.POST:
injected_stages.append(in_memory_stage(ConsentStageView))
plan_kwargs[PLAN_CONTEXT_CONSENT_HEADER] = _(
"Continue to {source_name}".format(source_name=source.name)
)
plan_kwargs[PLAN_CONTEXT_CONSENT_HEADER] = f"Continue to {source.name}"
injected_stages.append(in_memory_stage(AutosubmitStageView))
return self.handle_login_flow(
source,

View File

@ -97,8 +97,7 @@ class GroupsView(SCIMObjectView):
self.logger.warning("Invalid group member", exc=exc)
continue
query |= Q(uuid=member.value)
if query:
group.users.set(User.objects.filter(query))
group.users.set(User.objects.filter(query))
if not connection:
connection, _ = SCIMSourceGroup.objects.get_or_create(
source=self.source,

View File

@ -151,7 +151,9 @@ class AuthenticatorValidateStageWebAuthnTests(FlowTestCase):
webauthn_user_verification=UserVerification.PREFERRED,
)
stage.webauthn_allowed_device_types.set(
WebAuthnDeviceType.objects.filter(description="YubiKey 5 Series")
WebAuthnDeviceType.objects.filter(
description="Android Authenticator with SafetyNet Attestation"
)
)
session = self.client.session
plan = FlowPlan(flow_pk=flow.pk.hex)
@ -337,7 +339,9 @@ class AuthenticatorValidateStageWebAuthnTests(FlowTestCase):
device_classes=[DeviceClasses.WEBAUTHN],
)
stage.webauthn_allowed_device_types.set(
WebAuthnDeviceType.objects.filter(description="YubiKey 5 Series")
WebAuthnDeviceType.objects.filter(
description="Android Authenticator with SafetyNet Attestation"
)
)
session = self.client.session
plan = FlowPlan(flow_pk=flow.pk.hex)

View File

@ -143,18 +143,13 @@
"icon_light": "data:image/svg+xml;base64,CjxzdmcgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB3aWR0aD0iNzJweCIgaGVpZ2h0PSI3MnB4IiB2aWV3Qm94PSIwIDAgNzIgNzIiPgk8ZGVmcz4KICAgICAgICA8ZmlsdGVyIGlkPSJhIiB3aWR0aD0iMjAwJSIgaGVpZ2h0PSIyMDAlIj4KICAgICAgICAgICAgPGZlT2Zmc2V0IHJlc3VsdD0ib2ZmT3V0IiBpbj0iU291cmNlQWxwaGEiIGR5PSIyLjIiLz4KICAgICAgICAgICAgPGZlR2F1c3NpYW5CbHVyIHJlc3VsdD0iYmx1ck91dCIgaW49Im9mZk91dCIgc3RkRGV2aWF0aW9uPSIxLjUiLz4KICAgICAgICAgICAgPGZlQ29sb3JNYXRyaXggdmFsdWVzPSIwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwIDAgMCAwLjQgMCIvPgogICAgICAgICAgICA8ZmVNZXJnZT4KICAgICAgICAgICAgICAgIDxmZU1lcmdlTm9kZS8+CiAgICAgICAgICAgICAgICA8ZmVNZXJnZU5vZGUgaW49IlNvdXJjZUdyYXBoaWMiLz4KICAgICAgICAgICAgPC9mZU1lcmdlPgogICAgICAgIDwvZmlsdGVyPgogICAgPC9kZWZzPgo8cGF0aCBmaWxsPSIjZmZmZmZmIiBmaWx0ZXI9InVybCgjYSkiIGQ9Ik0zMS4wNTksNS4zOTVjMTYuODktMi43MjcsMzIuODE3LDguNzcyLDM1LjU0NSwyNS42NjJjMi43MjcsMTYuODktOC43NzIsMzIuODE3LTI1LjY2MiwzNS41NDUKCUMyNC4wNTEsNjkuMzI5LDguMTI0LDU3LjgzLDUuMzk3LDQwLjk0QzIuNjcsMjQuMDQ5LDE0LjE2OSw4LjEyMiwzMS4wNTksNS4zOTV6Ii8+CjxwYXRoIGZpbGw9IiMwMDY4YzMiIGQ9Ik01NS4zNjQsMTcuMjAyYy01LjA0Ny01LjE5Ny0xMS44MDItOC4xMDktMTkuMDItOC4yYy03LjE5MS0wLjA5LTEzLjk4NSwyLjY2OS0xOS4xNDksNy43NjgKCUMxMS45MSwyMS45ODksOSwyOC45NTUsOSwzNi4zOTJsMC4wNDQsMS4yNmMwLDEuMTgxLDAuOTYxLDIuMTQyLDIuMTQyLDIuMTQyczIuMTQxLTAuOTYsMi4xNDItMi4xNGwwLjAxLTAuOTEzCgljMC05Ljk0NSw4LjQ1My0yMC41OTMsMjEuMDM1LTIwLjU5M2MxMy4xMzIsMCwyMS4yNjEsMTAuNzEyLDIxLjI2MSwyMC42MzdjMCw1LjE3My0yLjA2Myw5LjkxOS01LjgwOCwxMy4zNjMKCUM0Ni4xNyw1My41MDksNDEuMjYsNTUuMzYsMzYsNTUuMzZjLTMuMTMsMC02LjIxOS0wLjc1NS04Ljk1OC0yLjE4NmwxOC44Ny04LjY4NmMxLjI2Ny0wLjU4MywyLjExNS0xLjgxLDIuMjEzLTMuMjAxCglzLTAuNTY5LTIuNzI1LTEuNzU0LTMuNDg3bC0xNS43MDYtOC40MTVjLTAuNTU0LTAuMzU3LTEuMjEzLTAuNDc3LTEuODU4LTAuMzM3Yy0wLjY0NCwwLjEzOS0xLjE5NSwwLjUyMS0xLjU1MiwxLjA3NQoJYy0wLjczNywxLjE0NC0wLjQwNiwyLjY3MywwLjcyMSwzLjM5N2w4LjQ1NCw2LjkyM2wtMTguNDE5LDguNDc4Yy0xLjQyNywwLjY1Ny0yLjI5OCwyLjEtMi4yMTgsMy42NzUKCWMwLjA0OCwwLjk0OSwwLjQ5MywxLjg4NCwxLjI1MywyLjYzNEMyMi4xNTgsNjAuMjY5LDI4Ljg0LDYzLDM1Ljk4NSw2M2MwLjQ0NSwwLDAuODkyLTAuMDExLDEuMzQxLTAuMDMyCgljMTQuMTU2LTAuNjczLDI1LjQzMi0xMi4zMTUsMjUuNjcxLTI2LjUwNUM2My4xMTgsMjkuMjM2LDYwLjQwNywyMi4zOTYsNTUuMzY0LDE3LjIwMnoiLz4KPC9zdmc+Cg=="
},
"22248c4c-7a12-46e2-9a41-44291b373a4d": {
"name": "LogMeOnce",
"icon_dark": "data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjxzdmcgdmVyc2lvbj0iMS4xIiB2aWV3Qm94PSIwIDAgMjA0OCAyMDQ4IiB3aWR0aD0iMTI4IiBoZWlnaHQ9IjEyOCIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZD0iTSAwIDAgTCAtNSAxIEwgLTE1IDQgTCAtMzggMTEgTCAtNTcgMTggTCAtNzYgMjkgTCAtOTAgMzggTCAtMTA5IDUyIEwgLTExNiA1OSBMIC0xMjcgNjggTCAtMTQwIDgxIEwgLTE0OCA4OCBMIC0xNDkgOTEgTCAtMTUxIDkxIEwgLTE2MSAxMDEgTCAtMTY5IDEwOCBMIC0xNjkgMTEwIEwgLTE3MSAxMTAgTCAtMTgwIDExOSBMIC0xODggMTI2IEwgLTIwMSAxMzcgTCAtMjEyIDE0NiBMIC0yMjYgMTU3IEwgLTIzOSAxNjcgTCAtMjUwIDE3NSBMIC0yNzAgMTg4IEwgLTI5MSAyMDIgTCAtMzA5IDIxMiBMIC0zMzAgMjI0IEwgLTM1MyAyMzUgTCAtMzg2IDI1MCBMIC00MDcgMjU5IEwgLTQ0OSAyNzMgTCAtNDcyIDI4MSBMIC01MDIgMjg5IEwgLTU1MSAyOTggTCAtNjE1IDMwOSBMIC02NDcgMzE2IEwgLTY4MSAzMjQgTCAtNjk4IDMzMCBMIC03MTQgMzM4IEwgLTczNSAzNTEgTCAtNzQ1IDM1OCBMIC03NTYgMzY3IEwgLTc2NCAzNzUgTCAtNzY2IDM3OSBMIC03NjggMzc5IEwgLTc3MyAzODUgTCAtNzgxIDM5NSBMIC03OTEgNDEwIEwgLTgwMSA0MjggTCAtODEwIDQ0NyBMIC04MTYgNDY1IEwgLTgyMiA1MDAgTCAtODI3IDU0NSBMIC04MzAgNTkyIEwgLTgzMiA2NTMgTCAtODMyIDcxMiBMIC04MzEgNzQyIEwgLTgyOCA3OTQgTCAtODIyIDg2NiBMIC04MTYgOTI4IEwgLTgxMiA5NTcgTCAtODAwIDEwMjAgTCAtNzg5IDEwNzIgTCAtNzgzIDEwOTggTCAtNzY1IDExNjIgTCAtNzUxIDEyMDcgTCAtNzQ0IDEyMjUgTCAtNzM0IDEyNTQgTCAtNzIwIDEyOTAgTCAtNzA0IDEzMjggTCAtNjg5IDEzNjEgTCAtNjY4IDE0MDMgTCAtNjUzIDE0MzEgTCAtNjM5IDE0NTYgTCAtNjIwIDE0ODggTCAtNjA3IDE1MDkgTCAtNTk0IDE1MjkgTCAtNTg0IDE1NDQgTCAtNTcxIDE1NjMgTCAtNTU4IDE1ODEgTCAtNTQ0IDE2MDAgTCAtNTMzIDE2MTUgTCAtNTIwIDE2MzIgTCAtNTA3IDE2NDggTCAtNDc5IDE2ODIgTCAtNDcwIDE2OTMgTCAtNDYwIDE3MDQgTCAtNDQ5IDE3MTYgTCAtNDQyIDE3MjQgTCAtNDMyIDE3MzQgTCAtNDI1IDE3NDIgTCAtMzY4IDE3OTkgTCAtMzUxIDE4MTUgTCAtMzM1IDE4MzAgTCAtMzI3IDE4MzcgTCAtMzE0IDE4NDkgTCAtMjg4IDE4NzEgTCAtMjcxIDE4ODUgTCAtMjYxIDE4OTMgTCAtMjQ0IDE5MDcgTCAtMjI4IDE5MTkgTCAtMjE1IDE5MjkgTCAtMTk2IDE5NDMgTCAtMTg0IDE5NTIgTCAtMTY4IDE5NjMgTCAtMTQ4IDE5NzcgTCAtMTMzIDE5ODcgTCAtMTEyIDIwMDAgTCAtODggMjAxMyBMIC03MiAyMDIxIEwgLTUyIDIwMzAgTCAtMzIgMjAzNyBMIDEyIDIwNDggTCA2MCAyMDQ4IEwgNjEgMjA0NyBMIDczIDIwNDQgTCAxMDAgMjAzNyBMIDEyMSAyMDI5IEwgMTQ5IDIwMTUgTCAxNjcgMjAwNSBMIDE4NiAxOTk0IEwgMjEwIDE5NzkgTCAyMzEgMTk2NSBMIDI2NiAxOTQxIEwgMjg0IDE5MjggTCAyOTUgMTkyMCBMIDMwNiAxOTExIEwgMzI0IDE4OTggTCAzMzggMTg4NyBMIDM0OSAxODc3IEwgMzU4IDE4NzAgTCAzNjkgMTg2MCBMIDM4MCAxODUxIEwgMzgwIDE4NDkgTCAzODIgMTg0OSBMIDM4NSAxODQ2IEwgMzk2IDE4MzcgTCA0MDMgMTgzMCBMIDQxMSAxODIzIEwgNDE4IDE4MTYgTCA0MjYgMTgwOSBMIDQ3NSAxNzYwIEwgNDc3IDE3NTYgTCA0NzkgMTc1NiBMIDQ4NyAxNzQ3IEwgNTAzIDE3MzAgTCA1MTAgMTcyMiBMIDUxOSAxNzEyIEwgNTI2IDE3MDQgTCA1MzggMTY5MSBMIDU0NyAxNjgwIEwgNTU4IDE2NjcgTCA1NjYgMTY1NiBMIDU3OSAxNjQwIEwgNTkwIDE2MjYgTCA2MDYgMTYwNSBMIDYyMSAxNTgzIEwgNjMxIDE1NjkgTCA2NDUgMTU0OSBMIDY1NSAxNTMzIEwgNjcwIDE1MTAgTCA2ODUgMTQ4NSBMIDY5NiAxNDY3IEwgNzA2IDE0NDkgTCA3MzAgMTQwNSBMIDc0NSAxMzc0IEwgNzYyIDEzMzggTCA3NzQgMTMxMCBMIDc5MiAxMjY3IEwgODExIDEyMTYgTCA4MjcgMTE2NiBMIDg0MiAxMTE3IEwgODU4IDEwNTEgTCA4NzAgOTk2IEwgODc1IDk2NyBMIDg4MSA5MTcgTCA4ODYgODgyIEwgODkxIDg0NiBMIDg5NCA4MTIgTCA4OTYgNzc0IEwgODk2IDY1NSBMIDg5NSA2MzAgTCA4OTIgNTkzIEwgODg4IDU1OSBMIDg3NyA0NzAgTCA4NzIgNDQ1IEwgODY1IDQyNiBMIDg1NyA0MTEgTCA4NDcgMzk1IEwgODM0IDM3OCBMIDgxNCAzNTggTCA4MDAgMzQ3IEwgNzg1IDMzNyBMIDc2NiAzMjcgTCA3NTAgMzIxIEwgNzI3IDMxNSBMIDY5MyAzMDggTCA2NTAgMzAxIEwgNTk3IDI5MiBMIDU3MCAyODYgTCA1NDggMjgwIEwgNTE1IDI3MCBMIDQ4NyAyNjEgTCA0NTkgMjUwIEwgNDM4IDI0MSBMIDQxNCAyMzAgTCAzODIgMjEzIEwgMzU4IDE5OSBMIDM0NyAxOTIgTCAzMzAgMTgwIEwgMzEyIDE2NyBMIDMwMCAxNTcgTCAyODYgMTQ2IEwgMjcxIDEzMyBMIDI2MyAxMjYgTCAyNTEgMTE1IEwgMjQwIDEwNiBMIDIyOCA5NSBMIDIwMCA3MSBMIDE4OSA2MiBMIDE3NCA1MCBMIDE1OCAzOCBMIDE0MiAyNyBMIDEyNyAxOSBMIDExNSAxNCBMIDk1IDggTCA3MSAwIEw
gMCAwIHogTSAzMCA2MjIgTCA1MSA2MjMgTCA3OSA2MjcgTCAxMDAgNjMzIEwgMTIxIDY0MSBMIDE0MSA2NTIgTCAxNTcgNjYzIEwgMTcyIDY3NiBMIDE4NiA2OTAgTCAxODYgNjkyIEwgMTg4IDY5MiBMIDIwNiA3MTYgTCAyMTYgNzM0IEwgMjIxIDc0NSBMIDIyNiA3NTggTCAyMzMgNzg0IEwgMjM2IDc5OCBMIDIzNyA4MDggTCAyMzcgODQwIEwgMjMzIDg2NSBMIDIyNiA4OTEgTCAyMTkgOTA5IEwgMjA5IDkyOCBMIDE5NSA5NDkgTCAxODYgOTU5IEwgMTc5IDk2NyBMIDE2NiA5ODAgTCAxNTUgOTg5IEwgMTQyIDk5OCBMIDEyNiAxMDA3IEwgMTEzIDEwMTIgTCAxMTQgMTAyNCBMIDEyMiAxMDYwIEwgMTM3IDExMjMgTCAxNTYgMTIwOCBMIDE3MCAxMjcxIEwgMTc0IDEyOTYgTCAxNzYgMTMxMCBMIDE3NiAxMzM1IEwgMTczIDEzNDkgTCAxNjYgMTM1OSBMIDE1NyAxMzY3IEwgMTQ2IDEzNzIgTCAxNDAgMTM3NCBMIDExOSAxMzc2IEwgNTEgMTM3NiBMIC0zMiAxMzc3IEwgLTY0IDEzNzcgTCAtNzggMTM3NCBMIC04NSAxMzcwIEwgLTk4IDEzNTkgTCAtMTA2IDEzNDkgTCAtMTEwIDEzNDEgTCAtMTEyIDEzMzMgTCAtMTEyIDEzMTggTCAtMTA3IDEyODggTCAtMTAwIDEyNTYgTCAtODUgMTE5NiBMIC02OSAxMTI0IEwgLTU3IDEwNzIgTCAtNTAgMTAzNiBMIC00NyAxMDExIEwgLTUzIDEwMDkgTCAtNjkgMTAwMSBMIC04MSA5OTQgTCAtOTMgOTg1IEwgLTEwMyA5NzYgTCAtMTExIDk2OSBMIC0xMjAgOTYwIEwgLTEzMSA5NDYgTCAtMTQyIDkyOSBMIC0xNTIgOTEwIEwgLTE2MCA4OTAgTCAtMTY2IDg3MCBMIC0xNjkgODUzIEwgLTE3MCA4NDQgTCAtMTcwIDgxNCBMIC0xNjcgNzk0IEwgLTE2MSA3NjcgTCAtMTU0IDc0NiBMIC0xMzcgNzEzIEwgLTEyNiA2OTggTCAtMTE5IDY5MCBMIC0xMDggNjc4IEwgLTkzIDY2NSBMIC03NyA2NTMgTCAtNTYgNjQxIEwgLTM4IDYzMyBMIC0xNiA2MjcgTCAzIDYyNCBMIDMwIDYyMiB6ICIgdHJhbnNmb3JtPSJ0cmFuc2xhdGUoOTkxKSIgc3R5bGU9ImZpbGw6I2ZmZmZmZiIgLz4KPC9zdmc+Cg==",
"icon_light": "data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHN2ZyB2ZXJzaW9uPSIxLjEiIHZpZXdCb3g9IjAgMCAyMDQ4IDIwNDgiIHdpZHRoPSIxMjgiIGhlaWdodD0iMTI4IiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciPgo8cGF0aCB0cmFuc2Zvcm09InRyYW5zbGF0ZSg5OTEpIiBkPSJtMCAwaDcxbDI0IDggMjAgNiAxMiA1IDE1IDggMTYgMTEgMTYgMTIgMTUgMTIgMTEgOSAyOCAyNCAxMiAxMSAxMSA5IDEyIDExIDggNyAxNSAxMyAxNCAxMSAxMiAxMCAxOCAxMyAxNyAxMiAxMSA3IDI0IDE0IDMyIDE3IDI0IDExIDIxIDkgMjggMTEgMjggOSAzMyAxMCAyMiA2IDI3IDYgNTMgOSA0MyA3IDM0IDcgMjMgNiAxNiA2IDE5IDEwIDE1IDEwIDE0IDExIDIwIDIwIDEzIDE3IDEwIDE2IDggMTUgNyAxOSA1IDI1IDExIDg5IDQgMzQgMyAzNyAxIDI1djExOWwtMiAzOC0zIDM0LTUgMzYtNSAzNS02IDUwLTUgMjktMTIgNTUtMTYgNjYtMTUgNDktMTYgNTAtMTkgNTEtMTggNDMtMTIgMjgtMTcgMzYtMTUgMzEtMjQgNDQtMTAgMTgtMTEgMTgtMTUgMjUtMTUgMjMtMTAgMTYtMTQgMjAtMTAgMTQtMTUgMjItMTYgMjEtMTEgMTQtMTMgMTYtOCAxMS0xMSAxMy05IDExLTEyIDEzLTcgOC05IDEwLTcgOC0xNiAxNy04IDloLTJsLTIgNC00OSA0OS04IDctNyA3LTggNy03IDctMTEgOS0zIDNoLTJ2MmwtMTEgOS0xMSAxMC05IDctMTEgMTAtMTQgMTEtMTggMTMtMTEgOS0xMSA4LTE4IDEzLTM1IDI0LTIxIDE0LTI0IDE1LTE5IDExLTE4IDEwLTI4IDE0LTIxIDgtMjcgNy0xMiAzLTEgMWgtNDhsLTQ0LTExLTIwLTctMjAtOS0xNi04LTI0LTEzLTIxLTEzLTE1LTEwLTIwLTE0LTE2LTExLTEyLTktMTktMTQtMTMtMTAtMTYtMTItMTctMTQtMTAtOC0xNy0xNC0yNi0yMi0xMy0xMi04LTctMTYtMTUtMTctMTYtNTctNTctNy04LTEwLTEwLTctOC0xMS0xMi0xMC0xMS05LTExLTI4LTM0LTEzLTE2LTEzLTE3LTExLTE1LTE0LTE5LTEzLTE4LTEzLTE5LTEwLTE1LTEzLTIwLTEzLTIxLTE5LTMyLTE0LTI1LTE1LTI4LTIxLTQyLTE1LTMzLTE2LTM4LTE0LTM2LTEwLTI5LTctMTgtMTQtNDUtMTgtNjQtNi0yNi0xMS01Mi0xMi02My00LTI5LTYtNjItNi03Mi0zLTUyLTEtMzB2LTU5bDItNjEgMy00NyA1LTQ1IDYtMzUgNi0xOCA5LTE5IDEwLTE4IDEwLTE1IDgtMTAgNS02aDJsMi00IDgtOCAxMS05IDEwLTcgMjEtMTMgMTYtOCAxNy02IDM0LTggMzItNyA2NC0xMSA0OS05IDMwLTggMjMtOCA0Mi0xNCAyMS05IDMzLTE1IDIzLTExIDIxLTEyIDE4LTEwIDIxLTE0IDIwLTEzIDExLTggMTMtMTAgMTQtMTEgMTEtOSAxMy0xMSA4LTcgOS05aDJ2LTJsOC03IDEwLTEwaDJsMS0zIDgtNyAxMy0xMyAxMS05IDctNyAxOS0xNCAxNC05IDE5LTExIDE5LTcgMjMtNyAxMC0zeiIgZmlsbD0iI0YxODQyOSIvPgo8cGF0aCB0cmFuc2Zvcm09InRyYW5zbGF0ZSgxMDIxLDYyMikiIGQ9Im0wIDAgMjEgMSAyOCA0IDIxIDYgMjEgOCAyMCAxMSAxNiAxMSAxNSAxMyAxNCAxNHYyaDJsMTggMjQgMTAgMTggNSAxMSA1IDEzIDcgMjYgMyAxNCAxIDEwdjMybC00IDI1LTcgMjYtNyAxOC0xMCAxOS0xNCAyMS05IDEwLTcgOC0xMyAxMy0xMSA5LTEzIDktMTYgOS0xMyA1IDEgMTIgOCAzNiAxNSA2MyAxOSA4NSAxNCA2MyA0IDI1IDIgMTR2MjVsLTMgMTQtNyAxMC05IDgtMTEgNS02IDItMjEgMmgtNjhsLTgzIDFoLTMybC0xNC0zLTctNC0xMy0xMS04LTEwLTQtOC0yLTh2LTE1bDUtMzAgNy0zMiAxNS02MCAxNi03MiAxMi01MiA3LTM2IDMtMjUtNi0yLTE2LTgtMTItNy0xMi05LTEwLTktOC03LTktOS0xMS0xNC0xMS0xNy0xMC0xOS04LTIwLTYtMjAtMy0xNy0xLTl2LTMwbDMtMjAgNi0yNyA3LTIxIDE3LTMzIDExLTE1IDctOCAxMS0xMiAxNS0xMyAxNi0xMiAyMS0xMiAxOC04IDIyLTYgMTktM3oiIGZpbGw9IiNGRUZGRkUiLz4KPC9zdmc+Cg=="
},
"name": "LogMeOnce",
"icon_dark": "data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjxzdmcgdmVyc2lvbj0iMS4xIiB2aWV3Qm94PSIwIDAgMjA0OCAyMDQ4IiB3aWR0aD0iMTI4IiBoZWlnaHQ9IjEyOCIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZD0iTSAwIDAgTCAtNSAxIEwgLTE1IDQgTCAtMzggMTEgTCAtNTcgMTggTCAtNzYgMjkgTCAtOTAgMzggTCAtMTA5IDUyIEwgLTExNiA1OSBMIC0xMjcgNjggTCAtMTQwIDgxIEwgLTE0OCA4OCBMIC0xNDkgOTEgTCAtMTUxIDkxIEwgLTE2MSAxMDEgTCAtMTY5IDEwOCBMIC0xNjkgMTEwIEwgLTE3MSAxMTAgTCAtMTgwIDExOSBMIC0xODggMTI2IEwgLTIwMSAxMzcgTCAtMjEyIDE0NiBMIC0yMjYgMTU3IEwgLTIzOSAxNjcgTCAtMjUwIDE3NSBMIC0yNzAgMTg4IEwgLTI5MSAyMDIgTCAtMzA5IDIxMiBMIC0zMzAgMjI0IEwgLTM1MyAyMzUgTCAtMzg2IDI1MCBMIC00MDcgMjU5IEwgLTQ0OSAyNzMgTCAtNDcyIDI4MSBMIC01MDIgMjg5IEwgLTU1MSAyOTggTCAtNjE1IDMwOSBMIC02NDcgMzE2IEwgLTY4MSAzMjQgTCAtNjk4IDMzMCBMIC03MTQgMzM4IEwgLTczNSAzNTEgTCAtNzQ1IDM1OCBMIC03NTYgMzY3IEwgLTc2NCAzNzUgTCAtNzY2IDM3OSBMIC03NjggMzc5IEwgLTc3MyAzODUgTCAtNzgxIDM5NSBMIC03OTEgNDEwIEwgLTgwMSA0MjggTCAtODEwIDQ0NyBMIC04MTYgNDY1IEwgLTgyMiA1MDAgTCAtODI3IDU0NSBMIC04MzAgNTkyIEwgLTgzMiA2NTMgTCAtODMyIDcxMiBMIC04MzEgNzQyIEwgLTgyOCA3OTQgTCAtODIyIDg2NiBMIC04MTYgOTI4IEwgLTgxMiA5NTcgTCAtODAwIDEwMjAgTCAtNzg5IDEwNzIgTCAtNzgzIDEwOTggTCAtNzY1IDExNjIgTCAtNzUxIDEyMDcgTCAtNzQ0IDEyMjUgTCAtNzM0IDEyNTQgTCAtNzIwIDEyOTAgTCAtNzA0IDEzMjggTCAtNjg5IDEzNjEgTCAtNjY4IDE0MDMgTCAtNjUzIDE0MzEgTCAtNjM5IDE0NTYgTCAtNjIwIDE0ODggTCAtNjA3IDE1MDkgTCAtNTk0IDE1MjkgTCAtNTg0IDE1NDQgTCAtNTcxIDE1NjMgTCAtNTU4IDE1ODEgTCAtNTQ0IDE2MDAgTCAtNTMzIDE2MTUgTCAtNTIwIDE2MzIgTCAtNTA3IDE2NDggTCAtNDc5IDE2ODIgTCAtNDcwIDE2OTMgTCAtNDYwIDE3MDQgTCAtNDQ5IDE3MTYgTCAtNDQyIDE3MjQgTCAtNDMyIDE3MzQgTCAtNDI1IDE3NDIgTCAtMzY4IDE3OTkgTCAtMzUxIDE4MTUgTCAtMzM1IDE4MzAgTCAtMzI3IDE4MzcgTCAtMzE0IDE4NDkgTCAtMjg4IDE4NzEgTCAtMjcxIDE4ODUgTCAtMjYxIDE4OTMgTCAtMjQ0IDE5MDcgTCAtMjI4IDE5MTkgTCAtMjE1IDE5MjkgTCAtMTk2IDE5NDMgTCAtMTg0IDE5NTIgTCAtMTY4IDE5NjMgTCAtMTQ4IDE5NzcgTCAtMTMzIDE5ODcgTCAtMTEyIDIwMDAgTCAtODggMjAxMyBMIC03MiAyMDIxIEwgLTUyIDIwMzAgTCAtMzIgMjAzNyBMIDEyIDIwNDggTCA2MCAyMDQ4IEwgNjEgMjA0NyBMIDczIDIwNDQgTCAxMDAgMjAzNyBMIDEyMSAyMDI5IEwgMTQ5IDIwMTUgTCAxNjcgMjAwNSBMIDE4NiAxOTk0IEwgMjEwIDE5NzkgTCAyMzEgMTk2NSBMIDI2NiAxOTQxIEwgMjg0IDE5MjggTCAyOTUgMTkyMCBMIDMwNiAxOTExIEwgMzI0IDE4OTggTCAzMzggMTg4NyBMIDM0OSAxODc3IEwgMzU4IDE4NzAgTCAzNjkgMTg2MCBMIDM4MCAxODUxIEwgMzgwIDE4NDkgTCAzODIgMTg0OSBMIDM4NSAxODQ2IEwgMzk2IDE4MzcgTCA0MDMgMTgzMCBMIDQxMSAxODIzIEwgNDE4IDE4MTYgTCA0MjYgMTgwOSBMIDQ3NSAxNzYwIEwgNDc3IDE3NTYgTCA0NzkgMTc1NiBMIDQ4NyAxNzQ3IEwgNTAzIDE3MzAgTCA1MTAgMTcyMiBMIDUxOSAxNzEyIEwgNTI2IDE3MDQgTCA1MzggMTY5MSBMIDU0NyAxNjgwIEwgNTU4IDE2NjcgTCA1NjYgMTY1NiBMIDU3OSAxNjQwIEwgNTkwIDE2MjYgTCA2MDYgMTYwNSBMIDYyMSAxNTgzIEwgNjMxIDE1NjkgTCA2NDUgMTU0OSBMIDY1NSAxNTMzIEwgNjcwIDE1MTAgTCA2ODUgMTQ4NSBMIDY5NiAxNDY3IEwgNzA2IDE0NDkgTCA3MzAgMTQwNSBMIDc0NSAxMzc0IEwgNzYyIDEzMzggTCA3NzQgMTMxMCBMIDc5MiAxMjY3IEwgODExIDEyMTYgTCA4MjcgMTE2NiBMIDg0MiAxMTE3IEwgODU4IDEwNTEgTCA4NzAgOTk2IEwgODc1IDk2NyBMIDg4MSA5MTcgTCA4ODYgODgyIEwgODkxIDg0NiBMIDg5NCA4MTIgTCA4OTYgNzc0IEwgODk2IDY1NSBMIDg5NSA2MzAgTCA4OTIgNTkzIEwgODg4IDU1OSBMIDg3NyA0NzAgTCA4NzIgNDQ1IEwgODY1IDQyNiBMIDg1NyA0MTEgTCA4NDcgMzk1IEwgODM0IDM3OCBMIDgxNCAzNTggTCA4MDAgMzQ3IEwgNzg1IDMzNyBMIDc2NiAzMjcgTCA3NTAgMzIxIEwgNzI3IDMxNSBMIDY5MyAzMDggTCA2NTAgMzAxIEwgNTk3IDI5MiBMIDU3MCAyODYgTCA1NDggMjgwIEwgNTE1IDI3MCBMIDQ4NyAyNjEgTCA0NTkgMjUwIEwgNDM4IDI0MSBMIDQxNCAyMzAgTCAzODIgMjEzIEwgMzU4IDE5OSBMIDM0NyAxOTIgTCAzMzAgMTgwIEwgMzEyIDE2NyBMIDMwMCAxNTcgTCAyODYgMTQ2IEwgMjcxIDEzMyBMIDI2MyAxMjYgTCAyNTEgMTE1IEwgMjQwIDEwNiBMIDIyOCA5NSBMIDIwMCA3MSBMIDE4OSA2MiBMIDE3NCA1MCBMIDE1OCAzOCBMIDE0MiAyNyBMIDEyNyAxOSBMIDExNSAxNCBMIDk1IDggTCA3MSAwIEw
gMCAwIHogTSAzMCA2MjIgTCA1MSA2MjMgTCA3OSA2MjcgTCAxMDAgNjMzIEwgMTIxIDY0MSBMIDE0MSA2NTIgTCAxNTcgNjYzIEwgMTcyIDY3NiBMIDE4NiA2OTAgTCAxODYgNjkyIEwgMTg4IDY5MiBMIDIwNiA3MTYgTCAyMTYgNzM0IEwgMjIxIDc0NSBMIDIyNiA3NTggTCAyMzMgNzg0IEwgMjM2IDc5OCBMIDIzNyA4MDggTCAyMzcgODQwIEwgMjMzIDg2NSBMIDIyNiA4OTEgTCAyMTkgOTA5IEwgMjA5IDkyOCBMIDE5NSA5NDkgTCAxODYgOTU5IEwgMTc5IDk2NyBMIDE2NiA5ODAgTCAxNTUgOTg5IEwgMTQyIDk5OCBMIDEyNiAxMDA3IEwgMTEzIDEwMTIgTCAxMTQgMTAyNCBMIDEyMiAxMDYwIEwgMTM3IDExMjMgTCAxNTYgMTIwOCBMIDE3MCAxMjcxIEwgMTc0IDEyOTYgTCAxNzYgMTMxMCBMIDE3NiAxMzM1IEwgMTczIDEzNDkgTCAxNjYgMTM1OSBMIDE1NyAxMzY3IEwgMTQ2IDEzNzIgTCAxNDAgMTM3NCBMIDExOSAxMzc2IEwgNTEgMTM3NiBMIC0zMiAxMzc3IEwgLTY0IDEzNzcgTCAtNzggMTM3NCBMIC04NSAxMzcwIEwgLTk4IDEzNTkgTCAtMTA2IDEzNDkgTCAtMTEwIDEzNDEgTCAtMTEyIDEzMzMgTCAtMTEyIDEzMTggTCAtMTA3IDEyODggTCAtMTAwIDEyNTYgTCAtODUgMTE5NiBMIC02OSAxMTI0IEwgLTU3IDEwNzIgTCAtNTAgMTAzNiBMIC00NyAxMDExIEwgLTUzIDEwMDkgTCAtNjkgMTAwMSBMIC04MSA5OTQgTCAtOTMgOTg1IEwgLTEwMyA5NzYgTCAtMTExIDk2OSBMIC0xMjAgOTYwIEwgLTEzMSA5NDYgTCAtMTQyIDkyOSBMIC0xNTIgOTEwIEwgLTE2MCA4OTAgTCAtMTY2IDg3MCBMIC0xNjkgODUzIEwgLTE3MCA4NDQgTCAtMTcwIDgxNCBMIC0xNjcgNzk0IEwgLTE2MSA3NjcgTCAtMTU0IDc0NiBMIC0xMzcgNzEzIEwgLTEyNiA2OTggTCAtMTE5IDY5MCBMIC0xMDggNjc4IEwgLTkzIDY2NSBMIC03NyA2NTMgTCAtNTYgNjQxIEwgLTM4IDYzMyBMIC0xNiA2MjcgTCAzIDYyNCBMIDMwIDYyMiB6ICIgdHJhbnNmb3JtPSJ0cmFuc2xhdGUoOTkxKSIgc3R5bGU9ImZpbGw6I2ZmZmZmZiIgLz4KPC9zdmc+Cg==",
"icon_light": "data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHN2ZyB2ZXJzaW9uPSIxLjEiIHZpZXdCb3g9IjAgMCAyMDQ4IDIwNDgiIHdpZHRoPSIxMjgiIGhlaWdodD0iMTI4IiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciPgo8cGF0aCB0cmFuc2Zvcm09InRyYW5zbGF0ZSg5OTEpIiBkPSJtMCAwaDcxbDI0IDggMjAgNiAxMiA1IDE1IDggMTYgMTEgMTYgMTIgMTUgMTIgMTEgOSAyOCAyNCAxMiAxMSAxMSA5IDEyIDExIDggNyAxNSAxMyAxNCAxMSAxMiAxMCAxOCAxMyAxNyAxMiAxMSA3IDI0IDE0IDMyIDE3IDI0IDExIDIxIDkgMjggMTEgMjggOSAzMyAxMCAyMiA2IDI3IDYgNTMgOSA0MyA3IDM0IDcgMjMgNiAxNiA2IDE5IDEwIDE1IDEwIDE0IDExIDIwIDIwIDEzIDE3IDEwIDE2IDggMTUgNyAxOSA1IDI1IDExIDg5IDQgMzQgMyAzNyAxIDI1djExOWwtMiAzOC0zIDM0LTUgMzYtNSAzNS02IDUwLTUgMjktMTIgNTUtMTYgNjYtMTUgNDktMTYgNTAtMTkgNTEtMTggNDMtMTIgMjgtMTcgMzYtMTUgMzEtMjQgNDQtMTAgMTgtMTEgMTgtMTUgMjUtMTUgMjMtMTAgMTYtMTQgMjAtMTAgMTQtMTUgMjItMTYgMjEtMTEgMTQtMTMgMTYtOCAxMS0xMSAxMy05IDExLTEyIDEzLTcgOC05IDEwLTcgOC0xNiAxNy04IDloLTJsLTIgNC00OSA0OS04IDctNyA3LTggNy03IDctMTEgOS0zIDNoLTJ2MmwtMTEgOS0xMSAxMC05IDctMTEgMTAtMTQgMTEtMTggMTMtMTEgOS0xMSA4LTE4IDEzLTM1IDI0LTIxIDE0LTI0IDE1LTE5IDExLTE4IDEwLTI4IDE0LTIxIDgtMjcgNy0xMiAzLTEgMWgtNDhsLTQ0LTExLTIwLTctMjAtOS0xNi04LTI0LTEzLTIxLTEzLTE1LTEwLTIwLTE0LTE2LTExLTEyLTktMTktMTQtMTMtMTAtMTYtMTItMTctMTQtMTAtOC0xNy0xNC0yNi0yMi0xMy0xMi04LTctMTYtMTUtMTctMTYtNTctNTctNy04LTEwLTEwLTctOC0xMS0xMi0xMC0xMS05LTExLTI4LTM0LTEzLTE2LTEzLTE3LTExLTE1LTE0LTE5LTEzLTE4LTEzLTE5LTEwLTE1LTEzLTIwLTEzLTIxLTE5LTMyLTE0LTI1LTE1LTI4LTIxLTQyLTE1LTMzLTE2LTM4LTE0LTM2LTEwLTI5LTctMTgtMTQtNDUtMTgtNjQtNi0yNi0xMS01Mi0xMi02My00LTI5LTYtNjItNi03Mi0zLTUyLTEtMzB2LTU5bDItNjEgMy00NyA1LTQ1IDYtMzUgNi0xOCA5LTE5IDEwLTE4IDEwLTE1IDgtMTAgNS02aDJsMi00IDgtOCAxMS05IDEwLTcgMjEtMTMgMTYtOCAxNy02IDM0LTggMzItNyA2NC0xMSA0OS05IDMwLTggMjMtOCA0Mi0xNCAyMS05IDMzLTE1IDIzLTExIDIxLTEyIDE4LTEwIDIxLTE0IDIwLTEzIDExLTggMTMtMTAgMTQtMTEgMTEtOSAxMy0xMSA4LTcgOS05aDJ2LTJsOC03IDEwLTEwaDJsMS0zIDgtNyAxMy0xMyAxMS05IDctNyAxOS0xNCAxNC05IDE5LTExIDE5LTcgMjMtNyAxMC0zeiIgZmlsbD0iI0YxODQyOSIvPgo8cGF0aCB0cmFuc2Zvcm09InRyYW5zbGF0ZSgxMDIxLDYyMikiIGQ9Im0wIDAgMjEgMSAyOCA0IDIxIDYgMjEgOCAyMCAxMSAxNiAxMSAxNSAxMyAxNCAxNHYyaDJsMTggMjQgMTAgMTggNSAxMSA1IDEzIDcgMjYgMyAxNCAxIDEwdjMybC00IDI1LTcgMjYtNyAxOC0xMCAxOS0xNCAyMS05IDEwLTcgOC0xMyAxMy0xMSA5LTEzIDktMTYgOS0xMyA1IDEgMTIgOCAzNiAxNSA2MyAxOSA4NSAxNCA2MyA0IDI1IDIgMTR2MjVsLTMgMTQtNyAxMC05IDgtMTEgNS02IDItMjEgMmgtNjhsLTgzIDFoLTMybC0xNC0zLTctNC0xMy0xMS04LTEwLTQtOC0yLTh2LTE1bDUtMzAgNy0zMiAxNS02MCAxNi03MiAxMi01MiA3LTM2IDMtMjUtNi0yLTE2LTgtMTItNy0xMi05LTEwLTktOC03LTktOS0xMS0xNC0xMS0xNy0xMC0xOS04LTIwLTYtMjAtMy0xNy0xLTl2LTMwbDMtMjAgNi0yNyA3LTIxIDE3LTMzIDExLTE1IDctOCAxMS0xMiAxNS0xMyAxNi0xMiAyMS0xMiAxOC04IDIyLTYgMTktM3oiIGZpbGw9IiNGRUZGRkUiLz4KPC9zdmc+Cg=="
},
"a10c6dd9-465e-4226-8198-c7c44b91c555": {
"name": "Kaspersky Password Manager",
"icon_dark": "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNTEyIiBoZWlnaHQ9IjUxMiIgdmlld0JveD0iMCAwIDUxMiA1MTIiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+PGcgY2xpcC1wYXRoPSJ1cmwoI2NsaXAwXzc0ODRfODc0NSkiPjxnIGNsaXAtcGF0aD0idXJsKCNjbGlwMV83NDg0Xzg3NDUpIj48ZyBjbGlwLXBhdGg9InVybCgjY2xpcDJfNzQ4NF84NzQ1KSI+PHBhdGggZD0iTTI4MS45NjQgNi45NjE3MUMyNjUuODkzIC0yLjI5OTc0IDI0Ni4xMDcgLTIuMjk5NzQgMjMwLjAzNiA2Ljk2MTcxTDQ2LjAzNjUgMTEzLjAwMkMyOS45MjcxIDEyMi4yODYgMjAgMTM5LjQ2NiAyMCAxNTguMDZWMzUzLjk3MkMyMCAzNzIuNTY2IDI5LjkyNzEgMzg5Ljc0NSA0Ni4wMzY1IDM5OS4wMjlMMjMwLjAzNiA1MDUuMDdDMjQ2LjEwNyA1MTQuMzMxIDI2NS44OTMgNTE0LjMzMSAyODEuOTY0IDUwNS4wN0w0NjUuOTY0IDM5OS4wMjlDNDgyLjA3MyAzODkuNzQ1IDQ5MiAzNzIuNTY2IDQ5MiAzNTMuOTcyVjE1OC4wNkM0OTIgMTM5LjQ2NiA0ODIuMDczIDEyMi4yODYgNDY1Ljk2NCAxMTMuMDAyTDI4MS45NjQgNi45NjE3MVoiIGZpbGw9InVybCgjcGFpbnQwX2xpbmVhcl83NDg0Xzg3NDUpIi8+PHBhdGggZmlsbC1ydWxlPSJldmVub2RkIiBjbGlwLXJ1bGU9ImV2ZW5vZGQiIGQ9Ik00NTMuOTggMTMzLjc5OEwyNjkuOTggMjcuNzU3NEMyNjEuMzI3IDIyLjc3MDQgMjUwLjY3MyAyMi43NzA0IDI0Mi4wMiAyNy43NTc0TDU4LjAxOTYgMTMzLjc5OEM0OS4zNDUzIDEzOC43OTcgNDQgMTQ4LjA0NyA0NCAxNTguMDZWMzUzLjk3MkM0NCAzNjMuOTg0IDQ5LjM0NTMgMzczLjIzNCA1OC4wMTk2IDM3OC4yMzNMMjQyLjAyIDQ4NC4yNzRDMjUwLjY3MyA0ODkuMjYxIDI2MS4zMjcgNDg5LjI2MSAyNjkuOTggNDg0LjI3NEw0NTMuOTggMzc4LjIzM0M0NjIuNjU1IDM3My4yMzQgNDY4IDM2My45ODQgNDY4IDM1My45NzJWMTU4LjA2QzQ2OCAxNDguMDQ3IDQ2Mi42NTUgMTM4Ljc5NyA0NTMuOTggMTMzLjc5OFpNMjgxLjk2NCA2Ljk2MTcxQzI2NS44OTMgLTIuMjk5NzQgMjQ2LjEwNyAtMi4yOTk3NCAyMzAuMDM2IDYuOTYxNzFMNDYuMDM2NSAxMTMuMDAyQzI5LjkyNzEgMTIyLjI4NiAyMCAxMzkuNDY2IDIwIDE1OC4wNlYzNTMuOTcyQzIwIDM3Mi41NjYgMjkuOTI3MSAzODkuNzQ1IDQ2LjAzNjUgMzk5LjAyOUwyMzAuMDM2IDUwNS4wN0MyNDYuMTA3IDUxNC4zMzEgMjY1Ljg5MyA1MTQuMzMxIDI4MS45NjQgNTA1LjA3TDQ2NS45NjQgMzk5LjAyOUM0ODIuMDczIDM4OS43NDUgNDkyIDM3Mi41NjYgNDkyIDM1My45NzJWMTU4LjA2QzQ5MiAxMzkuNDY2IDQ4Mi4wNzMgMTIyLjI4NiA0NjUuOTY0IDExMy4wMDJMMjgxLjk2NCA2Ljk2MTcxWiIgZmlsbD0iYmxhY2siLz48L2c+PHBhdGggZmlsbC1ydWxlPSJldmVub2RkIiBjbGlwLXJ1bGU9ImV2ZW5vZGQiIGQ9Ik0yNTYgMTQwQzI0Mi43NDUgMTQwIDIzMiAxNTAuNzQ1IDIzMiAxNjRDMjMyIDE3Ny4yNTUgMjQyLjc0NSAxODggMjU2IDE4OEMyNjkuMjU1IDE4OCAyODAgMTc3LjI1NSAyODAgMTY0QzI4MCAxNTAuNzQ1IDI2OS4yNTUgMTQwIDI1NiAxNDBaTTI0OCAxNjRDMjQ4IDE1OS41ODIgMjUxLjU4MiAxNTYgMjU2IDE1NkMyNjAuNDE4IDE1NiAyNjQgMTU5LjU4MiAyNjQgMTY0QzI2NCAxNjguNDE4IDI2MC40MTggMTcyIDI1NiAxNzJDMjUxLjU4MiAxNzIgMjQ4IDE2OC40MTggMjQ4IDE2NFoiIGZpbGw9ImJsYWNrIi8+PHBhdGggZmlsbC1ydWxlPSJldmVub2RkIiBjbGlwLXJ1bGU9ImV2ZW5vZGQiIGQ9Ik0xNzIgMTkyQzE3MiAxNDUuNjA4IDIwOS42MDggMTA4IDI1NiAxMDhDMzAyLjM5MiAxMDggMzQwIDE0NS42MDggMzQwIDE5MkMzNDAgMjI3LjA2MSAzMTguNTE5IDI1Ny4xMDUgMjg4IDI2OS42OVYzODYuNjdDMjg4IDM5Mi4zOTEgMjg0Ljk0NiAzOTcuNjc2IDI3OS45ODkgNDAwLjUzM0wyNjMuOTg5IDQwOS43NTNDMjU5LjA0NCA0MTIuNjAzIDI1Mi45NTYgNDEyLjYwMyAyNDguMDExIDQwOS43NTNMMjMyLjAxMSA0MDAuNTMzQzIyNy4wNTQgMzk3LjY3NiAyMjQgMzkyLjM5MSAyMjQgMzg2LjY3VjM3MkMyMjQgMzY5Ljg3OCAyMjQuODQzIDM2Ny44NDQgMjI2LjM0MyAzNjYuMzQzTDIzNiAzNTYuNjg2VjM1NS4zMTRMMjI2LjM0MyAzNDUuNjU3QzIyNC44NDMgMzQ0LjE1NyAyMjQgMzQyLjEyMiAyMjQgMzQwVjMzMkMyMjQgMzI5Ljg3OCAyMjQuODQzIDMyNy44NDQgMjI2LjM0MyAzMjYuMzQzTDIzNiAzMTYuNjg2VjMxNS4zMTRMMjI2LjM0MyAzMDUuNjU3QzIyNC44NDMgMzA0LjE1NyAyMjQgMzAyLjEyMiAyMjQgMzAwVjI2OS42OUMxOTMuNDgxIDI1Ny4xMDUgMTcyIDIyNy4wNjEgMTcyIDE5MlpNMjU2IDEyNEMyMTguNDQ1IDEyNCAxODggMTU0LjQ0NSAxODggMTkyQzE4OCAyMjkuNTU1IDIxOC40NDUgMjYwIDI1NiAyNjBDMjkzLjU1NSAyNjAgMzI0IDIyOS41NTUgMzI0IDE5MkMzMjQgMTU0LjQ0NSAyOTMuNTU1IDEyNCAyNTYgMTI0Wk0yNTYgMjc2QzI2MS40NzEgMjc2IDI2Ni44MiAyNzUuNDc3IDI3MiAyNzQuNDc4VjM4Ni42N0wyNTYgMzk1Ljg5TDI0MCAzODYuNjdWMzc1LjMxNEwyNDkuNjU3IDM2NS42NTdDMjUxLjE1NyAzNjQuMTU3IDI1MiAzNjIuMTIyIDI1MiAzNjBWMzUyQzI1MiAzNDkuODc4IDI1MS4
xNTcgMzQ3Ljg0NCAyNDkuNjU3IDM0Ni4zNDNMMjQwIDMzNi42ODZWMzM1LjMxNEwyNDkuNjU3IDMyNS42NTdDMjUxLjE1NyAzMjQuMTU3IDI1MiAzMjIuMTIyIDI1MiAzMjBWMzEyQzI1MiAzMDkuODc4IDI1MS4xNTcgMzA3Ljg0NCAyNDkuNjU3IDMwNi4zNDNMMjQwIDI5Ni42ODZWMjc0LjQ3OEMyNDUuMTggMjc1LjQ3NyAyNTAuNTI5IDI3NiAyNTYgMjc2WiIgZmlsbD0iYmxhY2siLz48L2c+PC9nPjxkZWZzPjxsaW5lYXJHcmFkaWVudCBpZD0icGFpbnQwX2xpbmVhcl83NDg0Xzg3NDUiIHgxPSIzOTMuODY1IiB5MT0iNjMuMjc5NiIgeDI9Ijk5LjIwNDMiIHkyPSI0MjkuOTk4IiBncmFkaWVudFVuaXRzPSJ1c2VyU3BhY2VPblVzZSI+PHN0b3Agc3RvcC1jb2xvcj0iIzRERkY4OCIvPjxzdG9wIG9mZnNldD0iMSIgc3RvcC1jb2xvcj0iIzNERThDQSIvPjwvbGluZWFyR3JhZGllbnQ+PGNsaXBQYXRoIGlkPSJjbGlwMF83NDg0Xzg3NDUiPjxyZWN0IHdpZHRoPSI1MTIiIGhlaWdodD0iNTEyIiBmaWxsPSJ3aGl0ZSIvPjwvY2xpcFBhdGg+PGNsaXBQYXRoIGlkPSJjbGlwMV83NDg0Xzg3NDUiPjxyZWN0IHdpZHRoPSI1MTIiIGhlaWdodD0iNTEyIiBmaWxsPSJ3aGl0ZSIvPjwvY2xpcFBhdGg+PGNsaXBQYXRoIGlkPSJjbGlwMl83NDg0Xzg3NDUiPjxyZWN0IHdpZHRoPSI1MTIiIGhlaWdodD0iNTEyIiBmaWxsPSJ3aGl0ZSIvPjwvY2xpcFBhdGg+PC9kZWZzPjwvc3ZnPg==",
"icon_light": "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNTEyIiBoZWlnaHQ9IjUxMiIgdmlld0JveD0iMCAwIDUxMiA1MTIiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+PGcgY2xpcC1wYXRoPSJ1cmwoI2NsaXAwXzc0ODRfODc0NSkiPjxnIGNsaXAtcGF0aD0idXJsKCNjbGlwMV83NDg0Xzg3NDUpIj48ZyBjbGlwLXBhdGg9InVybCgjY2xpcDJfNzQ4NF84NzQ1KSI+PHBhdGggZD0iTTI4MS45NjQgNi45NjE3MUMyNjUuODkzIC0yLjI5OTc0IDI0Ni4xMDcgLTIuMjk5NzQgMjMwLjAzNiA2Ljk2MTcxTDQ2LjAzNjUgMTEzLjAwMkMyOS45MjcxIDEyMi4yODYgMjAgMTM5LjQ2NiAyMCAxNTguMDZWMzUzLjk3MkMyMCAzNzIuNTY2IDI5LjkyNzEgMzg5Ljc0NSA0Ni4wMzY1IDM5OS4wMjlMMjMwLjAzNiA1MDUuMDdDMjQ2LjEwNyA1MTQuMzMxIDI2NS44OTMgNTE0LjMzMSAyODEuOTY0IDUwNS4wN0w0NjUuOTY0IDM5OS4wMjlDNDgyLjA3MyAzODkuNzQ1IDQ5MiAzNzIuNTY2IDQ5MiAzNTMuOTcyVjE1OC4wNkM0OTIgMTM5LjQ2NiA0ODIuMDczIDEyMi4yODYgNDY1Ljk2NCAxMTMuMDAyTDI4MS45NjQgNi45NjE3MVoiIGZpbGw9InVybCgjcGFpbnQwX2xpbmVhcl83NDg0Xzg3NDUpIi8+PHBhdGggZmlsbC1ydWxlPSJldmVub2RkIiBjbGlwLXJ1bGU9ImV2ZW5vZGQiIGQ9Ik00NTMuOTggMTMzLjc5OEwyNjkuOTggMjcuNzU3NEMyNjEuMzI3IDIyLjc3MDQgMjUwLjY3MyAyMi43NzA0IDI0Mi4wMiAyNy43NTc0TDU4LjAxOTYgMTMzLjc5OEM0OS4zNDUzIDEzOC43OTcgNDQgMTQ4LjA0NyA0NCAxNTguMDZWMzUzLjk3MkM0NCAzNjMuOTg0IDQ5LjM0NTMgMzczLjIzNCA1OC4wMTk2IDM3OC4yMzNMMjQyLjAyIDQ4NC4yNzRDMjUwLjY3MyA0ODkuMjYxIDI2MS4zMjcgNDg5LjI2MSAyNjkuOTggNDg0LjI3NEw0NTMuOTggMzc4LjIzM0M0NjIuNjU1IDM3My4yMzQgNDY4IDM2My45ODQgNDY4IDM1My45NzJWMTU4LjA2QzQ2OCAxNDguMDQ3IDQ2Mi42NTUgMTM4Ljc5NyA0NTMuOTggMTMzLjc5OFpNMjgxLjk2NCA2Ljk2MTcxQzI2NS44OTMgLTIuMjk5NzQgMjQ2LjEwNyAtMi4yOTk3NCAyMzAuMDM2IDYuOTYxNzFMNDYuMDM2NSAxMTMuMDAyQzI5LjkyNzEgMTIyLjI4NiAyMCAxMzkuNDY2IDIwIDE1OC4wNlYzNTMuOTcyQzIwIDM3Mi41NjYgMjkuOTI3MSAzODkuNzQ1IDQ2LjAzNjUgMzk5LjAyOUwyMzAuMDM2IDUwNS4wN0MyNDYuMTA3IDUxNC4zMzEgMjY1Ljg5MyA1MTQuMzMxIDI4MS45NjQgNTA1LjA3TDQ2NS45NjQgMzk5LjAyOUM0ODIuMDczIDM4OS43NDUgNDkyIDM3Mi41NjYgNDkyIDM1My45NzJWMTU4LjA2QzQ5MiAxMzkuNDY2IDQ4Mi4wNzMgMTIyLjI4NiA0NjUuOTY0IDExMy4wMDJMMjgxLjk2NCA2Ljk2MTcxWiIgZmlsbD0iYmxhY2siLz48L2c+PHBhdGggZmlsbC1ydWxlPSJldmVub2RkIiBjbGlwLXJ1bGU9ImV2ZW5vZGQiIGQ9Ik0yNTYgMTQwQzI0Mi43NDUgMTQwIDIzMiAxNTAuNzQ1IDIzMiAxNjRDMjMyIDE3Ny4yNTUgMjQyLjc0NSAxODggMjU2IDE4OEMyNjkuMjU1IDE4OCAyODAgMTc3LjI1NSAyODAgMTY0QzI4MCAxNTAuNzQ1IDI2OS4yNTUgMTQwIDI1NiAxNDBaTTI0OCAxNjRDMjQ4IDE1OS41ODIgMjUxLjU4MiAxNTYgMjU2IDE1NkMyNjAuNDE4IDE1NiAyNjQgMTU5LjU4MiAyNjQgMTY0QzI2NCAxNjguNDE4IDI2MC40MTggMTcyIDI1NiAxNzJDMjUxLjU4MiAxNzIgMjQ4IDE2OC40MTggMjQ4IDE2NFoiIGZpbGw9ImJsYWNrIi8+PHBhdGggZmlsbC1ydWxlPSJldmVub2RkIiBjbGlwLXJ1bGU9ImV2ZW5vZGQiIGQ9Ik0xNzIgMTkyQzE3MiAxNDUuNjA4IDIwOS42MDggMTA4IDI1NiAxMDhDMzAyLjM5MiAxMDggMzQwIDE0NS42MDggMzQwIDE5MkMzNDAgMjI3LjA2MSAzMTguNTE5IDI1Ny4xMDUgMjg4IDI2OS42OVYzODYuNjdDMjg4IDM5Mi4zOTEgMjg0Ljk0NiAzOTcuNjc2IDI3OS45ODkgNDAwLjUzM0wyNjMuOTg5IDQwOS43NTNDMjU5LjA0NCA0MTIuNjAzIDI1Mi45NTYgNDEyLjYwMyAyNDguMDExIDQwOS43NTNMMjMyLjAxMSA0MDAuNTMzQzIyNy4wNTQgMzk3LjY3NiAyMjQgMzkyLjM5MSAyMjQgMzg2LjY3VjM3MkMyMjQgMzY5Ljg3OCAyMjQuODQzIDM2Ny44NDQgMjI2LjM0MyAzNjYuMzQzTDIzNiAzNTYuNjg2VjM1NS4zMTRMMjI2LjM0MyAzNDUuNjU3QzIyNC44NDMgMzQ0LjE1NyAyMjQgMzQyLjEyMiAyMjQgMzQwVjMzMkMyMjQgMzI5Ljg3OCAyMjQuODQzIDMyNy44NDQgMjI2LjM0MyAzMjYuMzQzTDIzNiAzMTYuNjg2VjMxNS4zMTRMMjI2LjM0MyAzMDUuNjU3QzIyNC44NDMgMzA0LjE1NyAyMjQgMzAyLjEyMiAyMjQgMzAwVjI2OS42OUMxOTMuNDgxIDI1Ny4xMDUgMTcyIDIyNy4wNjEgMTcyIDE5MlpNMjU2IDEyNEMyMTguNDQ1IDEyNCAxODggMTU0LjQ0NSAxODggMTkyQzE4OCAyMjkuNTU1IDIxOC40NDUgMjYwIDI1NiAyNjBDMjkzLjU1NSAyNjAgMzI0IDIyOS41NTUgMzI0IDE5MkMzMjQgMTU0LjQ0NSAyOTMuNTU1IDEyNCAyNTYgMTI0Wk0yNTYgMjc2QzI2MS40NzEgMjc2IDI2Ni44MiAyNzUuNDc3IDI3MiAyNzQuNDc4VjM4Ni42N0wyNTYgMzk1Ljg5TDI0MCAzODYuNjdWMzc1LjMxNEwyNDkuNjU3IDM2NS42NTdDMjUxLjE1NyAzNjQuMTU3IDI1MiAzNjIuMTIyIDI1MiAzNjBWMzUyQzI1MiAzNDkuODc4IDI1MS
4xNTcgMzQ3Ljg0NCAyNDkuNjU3IDM0Ni4zNDNMMjQwIDMzNi42ODZWMzM1LjMxNEwyNDkuNjU3IDMyNS42NTdDMjUxLjE1NyAzMjQuMTU3IDI1MiAzMjIuMTIyIDI1MiAzMjBWMzEyQzI1MiAzMDkuODc4IDI1MS4xNTcgMzA3Ljg0NCAyNDkuNjU3IDMwNi4zNDNMMjQwIDI5Ni42ODZWMjc0LjQ3OEMyNDUuMTggMjc1LjQ3NyAyNTAuNTI5IDI3NiAyNTYgMjc2WiIgZmlsbD0iYmxhY2siLz48L2c+PC9nPjxkZWZzPjxsaW5lYXJHcmFkaWVudCBpZD0icGFpbnQwX2xpbmVhcl83NDg0Xzg3NDUiIHgxPSIzOTMuODY1IiB5MT0iNjMuMjc5NiIgeDI9Ijk5LjIwNDMiIHkyPSI0MjkuOTk4IiBncmFkaWVudFVuaXRzPSJ1c2VyU3BhY2VPblVzZSI+PHN0b3Agc3RvcC1jb2xvcj0iIzRERkY4OCIvPjxzdG9wIG9mZnNldD0iMSIgc3RvcC1jb2xvcj0iIzNERThDQSIvPjwvbGluZWFyR3JhZGllbnQ+PGNsaXBQYXRoIGlkPSJjbGlwMF83NDg0Xzg3NDUiPjxyZWN0IHdpZHRoPSI1MTIiIGhlaWdodD0iNTEyIiBmaWxsPSJ3aGl0ZSIvPjwvY2xpcFBhdGg+PGNsaXBQYXRoIGlkPSJjbGlwMV83NDg0Xzg3NDUiPjxyZWN0IHdpZHRoPSI1MTIiIGhlaWdodD0iNTEyIiBmaWxsPSJ3aGl0ZSIvPjwvY2xpcFBhdGg+PGNsaXBQYXRoIGlkPSJjbGlwMl83NDg0Xzg3NDUiPjxyZWN0IHdpZHRoPSI1MTIiIGhlaWdodD0iNTEyIiBmaWxsPSJ3aGl0ZSIvPjwvY2xpcFBhdGg+PC9kZWZzPjwvc3ZnPg=="
},
"d350af52-0351-4ba2-acd3-dfeeadc3f764": {
"name": "pwSafe",
"icon_dark": "data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9InllcyI/PjxzdmcgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB4bWxuczpzdmc9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB2ZXJzaW9uPSIxLjEiIGlkPSJzdmcyIiB3aWR0aD0iMTc3Ljc3NzMzIiBoZWlnaHQ9IjE3Ny43NzczMyIgdmlld0JveD0iMCAwIDE3Ny43NzczMyAxNzcuNzc3MzMiIHN0eWxlPSJ6b29tOiAxOyI+PGRlZnMgaWQ9ImRlZnM2Ij48Y2xpcFBhdGggY2xpcFBhdGhVbml0cz0idXNlclNwYWNlT25Vc2UiIGlkPSJjbGlwUGF0aDE2Ij48cGF0aCBkPSJNIDAsMTMyLjMzMyBIIDEzMy4zMzMgViAwIEggMCBaIiBpZD0icGF0aDE0Ij48L3BhdGg+PC9jbGlwUGF0aD48L2RlZnM+PGcgaWQ9Imc4IiB0cmFuc2Zvcm09Im1hdHJpeCgxLjMzMzMzMzMsMCwwLC0xLjMzMzMzMzMsMCwxNzYuNDQ0KSI+PGcgaWQ9ImcxMCI+PGcgaWQ9ImcxMiIgY2xpcC1wYXRoPSJ1cmwoI2NsaXBQYXRoMTYpIj48ZyBpZD0iZzE4IiB0cmFuc2Zvcm09InRyYW5zbGF0ZSgxMTMuMTQ5NCw4NC42NjE2KSI+PHBhdGggZD0iTTAsMCBjLTkuNjIxLDE2LjI2NCAtMjMuNzA0LDI5Ljg4OCAtMzcuMjU0LDQwLjQzMyBjLTEuMDQyLDAuODEyIC0yLjA4MiwxLjYwNSAtMy4xMTQsMi4zOCBjLTEuODExLDEuMzU1IC0zLjYwMywyLjY1NCAtNS4zNjUsMy44OTYgYy0wLjUxOCwtMC4zNTYgLTEuMDM4LC0wLjcxOCAtMS41NjEsLTEuMDg0IGMtMTIuODUyLC05LjAxMiAtMjcuMjkyLC0yMS4wOTggLTM4LjU2NCwtMzUuODM2IGMtMi45NDMsLTMuODQ4IC01LjY3LC03Ljg3NiAtOC4wOTYsLTEyLjA3OCBjLTcuNDUsLTEyLjkwMiAtMTIuMDY1LC0yNy40NDEgLTE0Ljg4NSwtNDEuMzg4IGMtMS41ODIsLTcuODI0IC0yLjU5OSwtMTUuNDYyIC0zLjIzNSwtMjIuNTIxIGMxOC4xNTIsLTguNjMzIDQyLjM0NSwtMTcuMjYgNjYuNTQsLTE3LjI2IGMyMy4yNTcsMCA0Ni41MTQsNy45NzIgNjQuNDA3LDE2LjI1OSBjLTEuNiwyMC4wMzYgLTYuMjI1LDQ1LjMwNCAtMTguMzIyLDY2LjI1NyBDMC4zNjgsLTAuNjI2IDAuMTg2LC0wLjMxMyAwLDAgIiBpZD0icGF0aDIwIiBzdHlsZT0iZmlsbDojMjYzMjQ4O2ZpbGwtb3BhY2l0eToxO2ZpbGwtcnVsZTpub256ZXJvO3N0cm9rZTpub25lIj48L3BhdGg+PC9nPjxnIGlkPSJnMjIiIHRyYW5zZm9ybT0idHJhbnNsYXRlKDk3LjE2MTEsNzQuNDk0NikiPjxwYXRoIGQ9Ik0wLDAgYy02LjQxNCwxMC44NDIgLTE1LjgwMywxOS45MjUgLTI0LjgzNSwyNi45NTUgYy0wLjY5NiwwLjU0MiAtMS4zODgsMS4wNjkgLTIuMDc3LDEuNTg3IGMtMS4yMDYsMC45MDMgLTIuNDAxLDEuNzY5IC0zLjU3NiwyLjU5NyBjLTAuMzQ2LC0wLjIzOCAtMC42OTQsLTAuNDc5IC0xLjA0MSwtMC43MjMgYy04LjU2OSwtNi4wMDggLTE4LjE5NiwtMTQuMDY1IC0yNS43MSwtMjMuODkgYy0xLjk2MiwtMi41NjYgLTMuNzgsLTUuMjUxIC01LjM5NywtOC4wNTIgYy00Ljk2NywtOC42MDEgLTguMDQzLC0xOC4yOTQgLTkuOTI0LC0yNy41OTMgYy0xLjA1NSwtNS4yMTYgLTEuNzMyLC0xMC4zMDggLTIuMTU2LC0xNS4wMTMgYzEyLjEwMiwtNS43NTUgMjguMjMxLC0xMS41MDYgNDQuMzYxLC0xMS41MDYgYzE1LjUwMywtMC4wMDEgMzEuMDA4LDUuMzE0IDQyLjkzNywxMC44MzkgQzExLjUxNiwtMzEuNDQyIDguNDMzLC0xNC41OTcgMC4zNjcsLTAuNjI3IEMwLjI0NiwtMC40MTggMC4xMjQsLTAuMjA5IDAsMCAiIGlkPSJwYXRoMjQiIHN0eWxlPSJmaWxsOiM3Zjg5YTE7ZmlsbC1vcGFjaXR5OjE7ZmlsbC1ydWxlOm5vbnplcm87c3Ryb2tlOm5vbmUiPjwvcGF0aD48L2c+PGcgaWQ9ImcyNiIgdHJhbnNmb3JtPSJ0cmFuc2xhdGUoODEuNjI4OSw2NS4zMzc5KSI+PHBhdGggZD0iTTAsMC43NTAwNSBjLTMuMjA4LDUuNDIxIC03LjkwMiw5Ljk2MiAtMTIuNDIsMTMuNDc4IGMtMC4zNDcsMC4yNyAtMC42OTMsMC41MzQgLTEuMDM4LDAuNzkzIGMtMC42MDQsMC40NTIgLTEuMiwwLjg4NSAtMS43ODcsMS4yOTkgYy0wLjE3MywtMC4xMiAtMC4zNDgsLTAuMjQgLTAuNTIxLC0wLjM2MiBjLTQuMjg0LC0zLjAwNCAtOS4wOTcsLTcuMDMyIC0xMi44NTUsLTExLjk0NSBjLTAuOTgxLC0xLjI4MyAtMS44OSwtMi42MjYgLTIuNjk4LC00LjAyNyBjLTIuNDgzLC00LjI5OSAtNC4wMjIsLTkuMTQ3IC00Ljk2MiwtMTMuNzk2IGMtMC41MjcsLTIuNjA4IC0wLjg2NywtNS4xNTQgLTEuMDc4LC03LjUwNiBjNi4wNSwtMi44NzggMTQuMTE1LC01Ljc1MyAyMi4xOCwtNS43NTMgYzcuNzUyLDAgMTUuNTA0LDIuNjU3IDIxLjQ2OSw1LjQyIEM1Ljc1OCwtMTQuOTcxOTUxIDQuMjE1LC02LjU0Nzk1IDAuMTgyLDAuNDM2MDUgQzAuMTIyLDAuNTQyMDUgMC4wNjEsMC42NDYwNSAwLDAuNzUwMDUgIiBpZD0icGF0aDI4IiBzdHlsZT0iZmlsbDojZjc5NzFkO2ZpbGwtb3BhY2l0eToxO2ZpbGwtcnVsZTpub256ZXJvO3N0cm9rZTpub25lIj48L3BhdGg+PC9nPjwvZz48L2c+PC9nPjwvc3ZnPg==",
"icon_light": "data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9InllcyI/PjxzdmcgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB4bWxuczpzdmc9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB2ZXJzaW9uPSIxLjEiIGlkPSJzdmcyIiB3aWR0aD0iMTc3Ljc3NzMzIiBoZWlnaHQ9IjE3Ny43NzczMyIgdmlld0JveD0iMCAwIDE3Ny43NzczMyAxNzcuNzc3MzMiIHN0eWxlPSJ6b29tOiAxOyI+PGRlZnMgaWQ9ImRlZnM2Ij48Y2xpcFBhdGggY2xpcFBhdGhVbml0cz0idXNlclNwYWNlT25Vc2UiIGlkPSJjbGlwUGF0aDE2Ij48cGF0aCBkPSJNIDAsMTMyLjMzMyBIIDEzMy4zMzMgViAwIEggMCBaIiBpZD0icGF0aDE0Ij48L3BhdGg+PC9jbGlwUGF0aD48L2RlZnM+PGcgaWQ9Imc4IiB0cmFuc2Zvcm09Im1hdHJpeCgxLjMzMzMzMzMsMCwwLC0xLjMzMzMzMzMsMCwxNzYuNDQ0KSI+PGcgaWQ9ImcxMCI+PGcgaWQ9ImcxMiIgY2xpcC1wYXRoPSJ1cmwoI2NsaXBQYXRoMTYpIj48ZyBpZD0iZzE4IiB0cmFuc2Zvcm09InRyYW5zbGF0ZSgxMTMuMTQ5NCw4NC42NjE2KSI+PHBhdGggZD0iTTAsMCBjLTkuNjIxLDE2LjI2NCAtMjMuNzA0LDI5Ljg4OCAtMzcuMjU0LDQwLjQzMyBjLTEuMDQyLDAuODEyIC0yLjA4MiwxLjYwNSAtMy4xMTQsMi4zOCBjLTEuODExLDEuMzU1IC0zLjYwMywyLjY1NCAtNS4zNjUsMy44OTYgYy0wLjUxOCwtMC4zNTYgLTEuMDM4LC0wLjcxOCAtMS41NjEsLTEuMDg0IGMtMTIuODUyLC05LjAxMiAtMjcuMjkyLC0yMS4wOTggLTM4LjU2NCwtMzUuODM2IGMtMi45NDMsLTMuODQ4IC01LjY3LC03Ljg3NiAtOC4wOTYsLTEyLjA3OCBjLTcuNDUsLTEyLjkwMiAtMTIuMDY1LC0yNy40NDEgLTE0Ljg4NSwtNDEuMzg4IGMtMS41ODIsLTcuODI0IC0yLjU5OSwtMTUuNDYyIC0zLjIzNSwtMjIuNTIxIGMxOC4xNTIsLTguNjMzIDQyLjM0NSwtMTcuMjYgNjYuNTQsLTE3LjI2IGMyMy4yNTcsMCA0Ni41MTQsNy45NzIgNjQuNDA3LDE2LjI1OSBjLTEuNiwyMC4wMzYgLTYuMjI1LDQ1LjMwNCAtMTguMzIyLDY2LjI1NyBDMC4zNjgsLTAuNjI2IDAuMTg2LC0wLjMxMyAwLDAgIiBpZD0icGF0aDIwIiBzdHlsZT0iZmlsbDojMjYzMjQ4O2ZpbGwtb3BhY2l0eToxO2ZpbGwtcnVsZTpub256ZXJvO3N0cm9rZTpub25lIj48L3BhdGg+PC9nPjxnIGlkPSJnMjIiIHRyYW5zZm9ybT0idHJhbnNsYXRlKDk3LjE2MTEsNzQuNDk0NikiPjxwYXRoIGQ9Ik0wLDAgYy02LjQxNCwxMC44NDIgLTE1LjgwMywxOS45MjUgLTI0LjgzNSwyNi45NTUgYy0wLjY5NiwwLjU0MiAtMS4zODgsMS4wNjkgLTIuMDc3LDEuNTg3IGMtMS4yMDYsMC45MDMgLTIuNDAxLDEuNzY5IC0zLjU3NiwyLjU5NyBjLTAuMzQ2LC0wLjIzOCAtMC42OTQsLTAuNDc5IC0xLjA0MSwtMC43MjMgYy04LjU2OSwtNi4wMDggLTE4LjE5NiwtMTQuMDY1IC0yNS43MSwtMjMuODkgYy0xLjk2MiwtMi41NjYgLTMuNzgsLTUuMjUxIC01LjM5NywtOC4wNTIgYy00Ljk2NywtOC42MDEgLTguMDQzLC0xOC4yOTQgLTkuOTI0LC0yNy41OTMgYy0xLjA1NSwtNS4yMTYgLTEuNzMyLC0xMC4zMDggLTIuMTU2LC0xNS4wMTMgYzEyLjEwMiwtNS43NTUgMjguMjMxLC0xMS41MDYgNDQuMzYxLC0xMS41MDYgYzE1LjUwMywtMC4wMDEgMzEuMDA4LDUuMzE0IDQyLjkzNywxMC44MzkgQzExLjUxNiwtMzEuNDQyIDguNDMzLC0xNC41OTcgMC4zNjcsLTAuNjI3IEMwLjI0NiwtMC40MTggMC4xMjQsLTAuMjA5IDAsMCAiIGlkPSJwYXRoMjQiIHN0eWxlPSJmaWxsOiM3Zjg5YTE7ZmlsbC1vcGFjaXR5OjE7ZmlsbC1ydWxlOm5vbnplcm87c3Ryb2tlOm5vbmUiPjwvcGF0aD48L2c+PGcgaWQ9ImcyNiIgdHJhbnNmb3JtPSJ0cmFuc2xhdGUoODEuNjI4OSw2NS4zMzc5KSI+PHBhdGggZD0iTTAsMC43NTAwNSBjLTMuMjA4LDUuNDIxIC03LjkwMiw5Ljk2MiAtMTIuNDIsMTMuNDc4IGMtMC4zNDcsMC4yNyAtMC42OTMsMC41MzQgLTEuMDM4LDAuNzkzIGMtMC42MDQsMC40NTIgLTEuMiwwLjg4NSAtMS43ODcsMS4yOTkgYy0wLjE3MywtMC4xMiAtMC4zNDgsLTAuMjQgLTAuNTIxLC0wLjM2MiBjLTQuMjg0LC0zLjAwNCAtOS4wOTcsLTcuMDMyIC0xMi44NTUsLTExLjk0NSBjLTAuOTgxLC0xLjI4MyAtMS44OSwtMi42MjYgLTIuNjk4LC00LjAyNyBjLTIuNDgzLC00LjI5OSAtNC4wMjIsLTkuMTQ3IC00Ljk2MiwtMTMuNzk2IGMtMC41MjcsLTIuNjA4IC0wLjg2NywtNS4xNTQgLTEuMDc4LC03LjUwNiBjNi4wNSwtMi44NzggMTQuMTE1LC01Ljc1MyAyMi4xOCwtNS43NTMgYzcuNzUyLDAgMTUuNTA0LDIuNjU3IDIxLjQ2OSw1LjQyIEM1Ljc1OCwtMTQuOTcxOTUxIDQuMjE1LC02LjU0Nzk1IDAuMTgyLDAuNDM2MDUgQzAuMTIyLDAuNTQyMDUgMC4wNjEsMC42NDYwNSAwLDAuNzUwMDUgIiBpZD0icGF0aDI4IiBzdHlsZT0iZmlsbDojZjc5NzFkO2ZpbGwtb3BhY2l0eToxO2ZpbGwtcnVsZTpub256ZXJvO3N0cm9rZTpub25lIj48L3BhdGg+PC9nPjwvZz48L2c+PC9nPjwvc3ZnPg=="
}
}

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff