Compare commits

..

10 Commits

SHA1        Message                                        Signed-off-by                           Date
4e9a466d64  re-add paramiko                                Jens Langhammer <jens@goauthentik.io>  2023-04-24 18:10:30 +03:00
            (reverts eb6f515ee0e9ed5196565c79eea9dd5b5cdc7444)
9bd8cfbac0  fix folder perms                               Jens Langhammer <jens@goauthentik.io>  2023-04-24 18:10:30 +03:00
e18c2fe084  fix web issue                                  Jens Langhammer <jens@goauthentik.io>  2023-04-24 18:10:30 +03:00
205f11532f  fix build                                      Jens Langhammer <jens@goauthentik.io>  2023-04-24 18:10:30 +03:00
bc6d66cd88  use open                                       Jens Langhammer <jens@goauthentik.io>  2023-04-24 18:10:30 +03:00
609e9a00b4  what a pointless warning                       Jens Langhammer <jens@goauthentik.io>  2023-04-24 18:10:30 +03:00
d5708d22e0  fix error handling                             Jens Langhammer <jens@goauthentik.io>  2023-04-24 18:10:30 +03:00
71ac1282f9  cleanup                                        Jens Langhammer <jens@goauthentik.io>  2023-04-24 18:10:30 +03:00
cf9d8f64a2  simplify config, adjust perms in dockerfile    Jens Langhammer <jens@goauthentik.io>  2023-04-24 18:10:30 +03:00
1cda01511b  outposts: use native ssh client                Jens Langhammer <jens@goauthentik.io>  2023-04-24 18:10:30 +03:00
207 changed files with 15972 additions and 26010 deletions

View File

@ -19,21 +19,6 @@ Please check the [Contributing guidelines](https://github.com/goauthentik/authen
- Adds breaking change which causes \<issue\>.
## Checklist
## Additional
- [ ] Local tests pass (`ak test authentik/`)
- [ ] The code has been formatted (`make lint-fix`)
If an API change has been made
- [ ] The API schema has been updated (`make gen-build`)
If changes to the frontend have been made
- [ ] The code has been formatted (`make web`)
- [ ] The translation files have been updated (`make i18n-extract`)
If applicable
- [ ] The documentation has been updated
- [ ] The documentation has been formatted (`make website`)
Any further notes or comments you want to make.

View File

@ -1,34 +0,0 @@
name: authentik-translation-advice
on:
pull_request:
branches:
- main
paths:
- "!**"
- "locale/**"
- "web/src/locales/**"
jobs:
post-comment:
runs-on: ubuntu-latest
steps:
- name: Find Comment
uses: peter-evans/find-comment@v2
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: "github-actions[bot]"
body-includes: authentik translations instructions
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v2
with:
comment-id: ${{ steps.fc.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
### authentik translations instructions
Thanks for your pull request!
authentik translations are handled using [Transifex](https://explore.transifex.com/authentik/authentik/). Please edit translations over there and they'll be included automatically.

View File

@ -3,7 +3,10 @@ on:
push:
branches: [main]
paths:
- "locale/**"
- "/locale/"
pull_request:
paths:
- "/locale/"
workflow_dispatch:
env:
@ -21,7 +24,7 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run compile
run: poetry run ak compilemessages
run: poetry run ./manage.py compilemessages
- name: Create Pull Request
uses: peter-evans/create-pull-request@v5
id: cpr

View File

@ -31,7 +31,7 @@ RUN pip install --no-cache-dir poetry && \
poetry export -f requirements.txt --dev --output requirements-dev.txt
# Stage 4: Build go proxy
FROM docker.io/golang:1.20.4-bullseye AS go-builder
FROM docker.io/golang:1.20.3-bullseye AS go-builder
WORKDIR /work
@ -84,6 +84,8 @@ RUN apt-get update && \
apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
# Required for bootstrap & healthcheck
apt-get install -y --no-install-recommends runit && \
# Required for outposts
apt-get install -y --no-install-recommends openssh-client && \
pip install --no-cache-dir -r /requirements.txt && \
apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev && \
apt-get autoremove --purge -y && \
@ -91,8 +93,9 @@ RUN apt-get update && \
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
mkdir -p /certs /media /blueprints && \
mkdir -p /authentik/.ssh && \
chown authentik:authentik /certs /media /authentik/.ssh
chown authentik:authentik /certs /media && \
chmod g+w /etc/ssh/ssh_config.d/ && \
chgrp authentik /etc/ssh/ssh_config.d/
COPY ./authentik/ /authentik
COPY ./pyproject.toml /

View File

@ -49,8 +49,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
context = self.instance.context if self.instance else {}
valid, logs = Importer(content, context).validate()
if not valid:
text_logs = "\n".join([x["event"] for x in logs])
raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs}))
raise ValidationError(_("Failed to validate blueprint"), *[x["msg"] for x in logs])
return content
def validate(self, attrs: dict) -> dict:

View File

@ -1,17 +1,12 @@
"""Generate JSON Schema for blueprints"""
from json import dumps
from typing import Any
from json import dumps, loads
from pathlib import Path
from django.core.management.base import BaseCommand, no_translations
from django.db.models import Model
from drf_jsonschema_serializer.convert import field_to_converter
from rest_framework.fields import Field, JSONField, UUIDField
from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.blueprints.v1.meta.registry import registry
from authentik.lib.models import SerializerModel
LOGGER = get_logger()
@ -21,138 +16,21 @@ class Command(BaseCommand):
schema: dict
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.schema = {
"$schema": "http://json-schema.org/draft-07/schema",
"$id": "https://goauthentik.io/blueprints/schema.json",
"type": "object",
"title": "authentik Blueprint schema",
"required": ["version", "entries"],
"properties": {
"version": {
"$id": "#/properties/version",
"type": "integer",
"title": "Blueprint version",
"default": 1,
},
"metadata": {
"$id": "#/properties/metadata",
"type": "object",
"required": ["name"],
"properties": {
"name": {"type": "string"},
"labels": {"type": "object", "additionalProperties": {"type": "string"}},
},
},
"context": {
"$id": "#/properties/context",
"type": "object",
"additionalProperties": True,
},
"entries": {
"type": "array",
"items": {
"oneOf": [],
},
},
},
"$defs": {},
}
@no_translations
def handle(self, *args, **options):
"""Generate JSON Schema for blueprints"""
self.build()
self.stdout.write(dumps(self.schema, indent=4, default=Command.json_default))
path = Path(__file__).parent.joinpath("./schema_template.json")
with open(path, "r", encoding="utf-8") as _template_file:
self.schema = loads(_template_file.read())
self.set_model_allowed()
self.stdout.write(dumps(self.schema, indent=4))
@staticmethod
def json_default(value: Any) -> Any:
"""Helper that handles gettext_lazy strings that JSON doesn't handle"""
return str(value)
def build(self):
"""Build all models into the schema"""
def set_model_allowed(self):
"""Set model enum"""
model_names = []
for model in registry.get_models():
if model._meta.abstract:
continue
if not is_model_allowed(model):
continue
model_instance: Model = model()
if not isinstance(model_instance, SerializerModel):
continue
serializer = model_instance.serializer()
model_path = f"{model._meta.app_label}.{model._meta.model_name}"
self.schema["properties"]["entries"]["items"]["oneOf"].append(
self.template_entry(model_path, serializer)
)
def template_entry(self, model_path: str, serializer: Serializer) -> dict:
"""Template entry for a single model"""
model_schema = self.to_jsonschema(serializer)
model_schema["required"] = []
def_name = f"model_{model_path}"
def_path = f"#/$defs/{def_name}"
self.schema["$defs"][def_name] = model_schema
return {
"type": "object",
"required": ["model", "attrs"],
"properties": {
"model": {"const": model_path},
"id": {"type": "string"},
"state": {
"type": "string",
"enum": ["absent", "present", "created"],
"default": "present",
},
"conditions": {"type": "array", "items": {"type": "boolean"}},
"attrs": {"$ref": def_path},
"identifiers": {"$ref": def_path},
},
}
def field_to_jsonschema(self, field: Field) -> dict:
"""Convert a single field to json schema"""
if isinstance(field, Serializer):
result = self.to_jsonschema(field)
else:
try:
converter = field_to_converter[field]
result = converter.convert(field)
except KeyError:
if isinstance(field, JSONField):
result = {"type": "object", "additionalProperties": True}
elif isinstance(field, UUIDField):
result = {"type": "string", "format": "uuid"}
else:
raise
if field.label:
result["title"] = field.label
if field.help_text:
result["description"] = field.help_text
return self.clean_result(result)
def clean_result(self, result: dict) -> dict:
"""Remove enumNames from result, recursively"""
result.pop("enumNames", None)
for key, value in result.items():
if isinstance(value, dict):
result[key] = self.clean_result(value)
return result
def to_jsonschema(self, serializer: Serializer) -> dict:
"""Convert serializer to json schema"""
properties = {}
required = []
for name, field in serializer.fields.items():
if field.read_only:
continue
sub_schema = self.field_to_jsonschema(field)
if field.required:
required.append(name)
properties[name] = sub_schema
result = {"type": "object", "properties": properties}
if required:
result["required"] = required
return result
model_names.append(f"{model._meta.app_label}.{model._meta.model_name}")
model_names.sort()
self.schema["properties"]["entries"]["items"]["properties"]["model"]["enum"] = model_names

View File

@ -0,0 +1,105 @@
{
"$schema": "http://json-schema.org/draft-07/schema",
"$id": "http://example.com/example.json",
"type": "object",
"title": "authentik Blueprint schema",
"default": {},
"required": [
"version",
"entries"
],
"properties": {
"version": {
"$id": "#/properties/version",
"type": "integer",
"title": "Blueprint version",
"default": 1
},
"metadata": {
"$id": "#/properties/metadata",
"type": "object",
"required": [
"name"
],
"properties": {
"name": {
"type": "string"
},
"labels": {
"type": "object"
}
}
},
"context": {
"$id": "#/properties/context",
"type": "object",
"additionalProperties": true
},
"entries": {
"type": "array",
"items": {
"$id": "#entry",
"type": "object",
"required": [
"model"
],
"properties": {
"model": {
"type": "string",
"enum": [
"placeholder"
]
},
"id": {
"type": "string"
},
"state": {
"type": "string",
"enum": [
"absent",
"present",
"created"
],
"default": "present"
},
"conditions": {
"type": "array",
"items": {
"type": "boolean"
}
},
"attrs": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "Commonly available field, may not exist on all models"
}
},
"default": {},
"additionalProperties": true
},
"identifiers": {
"type": "object",
"default": {},
"properties": {
"pk": {
"description": "Commonly available field, may not exist on all models",
"anyOf": [
{
"type": "number"
},
{
"type": "string",
"format": "uuid"
}
]
}
},
"additionalProperties": true
}
}
}
}
}
}

View File

@ -1,20 +0,0 @@
# Generated by Django 4.1.7 on 2023-04-28 10:49
from django.db import migrations, models
from authentik.lib.migrations import fallback_names
class Migration(migrations.Migration):
dependencies = [
("authentik_blueprints", "0002_blueprintinstance_content"),
]
operations = [
migrations.RunPython(fallback_names("authentik_blueprints", "blueprintinstance", "name")),
migrations.AlterField(
model_name="blueprintinstance",
name="name",
field=models.TextField(unique=True),
),
]

View File

@ -57,7 +57,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
instance_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
name = models.TextField(unique=True)
name = models.TextField()
metadata = models.JSONField(default=dict)
path = models.TextField(default="", blank=True)
content = models.TextField(default="", blank=True)

View File

@ -67,7 +67,4 @@ class TestBlueprintsV1API(APITestCase):
},
)
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(
res.content.decode(),
{"content": ["Failed to validate blueprint: Invalid blueprint version"]},
)
self.assertJSONEqual(res.content.decode(), {"content": ["Failed to validate blueprint"]})

View File

@ -299,7 +299,7 @@ class Importer:
orig_import = deepcopy(self.__import)
if self.__import.version != 1:
self.logger.warning("Invalid blueprint version")
return False, [{"event": "Invalid blueprint version"}]
return False, []
with (
transaction_rollback(),
capture_logs() as logs,

View File

@ -101,10 +101,7 @@ def blueprints_find():
"""Find blueprints and return valid ones"""
blueprints = []
root = Path(CONFIG.y("blueprints_dir"))
for path in root.rglob("**/*.yaml"):
# Check if any part in the path starts with a dot and assume a hidden file
if any(part for part in path.parts if part.startswith(".")):
continue
for path in root.glob("**/*.yaml"):
LOGGER.debug("found blueprint", path=str(path))
with open(path, "r", encoding="utf-8") as blueprint_file:
try:

View File

@ -93,6 +93,7 @@ class PropertyMappingViewSet(
{
"name": subclass._meta.verbose_name,
"description": subclass.__doc__,
# pyright: reportGeneralTypeIssues=false
"component": subclass().component,
"model_name": subclass._meta.model_name,
}

View File

@ -25,6 +25,7 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
def get_component(self, obj: Provider) -> str: # pragma: no cover
"""Get object component so that we know how to edit the object"""
# pyright: reportGeneralTypeIssues=false
if obj.__class__ == Provider:
return ""
return obj.component

View File

@ -40,6 +40,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
def get_component(self, obj: Source) -> str:
"""Get object component so that we know how to edit the object"""
# pyright: reportGeneralTypeIssues=false
if obj.__class__ == Source:
return ""
return obj.component
@ -138,6 +139,7 @@ class SourceViewSet(
component = subclass.__bases__[0]().component
else:
component = subclass().component
# pyright: reportGeneralTypeIssues=false
data.append(
{
"name": subclass._meta.verbose_name,

View File

@ -56,6 +56,7 @@ class UsedByMixin:
# pylint: disable=too-many-locals
def used_by(self, request: Request, *args, **kwargs) -> Response:
"""Get a list of all objects that use this object"""
# pyright: reportGeneralTypeIssues=false
model: Model = self.get_object()
used_by = []
shadows = []

View File

@ -13,7 +13,6 @@ from authentik.core.views.interface import FlowInterfaceView, InterfaceView
from authentik.core.views.session import EndSessionView
from authentik.root.asgi_middleware import SessionMiddleware
from authentik.root.messages.consumer import MessageConsumer
from authentik.root.middleware import ChannelsLoggingMiddleware
urlpatterns = [
path(
@ -71,10 +70,7 @@ urlpatterns = [
websocket_urlpatterns = [
path(
"ws/client/",
ChannelsLoggingMiddleware(
CookieMiddleware(SessionMiddleware(AuthMiddleware(MessageConsumer.as_asgi())))
),
"ws/client/", CookieMiddleware(SessionMiddleware(AuthMiddleware(MessageConsumer.as_asgi())))
),
]

View File

@ -160,7 +160,6 @@ class CertificateKeyPairSerializer(ModelSerializer):
"managed",
]
extra_kwargs = {
"managed": {"read_only": True},
"key_data": {"write_only": True},
"certificate_data": {"write_only": True},
}

View File

@ -1,20 +0,0 @@
# Generated by Django 4.1.7 on 2023-04-28 10:49
from django.db import migrations, models
from authentik.lib.migrations import fallback_names
class Migration(migrations.Migration):
dependencies = [
("authentik_crypto", "0003_certificatekeypair_managed"),
]
operations = [
migrations.RunPython(fallback_names("authentik_crypto", "certificatekeypair", "name")),
migrations.AlterField(
model_name="certificatekeypair",
name="name",
field=models.TextField(unique=True),
),
]

View File

@ -26,7 +26,7 @@ class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):
kp_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
name = models.TextField(unique=True)
name = models.TextField()
certificate_data = models.TextField(help_text=_("PEM-encoded Certificate data"))
key_data = models.TextField(
help_text=_(

View File

@ -37,22 +37,20 @@ class TestCrypto(APITestCase):
keypair = create_test_cert()
self.assertTrue(
CertificateKeyPairSerializer(
instance=keypair,
data={
"name": keypair.name,
"certificate_data": keypair.certificate_data,
"key_data": keypair.key_data,
},
}
).is_valid()
)
self.assertFalse(
CertificateKeyPairSerializer(
instance=keypair,
data={
"name": keypair.name,
"certificate_data": "test",
"key_data": "test",
},
}
).is_valid()
)
@ -248,6 +246,7 @@ class TestCrypto(APITestCase):
with open(f"{temp_dir}/foo.bar/privkey.pem", "w+", encoding="utf-8") as _key:
_key.write(builder.private_key)
with CONFIG.patch("cert_discovery_dir", temp_dir):
# pyright: reportGeneralTypeIssues=false
certificate_discovery() # pylint: disable=no-value-for-parameter
keypair: CertificateKeyPair = CertificateKeyPair.objects.filter(
managed=MANAGED_DISCOVERED % "foo"

View File

@ -219,13 +219,13 @@ class Event(SerializerModel, ExpiringModel):
self.context["http_request"] = {
"path": request.path,
"method": request.method,
"args": cleanse_dict(QueryDict(request.META.get("QUERY_STRING", ""))),
"args": QueryDict(request.META.get("QUERY_STRING", "")),
}
# Special case for events created during flow execution
# since they keep the http query within a wrapped query
if QS_QUERY in self.context["http_request"]["args"]:
wrapped = self.context["http_request"]["args"][QS_QUERY]
self.context["http_request"]["args"] = cleanse_dict(QueryDict(wrapped))
self.context["http_request"]["args"] = QueryDict(wrapped)
if hasattr(request, "tenant"):
tenant: Tenant = request.tenant
# Because self.created only gets set on save, we can't use its value here
@ -453,6 +453,7 @@ class NotificationTransport(SerializerModel):
try:
from authentik.stages.email.tasks import send_mail
# pyright: reportGeneralTypeIssues=false
return send_mail(mail.__dict__) # pylint: disable=no-value-for-parameter
except (SMTPException, ConnectionError, OSError) as exc:
raise NotificationTransportError(exc) from exc

View File

@ -1,25 +1,17 @@
"""event tests"""
from urllib.parse import urlencode
from django.contrib.contenttypes.models import ContentType
from django.test import RequestFactory, TestCase
from django.views.debug import SafeExceptionReporterFilter
from django.test import TestCase
from guardian.shortcuts import get_anonymous_user
from authentik.core.models import Group
from authentik.events.models import Event
from authentik.flows.views.executor import QS_QUERY
from authentik.lib.generators import generate_id
from authentik.policies.dummy.models import DummyPolicy
from authentik.tenants.models import Tenant
class TestEvents(TestCase):
"""Test Event"""
def setUp(self) -> None:
self.factory = RequestFactory()
def test_new_with_model(self):
"""Create a new Event passing a model as kwarg"""
test_model = Group.objects.create(name="test")
@ -48,58 +40,3 @@ class TestEvents(TestCase):
model_content_type = ContentType.objects.get_for_model(temp_model)
self.assertEqual(event.context.get("model").get("app"), model_content_type.app_label)
self.assertEqual(event.context.get("model").get("pk"), temp_model.pk.hex)
def test_from_http_basic(self):
"""Test plain from_http"""
event = Event.new("unittest").from_http(self.factory.get("/"))
self.assertEqual(
event.context, {"http_request": {"args": {}, "method": "GET", "path": "/"}}
)
def test_from_http_clean_querystring(self):
"""Test cleansing query string"""
request = self.factory.get(f"/?token={generate_id()}")
event = Event.new("unittest").from_http(request)
self.assertEqual(
event.context,
{
"http_request": {
"args": {"token": SafeExceptionReporterFilter.cleansed_substitute},
"method": "GET",
"path": "/",
}
},
)
def test_from_http_clean_querystring_flow(self):
"""Test cleansing query string (nested query string like flow executor)"""
nested_qs = {"token": generate_id()}
request = self.factory.get(f"/?{QS_QUERY}={urlencode(nested_qs)}")
event = Event.new("unittest").from_http(request)
self.assertEqual(
event.context,
{
"http_request": {
"args": {"token": SafeExceptionReporterFilter.cleansed_substitute},
"method": "GET",
"path": "/",
}
},
)
def test_from_http_tenant(self):
"""Test from_http tenant"""
# Test tenant
request = self.factory.get("/")
tenant = Tenant(domain="test-tenant")
setattr(request, "tenant", tenant)
event = Event.new("unittest").from_http(request)
self.assertEqual(
event.tenant,
{
"app": "authentik_tenants",
"model_name": "tenant",
"name": "Tenant test-tenant",
"pk": tenant.pk.hex,
},
)
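
The variant of `Event.from_http` that runs the query string through `cleanse_dict` redacts sensitive parameters before they are stored, which is what the removed tests assert via `SafeExceptionReporterFilter.cleansed_substitute`. Below is a minimal sketch of that redaction idea using only Django's stock filter; it is not authentik's `cleanse_dict` helper and assumes a configured Django environment.

```python
from django.http import QueryDict
from django.views.debug import SafeExceptionReporterFilter


def redact_query_string(query_string: str) -> dict:
    """Replace values of sensitive-looking query parameters before logging."""
    reporter_filter = SafeExceptionReporterFilter()
    cleansed = {}
    for key, value in QueryDict(query_string).items():
        # cleanse_setting() substitutes the value whenever the key matches
        # Django's hidden-settings pattern (API, TOKEN, KEY, SECRET, ...).
        cleansed[key] = reporter_filter.cleanse_setting(key, value)
    return cleansed


# redact_query_string("token=abc123&next=/")
# -> {"token": SafeExceptionReporterFilter.cleansed_substitute, "next": "/"}
```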

View File

@ -27,6 +27,7 @@ class StageSerializer(ModelSerializer, MetaNameSerializer):
def get_component(self, obj: Stage) -> str:
"""Get object type so that we know how to edit the object"""
# pyright: reportGeneralTypeIssues=false
if obj.__class__ == Stage:
return ""
return obj.component

View File

@ -182,4 +182,5 @@ class HttpChallengeResponse(JsonResponse):
"""Subclass of JsonResponse that uses the `DataclassEncoder`"""
def __init__(self, challenge, **kwargs) -> None:
# pyright: reportGeneralTypeIssues=false
super().__init__(challenge.data, encoder=DataclassEncoder, **kwargs)

View File

@ -177,6 +177,7 @@ class ConfigLoader:
# Walk each component of the path
path_parts = path.split(sep)
for comp in path_parts[:-1]:
# pyright: reportGeneralTypeIssues=false
if comp not in root:
root[comp] = {}
root = root.get(comp, {})

View File

@ -31,6 +31,7 @@ class ServiceConnectionSerializer(ModelSerializer, MetaNameSerializer):
def get_component(self, obj: OutpostServiceConnection) -> str:
"""Get object type so that we know how to edit the object"""
# pyright: reportGeneralTypeIssues=false
if obj.__class__ == OutpostServiceConnection:
return ""
return obj.component
@ -76,6 +77,7 @@ class ServiceConnectionViewSet(
data = []
for subclass in all_subclasses(self.queryset.model):
subclass: OutpostServiceConnection
# pyright: reportGeneralTypeIssues=false
data.append(
{
"name": subclass._meta.verbose_name,

View File

@ -1,4 +1,5 @@
"""Docker controller"""
from subprocess import SubprocessError # nosec
from time import sleep
from typing import Optional
from urllib.parse import urlparse
@ -9,7 +10,6 @@ from docker import DockerClient as UpstreamDockerClient
from docker.errors import DockerException, NotFound
from docker.models.containers import Container
from docker.utils.utils import kwargs_from_env
from paramiko.ssh_exception import SSHException
from structlog.stdlib import get_logger
from yaml import safe_dump
@ -58,8 +58,9 @@ class DockerClient(UpstreamDockerClient, BaseClient):
super().__init__(
base_url=connection.url,
tls=tls_config,
use_ssh_client=True,
)
except SSHException as exc:
except SubprocessError as exc:
if self.ssh:
self.ssh.cleanup()
raise ServiceConnectionInvalid(exc) from exc

View File

@ -7,8 +7,7 @@ from docker.errors import DockerException
from authentik.crypto.models import CertificateKeyPair
HEADER = "### Managed by authentik"
FOOTER = "### End Managed by authentik"
SSH_CONFIG_DIR = Path("/etc/ssh/ssh_config.d/")
def opener(path, flags):
@ -28,70 +27,54 @@ class DockerInlineSSH:
key_path: str
config_path: Path
header: str
def __init__(self, host: str, keypair: CertificateKeyPair) -> None:
self.host = host
self.keypair = keypair
self.config_path = Path("~/.ssh/config").expanduser()
if self.config_path.exists() and HEADER not in self.config_path.read_text(encoding="utf-8"):
# SSH Config file already exists and there's no header from us, meaning that it's
# been externally mapped into the container for more complex configs
raise SSHManagedExternallyException(
"SSH Config exists and does not contain authentik header"
)
self.config_path = SSH_CONFIG_DIR / Path(self.host + ".conf")
with open(self.config_path, "w", encoding="utf-8") as _config:
if not _config.writable():
# SSH Config file already exists and there's no header from us, meaning that it's
# been externally mapped into the container for more complex configs
raise SSHManagedExternallyException(
"SSH Config exists and does not contain authentik header"
)
if not self.keypair:
raise DockerException("keypair must be set for SSH connections")
self.header = f"{HEADER} - {self.host}\n"
def write_config(self, key_path: str) -> bool:
def write_config(self, key_path: str):
"""Update the local user's ssh config file"""
with open(self.config_path, "a+", encoding="utf-8") as ssh_config:
if self.header in ssh_config.readlines():
return False
with open(self.config_path, "w", encoding="utf-8") as ssh_config:
ssh_config.writelines(
[
self.header,
f"Host {self.host}\n",
f" IdentityFile {key_path}\n",
f" IdentityFile {str(key_path)}\n",
" StrictHostKeyChecking No\n",
" UserKnownHostsFile /dev/null\n",
f"{FOOTER}\n",
"\n",
]
)
return True
def write_key(self):
def write_key(self) -> Path:
"""Write keypair's private key to a temporary file"""
path = Path(gettempdir(), f"{self.keypair.pk}_private.pem")
with open(path, "w", encoding="utf8", opener=opener) as _file:
_file.write(self.keypair.key_data)
return str(path)
return path
def write(self):
"""Write keyfile and update ssh config"""
self.key_path = self.write_key()
was_written = self.write_config(self.key_path)
if not was_written:
try:
self.write_config(self.key_path)
except OSError:
self.cleanup()
def cleanup(self):
"""Cleanup when we're done"""
try:
os.unlink(self.key_path)
with open(self.config_path, "r", encoding="utf-8") as ssh_config:
start = 0
end = 0
lines = ssh_config.readlines()
for idx, line in enumerate(lines):
if line == self.header:
start = idx
if start != 0 and line == f"{FOOTER}\n":
end = idx
with open(self.config_path, "w+", encoding="utf-8") as ssh_config:
lines = lines[:start] + lines[end + 2 :]
ssh_config.writelines(lines)
os.unlink(self.config_path)
except OSError:
# If we fail deleting a file it doesn't matter that much
# since we're just in a container
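
Both versions of `DockerInlineSSH` write the keypair's private key through a custom `opener` whose body is not shown in this hunk. A typical implementation of that pattern creates the file with restrictive permissions at open() time; the sketch below uses a placeholder path and key body and is not the repository's exact code.

```python
import os
from tempfile import gettempdir


def opener(path, flags):
    # Create the file with owner-only permissions (0o600) at open() time,
    # instead of creating it world-readable and chmod-ing afterwards.
    return os.open(path, flags, 0o600)


key_path = os.path.join(gettempdir(), "example_private.pem")  # placeholder path
with open(key_path, "w", encoding="utf-8", opener=opener) as handle:
    handle.write("-----BEGIN PRIVATE KEY-----\n...placeholder...\n-----END PRIVATE KEY-----\n")
```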

View File

@ -128,7 +128,7 @@ class OutpostServiceConnection(models.Model):
@property
def state_key(self) -> str:
"""Key used to save connection state in cache"""
return f"goauthentik.io/outposts/service_connection_state/{self.pk.hex}"
return f"outpost_service_connection_{self.pk.hex}"
@property
def state(self) -> OutpostServiceConnectionState:
@ -278,7 +278,7 @@ class Outpost(SerializerModel, ManagedModel):
@property
def state_cache_prefix(self) -> str:
"""Key by which the outposts status is saved"""
return f"goauthentik.io/outposts/state/{self.uuid.hex}"
return f"goauthentik.io/outposts/{self.uuid.hex}_state"
@property
def state(self) -> list["OutpostState"]:
@ -433,19 +433,19 @@ class OutpostState:
@staticmethod
def for_outpost(outpost: Outpost) -> list["OutpostState"]:
"""Get all states for an outpost"""
keys = cache.keys(f"{outpost.state_cache_prefix}/*")
keys = cache.keys(f"{outpost.state_cache_prefix}_*")
if not keys:
return []
states = []
for key in keys:
instance_uid = key.replace(f"{outpost.state_cache_prefix}/", "")
instance_uid = key.replace(f"{outpost.state_cache_prefix}_", "")
states.append(OutpostState.for_instance_uid(outpost, instance_uid))
return states
@staticmethod
def for_instance_uid(outpost: Outpost, uid: str) -> "OutpostState":
"""Get state for a single instance"""
key = f"{outpost.state_cache_prefix}/{uid}"
key = f"{outpost.state_cache_prefix}_{uid}"
default_data = {"uid": uid, "channel_ids": []}
data = cache.get(key, default_data)
if isinstance(data, str):
@ -458,10 +458,10 @@ class OutpostState:
def save(self, timeout=OUTPOST_HELLO_INTERVAL):
"""Save current state to cache"""
full_key = f"{self._outpost.state_cache_prefix}/{self.uid}"
full_key = f"{self._outpost.state_cache_prefix}_{self.uid}"
return cache.set(full_key, asdict(self), timeout=timeout)
def delete(self):
"""Manually delete from cache, used on channel disconnect"""
full_key = f"{self._outpost.state_cache_prefix}/{self.uid}"
full_key = f"{self._outpost.state_cache_prefix}_{self.uid}"
cache.delete(full_key)
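
Both cache-key schemes above follow the same pattern: every connected outpost instance saves its state under a shared per-outpost prefix, and listing states is a wildcard scan over that prefix. A minimal sketch with placeholder values follows; the wildcard `cache.keys()` call requires a backend that supports it, such as django-redis.

```python
from django.core.cache import cache

# Placeholder UUID; the real prefix comes from Outpost.state_cache_prefix.
prefix = "goauthentik.io/outposts/state/0123456789abcdef0123456789abcdef"

# Each connected instance stores its own state under <prefix>/<instance uid>.
cache.set(f"{prefix}/instance-a", {"uid": "instance-a", "channel_ids": []}, timeout=60)

# Listing all states for the outpost is a wildcard scan over the shared prefix.
for key in cache.keys(f"{prefix}/*"):
    uid = key.removeprefix(f"{prefix}/")
    print(uid, cache.get(key))
```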

View File

@ -45,7 +45,7 @@ from authentik.providers.proxy.controllers.kubernetes import ProxyKubernetesCont
from authentik.root.celery import CELERY_APP
LOGGER = get_logger()
CACHE_KEY_OUTPOST_DOWN = "goauthentik.io/outposts/teardown/%s"
CACHE_KEY_OUTPOST_DOWN = "outpost_teardown_%s"
def controller_for_outpost(outpost: Outpost) -> Optional[type[BaseController]]:
@ -148,8 +148,6 @@ def outpost_controller(
except (ControllerException, ServiceConnectionInvalid) as exc:
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
else:
if from_cache:
cache.delete(CACHE_KEY_OUTPOST_DOWN % outpost_pk)
self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, logs))

View File

@ -2,8 +2,7 @@
from django.urls import path
from authentik.outposts.channels import OutpostConsumer
from authentik.root.middleware import ChannelsLoggingMiddleware
websocket_urlpatterns = [
path("ws/outpost/<uuid:pk>/", ChannelsLoggingMiddleware(OutpostConsumer.as_asgi())),
path("ws/outpost/<uuid:pk>/", OutpostConsumer.as_asgi()),
]

View File

@ -40,6 +40,7 @@ class PolicySerializer(ModelSerializer, MetaNameSerializer):
def get_component(self, obj: Policy) -> str: # pragma: no cover
"""Get object component so that we know how to edit the object"""
# pyright: reportGeneralTypeIssues=false
if obj.__class__ == Policy:
return ""
return obj.component
@ -49,6 +50,7 @@ class PolicySerializer(ModelSerializer, MetaNameSerializer):
return obj.bindings.count() + obj.promptstage_set.count()
def to_representation(self, instance: Policy):
# pyright: reportGeneralTypeIssues=false
if instance.__class__ == Policy or not self._resolve_inheritance:
return super().to_representation(instance)
return dict(instance.serializer(instance=instance, resolve_inheritance=False).data)

View File

@ -19,6 +19,7 @@ class AccessDeniedResponse(TemplateResponse):
error_message: Optional[str] = None
policy_result: Optional[PolicyResult] = None
# pyright: reportGeneralTypeIssues=false
def __init__(self, request: HttpRequest, template="policies/denied.html") -> None:
super().__init__(request, template)
self.title = _("Access denied")

View File

@ -74,6 +74,7 @@ class PolicyEngine:
def _check_policy_type(self, binding: PolicyBinding):
"""Check policy type, make sure it's not the root class as that has no logic implemented"""
# pyright: reportGeneralTypeIssues=false
if binding.policy is not None and binding.policy.__class__ == Policy:
raise PolicyEngineException(f"Policy '{binding.policy}' is root type")

View File

@ -15,8 +15,8 @@ class Migration(migrations.Migration):
name="policy_engine_mode",
field=models.TextField(
choices=[
("all", "all, all policies must pass"),
("any", "any, any policy must pass"),
("all", "ALL, all policies must pass"),
("any", "ANY, any policy must pass"),
],
default="all",
),
@ -27,8 +27,8 @@ class Migration(migrations.Migration):
name="policy_engine_mode",
field=models.TextField(
choices=[
("all", "all, all policies must pass"),
("any", "any, any policy must pass"),
("all", "ALL, all policies must pass"),
("any", "ANY, any policy must pass"),
],
default="any",
),

View File

@ -19,8 +19,10 @@ from authentik.policies.types import PolicyRequest, PolicyResult
class PolicyEngineMode(models.TextChoices):
"""Decide how results of multiple policies should be combined."""
MODE_ALL = "all", _("all, all policies must pass") # type: "PolicyEngineMode"
MODE_ANY = "any", _("any, any policy must pass") # type: "PolicyEngineMode"
# pyright: reportGeneralTypeIssues=false
MODE_ALL = "all", _("ALL, all policies must pass") # type: "PolicyEngineMode"
# pyright: reportGeneralTypeIssues=false
MODE_ANY = "any", _("ANY, any policy must pass") # type: "PolicyEngineMode"
class PolicyBindingModel(models.Model):

View File

@ -5,9 +5,7 @@ from django.dispatch import receiver
from structlog.stdlib import get_logger
from authentik.core.api.applications import user_app_cache_key
from authentik.core.models import Group, User
from authentik.policies.apps import GAUGE_POLICIES_CACHED
from authentik.policies.models import Policy, PolicyBinding, PolicyBindingModel
from authentik.policies.types import CACHE_PREFIX
from authentik.root.monitoring import monitoring_set
@ -20,14 +18,12 @@ def monitoring_set_policies(sender, **kwargs):
GAUGE_POLICIES_CACHED.set(len(cache.keys(f"{CACHE_PREFIX}_*") or []))
@receiver(post_save, sender=Policy)
@receiver(post_save, sender=PolicyBinding)
@receiver(post_save, sender=PolicyBindingModel)
@receiver(post_save, sender=Group)
@receiver(post_save, sender=User)
@receiver(post_save)
def invalidate_policy_cache(sender, instance, **_):
"""Invalidate Policy cache when policy is updated"""
if sender == Policy:
from authentik.policies.models import Policy, PolicyBinding
if isinstance(instance, Policy):
total = 0
for binding in PolicyBinding.objects.filter(policy=instance):
prefix = f"{CACHE_PREFIX}{binding.policy_binding_uuid.hex}_{binding.policy.pk.hex}*"

View File

@ -1,24 +0,0 @@
# Generated by Django 4.1.7 on 2023-05-06 16:18
from django.db import migrations, models
import authentik.providers.oauth2.models
class Migration(migrations.Migration):
dependencies = [
(
"authentik_providers_oauth2",
"0015_accesstoken_auth_time_authorizationcode_auth_time_and_more",
),
]
operations = [
migrations.AlterField(
model_name="refreshtoken",
name="token",
field=models.TextField(
default=authentik.providers.oauth2.models.generate_client_secret
),
),
]

View File

@ -382,7 +382,7 @@ class AccessToken(SerializerModel, ExpiringModel, BaseGrantModel):
class RefreshToken(SerializerModel, ExpiringModel, BaseGrantModel):
"""OAuth2 Refresh Token, opaque"""
token = models.TextField(default=generate_client_secret)
token = models.TextField(default=generate_key)
_id_token = models.TextField(verbose_name=_("ID Token"))
@property

View File

@ -202,4 +202,5 @@ class HttpResponseRedirectScheme(HttpResponseRedirect):
**kwargs: Any,
) -> None:
self.allowed_schemes = allowed_schemes or ["http", "https", "ftp"]
# pyright: reportGeneralTypeIssues=false
super().__init__(redirect_to, *args, **kwargs)

View File

@ -2,12 +2,19 @@
from typing import Generic, TypeVar
from pydantic import ValidationError
from pydanticscim.service_provider import (
Bulk,
ChangePassword,
Filter,
Patch,
ServiceProviderConfiguration,
Sort,
)
from requests import RequestException, Session
from structlog.stdlib import get_logger
from authentik.lib.utils.http import get_http_session
from authentik.providers.scim.clients.exceptions import ResourceMissing, SCIMRequestException
from authentik.providers.scim.clients.schema import ServiceProviderConfiguration
from authentik.providers.scim.clients.exceptions import SCIMRequestException
from authentik.providers.scim.models import SCIMProvider
T = TypeVar("T")
@ -15,6 +22,18 @@ T = TypeVar("T")
SchemaType = TypeVar("SchemaType")
def default_service_provider_config() -> ServiceProviderConfiguration:
"""Fallback service provider configuration"""
return ServiceProviderConfiguration(
patch=Patch(supported=False),
bulk=Bulk(supported=False),
filter=Filter(supported=False),
changePassword=ChangePassword(supported=False),
sort=Sort(supported=False),
authenticationSchemes=[],
)
class SCIMClient(Generic[T, SchemaType]):
"""SCIM Client"""
@ -54,8 +73,6 @@ class SCIMClient(Generic[T, SchemaType]):
raise SCIMRequestException(None) from exc
self.logger.debug("scim request", path=path, method=method, **kwargs)
if response.status_code >= 400:
if response.status_code == 404:
raise ResourceMissing(response)
self.logger.warning(
"Failed to send SCIM request", path=path, method=method, response=response.text
)
@ -66,7 +83,7 @@ class SCIMClient(Generic[T, SchemaType]):
def get_service_provider_config(self):
"""Get Service provider config"""
default_config = ServiceProviderConfiguration.default()
default_config = default_service_provider_config()
try:
return ServiceProviderConfiguration.parse_obj(
self._request("GET", "/ServiceProviderConfig")
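
In both variants the client probes the service's `/ServiceProviderConfig` and falls back to a configuration with no optional features when the endpoint is missing or unusable. Here is a rough standalone sketch of that probe-then-fallback idea, using plain `requests` and a hypothetical `ProviderFeatures` container instead of the pydanticscim models.

```python
from dataclasses import dataclass

import requests


@dataclass
class ProviderFeatures:
    """Subset of RFC 7643 ServiceProviderConfig flags relevant for syncing."""

    patch_supported: bool = False
    filter_supported: bool = False
    is_fallback: bool = True


def probe_features(base_url: str, session: requests.Session) -> ProviderFeatures:
    """Query the SCIM service's capabilities, defaulting to 'nothing supported'."""
    try:
        response = session.get(f"{base_url}/ServiceProviderConfig", timeout=5)
        response.raise_for_status()
        data = response.json()
        return ProviderFeatures(
            patch_supported=data.get("patch", {}).get("supported", False),
            filter_supported=data.get("filter", {}).get("supported", False),
            is_fallback=False,
        )
    except (requests.RequestException, ValueError):
        # Endpoint missing, unreachable, or returning non-JSON: assume no features.
        return ProviderFeatures()
```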

View File

@ -41,8 +41,3 @@ class SCIMRequestException(SentryIgnoredException):
except ValidationError:
pass
return super().__str__()
class ResourceMissing(SCIMRequestException):
"""Error raised when the provider raises a 404, meaning that we
should delete our internal ID and re-create the object"""

View File

@ -2,7 +2,7 @@
from deepmerge import always_merger
from pydantic import ValidationError
from pydanticscim.group import GroupMember
from pydanticscim.responses import PatchOp, PatchOperation
from pydanticscim.responses import PatchOp, PatchOperation, PatchRequest
from authentik.core.exceptions import PropertyMappingExpressionException
from authentik.core.models import Group
@ -10,13 +10,8 @@ from authentik.events.models import Event, EventAction
from authentik.lib.utils.errors import exception_to_string
from authentik.policies.utils import delete_none_keys
from authentik.providers.scim.clients.base import SCIMClient
from authentik.providers.scim.clients.exceptions import (
ResourceMissing,
SCIMRequestException,
StopSync,
)
from authentik.providers.scim.clients.exceptions import StopSync
from authentik.providers.scim.clients.schema import Group as SCIMGroupSchema
from authentik.providers.scim.clients.schema import PatchRequest
from authentik.providers.scim.models import SCIMGroup, SCIMMapping, SCIMUser
@ -28,11 +23,15 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
scim_group = SCIMGroup.objects.filter(provider=self.provider, group=obj).first()
if not scim_group:
return self._create(obj)
try:
return self._update(obj, scim_group)
except ResourceMissing:
scim_group.delete()
return self._create(obj)
scim_group = self.to_scim(obj)
scim_group.id = scim_group.id
return self._request(
"PUT",
f"/Groups/{scim_group.id}",
data=scim_group.json(
exclude_unset=True,
),
)
def delete(self, obj: Group):
"""Delete group"""
@ -82,15 +81,12 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
users = list(obj.users.order_by("id").values_list("id", flat=True))
connections = SCIMUser.objects.filter(provider=self.provider, user__pk__in=users)
members = []
for user in connections:
members.append(
scim_group.members.append(
GroupMember(
value=user.id,
)
)
if members:
scim_group.members = members
return scim_group
def _create(self, group: Group):
@ -105,32 +101,13 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
)
SCIMGroup.objects.create(provider=self.provider, group=group, id=response["id"])
def _update(self, group: Group, connection: SCIMGroup):
"""Update existing group"""
scim_group = self.to_scim(group)
scim_group.id = connection.id
try:
return self._request(
"PUT",
f"/Groups/{scim_group.id}",
data=scim_group.json(
exclude_unset=True,
),
)
except SCIMRequestException:
# Some providers don't support PUT on groups, so this is mainly a fix for the initial
# sync, send patch add requests for all the users the group currently has
# TODO: send patch request for group name
users = list(group.users.order_by("id").values_list("id", flat=True))
return self._patch_add_users(group, users)
def _patch(
self,
group_id: str,
*ops: PatchOperation,
):
req = PatchRequest(Operations=ops)
self._request("PATCH", f"/Groups/{group_id}", data=req.json())
self._request("PATCH", f"/Groups/{group_id}", data=req.json(exclude_unset=True))
def update_group(self, group: Group, action: PatchOp, users_set: set[int]):
"""Update a group, either using PUT to replace it or PATCH if supported"""
@ -139,17 +116,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
return self._patch_add_users(group, users_set)
if action == PatchOp.remove:
return self._patch_remove_users(group, users_set)
try:
return self.write(group)
except SCIMRequestException as exc:
if self._config.is_fallback:
# Assume that provider does not support PUT and also doesn't support
# ServiceProviderConfig, so try PATCH as a fallback
if action == PatchOp.add:
return self._patch_add_users(group, users_set)
if action == PatchOp.remove:
return self._patch_remove_users(group, users_set)
raise exc
return self.write(group)
def _patch_add_users(self, group: Group, users_set: set[int]):
"""Add users in users_set to group"""
@ -166,8 +133,6 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
"id", flat=True
)
)
if len(user_ids) < 1:
return
self._patch(
scim_group.id,
PatchOperation(
@ -192,8 +157,6 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
"id", flat=True
)
)
if len(user_ids) < 1:
return
self._patch(
scim_group.id,
PatchOperation(

View File

@ -1,54 +1,17 @@
"""Custom SCIM schemas"""
from typing import Optional
from pydanticscim.group import Group as BaseGroup
from pydanticscim.responses import PatchRequest as BasePatchRequest
from pydanticscim.service_provider import Bulk, ChangePassword, Filter, Patch
from pydanticscim.service_provider import (
ServiceProviderConfiguration as BaseServiceProviderConfiguration,
)
from pydanticscim.service_provider import Sort
from pydanticscim.user import User as BaseUser
from pydanticscim.group import Group as SCIMGroupSchema
from pydanticscim.user import User as SCIMUserSchema
class User(BaseUser):
class User(SCIMUserSchema):
"""Modified User schema with added externalId field"""
externalId: Optional[str] = None
class Group(BaseGroup):
class Group(SCIMGroupSchema):
"""Modified Group schema with added externalId field"""
externalId: Optional[str] = None
class ServiceProviderConfiguration(BaseServiceProviderConfiguration):
"""ServiceProviderConfig with fallback"""
_is_fallback: Optional[bool] = False
@property
def is_fallback(self) -> bool:
"""Check if this service provider config was retrieved from the API endpoint
or a fallback was used"""
return self._is_fallback
@staticmethod
def default() -> "ServiceProviderConfiguration":
"""Get default configuration, which doesn't support any optional features as fallback"""
return ServiceProviderConfiguration(
patch=Patch(supported=False),
bulk=Bulk(supported=False),
filter=Filter(supported=False),
changePassword=ChangePassword(supported=False),
sort=Sort(supported=False),
authenticationSchemes=[],
_is_fallback=True,
)
class PatchRequest(BasePatchRequest):
"""PatchRequest which correctly sets schemas"""
schemas: tuple[str] = ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]
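
The `PatchRequest` subclass above exists to pin the `schemas` field, and the membership tests later in this diff assert the resulting request body. For reference, the PATCH body shape defined by RFC 7644 section 3.5.2 looks like this; the member id is a placeholder.

```python
import json

patch_body = {
    "schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
    "Operations": [
        {
            "op": "add",
            "path": "members",
            "value": [{"value": "placeholder-user-scim-id"}],
        }
    ],
}
print(json.dumps(patch_body, indent=2))
```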

View File

@ -8,7 +8,7 @@ from authentik.events.models import Event, EventAction
from authentik.lib.utils.errors import exception_to_string
from authentik.policies.utils import delete_none_keys
from authentik.providers.scim.clients.base import SCIMClient
from authentik.providers.scim.clients.exceptions import ResourceMissing, StopSync
from authentik.providers.scim.clients.exceptions import StopSync
from authentik.providers.scim.clients.schema import User as SCIMUserSchema
from authentik.providers.scim.models import SCIMMapping, SCIMUser
@ -21,11 +21,7 @@ class SCIMUserClient(SCIMClient[User, SCIMUserSchema]):
scim_user = SCIMUser.objects.filter(provider=self.provider, user=obj).first()
if not scim_user:
return self._create(obj)
try:
return self._update(obj, scim_user)
except ResourceMissing:
scim_user.delete()
return self._create(obj)
return self._update(obj, scim_user)
def delete(self, obj: User):
"""Delete user"""

View File

@ -1,23 +0,0 @@
"""SCIM Sync"""
from django.core.management.base import BaseCommand
from structlog.stdlib import get_logger
from authentik.providers.scim.models import SCIMProvider
from authentik.providers.scim.tasks import scim_sync
LOGGER = get_logger()
class Command(BaseCommand):
"""Run sync for an SCIM Provider"""
def add_arguments(self, parser):
parser.add_argument("providers", nargs="+", type=str)
def handle(self, **options):
for provider_name in options["providers"]:
provider = SCIMProvider.objects.filter(name=provider_name).first()
if not provider:
LOGGER.warning("Provider does not exist", name=provider_name)
continue
scim_sync.delay(provider.pk).get()

View File

@ -1,9 +1,9 @@
"""SCIM Provider tasks"""
from typing import Any, Optional
from typing import Any
from celery.result import allow_join_result
from django.core.paginator import Paginator
from django.db.models import Model, QuerySet
from django.db.models import Model
from django.utils.text import slugify
from django.utils.translation import gettext_lazy as _
from pydanticscim.responses import PatchOp
@ -94,8 +94,7 @@ def scim_sync_users(page: int, provider_pk: int):
}
)
)
except StopSync as exc:
LOGGER.warning("Stopping sync", exc=exc)
except StopSync:
break
return messages
@ -127,8 +126,7 @@ def scim_sync_group(page: int, provider_pk: int):
}
)
)
except StopSync as exc:
LOGGER.warning("Stopping sync", exc=exc)
except StopSync:
break
return messages
@ -143,20 +141,6 @@ def scim_signal_direct(model: str, pk: Any, raw_op: str):
operation = PatchOp(raw_op)
for provider in SCIMProvider.objects.all():
client = client_for_model(provider, instance)
# Check if the object is allowed within the provider's restrictions
queryset: Optional[QuerySet] = None
if isinstance(instance, User):
queryset = provider.get_user_qs()
if isinstance(instance, Group):
queryset = provider.get_group_qs()
if not queryset:
continue
# The queryset we get from the provider must include the instance we've got given
# otherwise ignore this provider
if not queryset.filter(pk=instance.pk).exists():
continue
try:
if operation == PatchOp.add:
client.write(instance)
@ -173,13 +157,6 @@ def scim_signal_m2m(group_pk: str, action: str, pk_set: list[int]):
if not group:
return
for provider in SCIMProvider.objects.all():
# Check if the object is allowed within the provider's restrictions
queryset: QuerySet = provider.get_group_qs()
# The queryset we get from the provider must include the instance we've got given
# otherwise ignore this provider
if not queryset.filter(pk=group_pk).exists():
continue
client = SCIMGroupClient(provider)
try:
operation = None

View File

@ -6,7 +6,7 @@ from requests_mock import Mocker
from authentik.blueprints.tests import apply_blueprint
from authentik.core.models import Group, User
from authentik.lib.generators import generate_id
from authentik.providers.scim.clients.schema import ServiceProviderConfiguration
from authentik.providers.scim.clients.base import default_service_provider_config
from authentik.providers.scim.models import SCIMMapping, SCIMProvider
from authentik.providers.scim.tasks import scim_sync
@ -39,7 +39,7 @@ class SCIMMembershipTests(TestCase):
def test_member_add(self):
"""Test member add"""
config = ServiceProviderConfiguration.default()
config = default_service_provider_config()
config.patch.supported = True
user_scim_id = generate_id()
group_scim_id = generate_id()
@ -117,14 +117,13 @@ class SCIMMembershipTests(TestCase):
"path": "members",
"value": [{"value": user_scim_id}],
}
],
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
]
},
)
def test_member_remove(self):
"""Test member remove"""
config = ServiceProviderConfiguration.default()
config = default_service_provider_config()
config.patch.supported = True
user_scim_id = generate_id()
group_scim_id = generate_id()
@ -202,8 +201,7 @@ class SCIMMembershipTests(TestCase):
"path": "members",
"value": [{"value": user_scim_id}],
}
],
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
]
},
)
@ -229,7 +227,6 @@ class SCIMMembershipTests(TestCase):
"path": "members",
"value": [{"value": user_scim_id}],
}
],
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
]
},
)

View File

@ -14,6 +14,7 @@ class ChannelsStorage(SessionStorage):
"""Send contrib.messages over websocket"""
def __init__(self, request: HttpRequest) -> None:
# pyright: reportGeneralTypeIssues=false
super().__init__(request)
self.channel = get_channel_layer()

View File

@ -1,7 +1,6 @@
"""Dynamically set SameSite depending if the upstream connection is TLS or not"""
from hashlib import sha512
from time import time
from timeit import default_timer
from typing import Callable
from django.conf import settings
@ -131,28 +130,6 @@ class SessionMiddleware(UpstreamSessionMiddleware):
return response
class ChannelsLoggingMiddleware:
"""Logging middleware for channels"""
def __init__(self, inner):
self.inner = inner
async def __call__(self, scope, receive, send):
self.log(scope)
return await self.inner(scope, receive, send)
def log(self, scope: dict, **kwargs):
"""Log request"""
headers = dict(scope.get("headers", {}))
LOGGER.info(
scope["path"],
scheme="ws",
remote=scope.get("client", [""])[0],
user_agent=headers.get(b"user-agent", b"").decode(),
**kwargs,
)
class LoggingMiddleware:
"""Logger middleware"""
@ -162,14 +139,14 @@ class LoggingMiddleware:
self.get_response = get_response
def __call__(self, request: HttpRequest) -> HttpResponse:
start = default_timer()
start = time()
response = self.get_response(request)
status_code = response.status_code
kwargs = {
"request_id": request.request_id,
}
kwargs.update(getattr(response, "ak_context", {}))
self.log(request, status_code, int((default_timer() - start) * 1000), **kwargs)
self.log(request, status_code, int((time() - start) * 1000), **kwargs)
return response
def log(self, request: HttpRequest, status_code: int, runtime: int, **kwargs):
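
The two `LoggingMiddleware` variants above time requests with different clocks. A small self-contained comparison follows; the `sleep` stands in for handling a request.

```python
from time import sleep, time
from timeit import default_timer

# timeit.default_timer is time.perf_counter: a monotonic, high-resolution clock,
# so measured durations cannot go negative if the wall clock is adjusted.
# time.time() is wall-clock time and can jump (NTP sync, manual changes).
start_monotonic = default_timer()
start_wall = time()
sleep(0.05)  # stand-in for request handling
print(int((default_timer() - start_monotonic) * 1000), "ms (monotonic)")
print(int((time() - start_wall) * 1000), "ms (wall clock)")
```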

View File

@ -12,13 +12,12 @@ AUTHENTIK_SOURCES_OAUTH_TYPES = [
"authentik.sources.oauth.types.facebook",
"authentik.sources.oauth.types.github",
"authentik.sources.oauth.types.google",
"authentik.sources.oauth.types.mailcow",
"authentik.sources.oauth.types.oidc",
"authentik.sources.oauth.types.okta",
"authentik.sources.oauth.types.patreon",
"authentik.sources.oauth.types.reddit",
"authentik.sources.oauth.types.twitch",
"authentik.sources.oauth.types.twitter",
"authentik.sources.oauth.types.mailcow",
"authentik.sources.oauth.types.twitch",
]

View File

@ -66,7 +66,7 @@ class OAuthClient(BaseOAuthClient):
response.raise_for_status()
except RequestException as exc:
raise OAuthSourceException(
exc.response.text if exc.response else str(exc),
response=exc.response.text if exc.response else str(exc),
) from exc
return response.text

View File

@ -163,15 +163,6 @@ class DiscordOAuthSource(OAuthSource):
verbose_name_plural = _("Discord OAuth Sources")
class PatreonOAuthSource(OAuthSource):
"""Social Login using Patreon."""
class Meta:
abstract = True
verbose_name = _("Patreon OAuth Source")
verbose_name_plural = _("Patreon OAuth Sources")
class GoogleOAuthSource(OAuthSource):
"""Social Login using Google or Google Workspace (GSuite)."""

View File

@ -1,67 +0,0 @@
"""Patreon Type tests"""
from django.test import RequestFactory, TestCase
from authentik.sources.oauth.models import OAuthSource
from authentik.sources.oauth.types.patreon import PatreonOAuthCallback
PATREON_USER = {
"data": {
"attributes": {
"about": None,
"created": "2017-10-20T21:36:23+00:00",
"discord_id": None,
"email": "corgi@example.com",
"facebook": None,
"facebook_id": None,
"first_name": "Corgi",
"full_name": "Corgi The Dev",
"gender": 0,
"has_password": True,
"image_url": "https://c8.patreon.com/2/400/0000000",
"is_deleted": False,
"is_email_verified": False,
"is_nuked": False,
"is_suspended": False,
"last_name": "The Dev",
"social_connections": {
"deviantart": None,
"discord": None,
"facebook": None,
"reddit": None,
"spotify": None,
"twitch": None,
"twitter": None,
"youtube": None,
},
"thumb_url": "https://c8.patreon.com/2/100/0000000",
"twitch": None,
"twitter": None,
"url": "https://www.patreon.com/corgithedev",
"vanity": "corgithedev",
"youtube": None,
},
"id": "0000000",
"relationships": {"pledges": {"data": []}},
"type": "user",
},
"links": {"self": "https://www.patreon.com/api/user/0000000"},
}
class TestTypePatreon(TestCase):
"""OAuth Source tests"""
def setUp(self):
self.source = OAuthSource.objects.create(
name="test",
slug="test",
provider_type="Patreon",
)
self.factory = RequestFactory()
def test_enroll_context(self):
"""Test Patreon Enrollment context"""
ak_context = PatreonOAuthCallback().get_user_enroll_context(PATREON_USER)
self.assertEqual(ak_context["username"], PATREON_USER["data"]["attributes"]["vanity"])
self.assertEqual(ak_context["email"], PATREON_USER["data"]["attributes"]["email"])
self.assertEqual(ak_context["name"], PATREON_USER["data"]["attributes"]["full_name"])

View File

@ -59,6 +59,7 @@ class AppleOAuthClient(OAuth2Client):
"aud": "https://appleid.apple.com",
"sub": parts[0].strip(),
}
# pyright: reportGeneralTypeIssues=false
jwt = encode(payload, self.source.consumer_secret, "ES256", {"kid": parts[2].strip()})
LOGGER.debug("signing payload as secret key", payload=payload, jwt=jwt)
return jwt

View File

@ -1,50 +0,0 @@
"""Patreon OAuth Views"""
from typing import Any
from authentik.sources.oauth.clients.oauth2 import UserprofileHeaderAuthClient
from authentik.sources.oauth.models import OAuthSource
from authentik.sources.oauth.types.registry import SourceType, registry
from authentik.sources.oauth.views.callback import OAuthCallback
from authentik.sources.oauth.views.redirect import OAuthRedirect
class PatreonOAuthRedirect(OAuthRedirect):
"""Patreon OAuth2 Redirect"""
def get_additional_parameters(self, source: OAuthSource): # pragma: no cover
return {
"scope": ["openid", "email", "profile"],
}
class PatreonOAuthCallback(OAuthCallback):
"""Patreon OAuth2 Callback"""
client_class: UserprofileHeaderAuthClient
def get_user_id(self, info: dict[str, str]) -> str:
return info.get("data", {}).get("id")
def get_user_enroll_context(
self,
info: dict[str, Any],
) -> dict[str, Any]:
return {
"username": info.get("data", {}).get("attributes", {}).get("vanity"),
"email": info.get("data", {}).get("attributes", {}).get("email"),
"name": info.get("data", {}).get("attributes", {}).get("full_name"),
}
@registry.register()
class PatreonType(SourceType):
"""OpenIDConnect Type definition"""
callback_view = PatreonOAuthCallback
redirect_view = PatreonOAuthRedirect
name = "Patreon"
slug = "patreon"
authorization_url = "https://www.patreon.com/oauth2/authorize"
access_token_url = "https://www.patreon.com/api/oauth2/token" # nosec
profile_url = "https://www.patreon.com/api/oauth2/api/current_user"

View File

@ -33,6 +33,7 @@ class Command(BaseCommand):
template_context={},
)
try:
# pyright: reportGeneralTypeIssues=false
send_mail(message.__dict__, stage.pk)
finally:
if delete_stage:

View File

@ -114,10 +114,9 @@ class EmailStageView(ChallengeStageView):
user.is_active = True
user.save()
return self.executor.stage_ok()
if not user.is_authenticated:
# We'll only get here if there's no user in the flow plan context
# and no authenticated user either
self.logger.debug("Unauthenticated user", user=user)
if PLAN_CONTEXT_PENDING_USER not in self.executor.plan.context:
self.logger.debug("No pending user")
messages.error(self.request, _("No pending user."))
return self.executor.stage_invalid()
# Check if we've already sent the initial e-mail
if PLAN_CONTEXT_EMAIL_SENT not in self.executor.plan.context:

View File

@ -24,6 +24,7 @@ class TestEmailStageAPI(APITestCase):
def test_validate(self):
"""Test EmailStage's validation"""
self.assertEqual(
# pyright: reportGeneralTypeIssues=false
EmailStageSerializer().validate_template(EmailTemplates.ACCOUNT_CONFIRM),
EmailTemplates.ACCOUNT_CONFIRM,
)

File diff suppressed because it is too large

go.mod
View File

@ -22,14 +22,14 @@ require (
github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484
github.com/nmcclain/ldap v0.0.0-20210720162743-7f8d1e44eeba
github.com/pires/go-proxyproto v0.7.0
github.com/prometheus/client_golang v1.15.1
github.com/prometheus/client_golang v1.15.0
github.com/sirupsen/logrus v1.9.0
github.com/spf13/cobra v1.7.0
github.com/stretchr/testify v1.8.2
goauthentik.io/api/v3 v3.2023041.3
golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
golang.org/x/oauth2 v0.7.0
golang.org/x/sync v0.2.0
golang.org/x/sync v0.1.0
gopkg.in/boj/redistore.v1 v1.0.0-20160128113310-fc113767cd6b
gopkg.in/yaml.v2 v2.4.0
layeh.com/radius v0.0.0-20210819152912-ad72663a72ab

go.sum
View File

@ -278,8 +278,8 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pquerna/cachecontrol v0.0.0-20201205024021-ac21108117ac h1:jWKYCNlX4J5s8M0nHYkh7Y7c9gRVDEb3mq51j5J0F5M=
github.com/pquerna/cachecontrol v0.0.0-20201205024021-ac21108117ac/go.mod h1:hoLfEwdY11HjRfKFH6KqnPsfxlo3BP6bJehpDv8t6sQ=
github.com/prometheus/client_golang v1.15.1 h1:8tXpTmJbyH5lydzFPoxSIJ0J46jdh3tylbvM1xCv0LI=
github.com/prometheus/client_golang v1.15.1/go.mod h1:e9yaBhRPU2pPNsZwE+JdQl0KEt1N9XgF6zxWmaC0xOk=
github.com/prometheus/client_golang v1.15.0 h1:5fCgGYogn0hFdhyhLbw7hEsWxufKtY9klyvdNfFlFhM=
github.com/prometheus/client_golang v1.15.0/go.mod h1:e9yaBhRPU2pPNsZwE+JdQl0KEt1N9XgF6zxWmaC0xOk=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.3.0 h1:UBgGFHqYdG/TPFD1B1ogZywDqEkwp3fBMvqdiQ7Xew4=
github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w=
@ -447,9 +447,8 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.2.0 h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI=
golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=

View File

@ -6,7 +6,6 @@ import (
"github.com/pires/go-proxyproto"
"goauthentik.io/internal/config"
"goauthentik.io/internal/utils"
)
func (ls *LDAPServer) getCertificates(info *tls.ClientHelloInfo) (*tls.Certificate, error) {
@ -39,8 +38,11 @@ func (ls *LDAPServer) getCertificates(info *tls.ClientHelloInfo) (*tls.Certifica
func (ls *LDAPServer) StartLDAPTLSServer() error {
listen := config.Get().Listen.LDAPS
tlsConfig := utils.GetTLSConfig()
tlsConfig.GetCertificate = ls.getCertificates
tlsConfig := &tls.Config{
MinVersion: tls.VersionTLS12,
MaxVersion: tls.VersionTLS12,
GetCertificate: ls.getCertificates,
}
ln, err := net.Listen("tcp", listen)
if err != nil {

View File

@ -18,7 +18,6 @@ import (
"goauthentik.io/internal/outpost/ldap/search"
"goauthentik.io/internal/outpost/ldap/server"
"goauthentik.io/internal/outpost/ldap/utils"
"goauthentik.io/internal/outpost/ldap/utils/paginator"
)
type DirectSearcher struct {
@ -125,10 +124,15 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
return nil
}
u := paginator.FetchUsers(searchReq)
u, _, err := searchReq.Execute()
uapisp.Finish()
users = &u
if err != nil {
req.Log().WithError(err).Warning("failed to get users")
return err
}
users = &u.Results
} else {
if flags.UserInfo == nil {
uapisp := sentry.StartSpan(errCtx, "authentik.providers.ldap.search.api_user")
@ -166,24 +170,29 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
searchReq = searchReq.MembersByPk([]int32{flags.UserPk})
}
g := paginator.FetchGroups(searchReq)
g, _, err := searchReq.Execute()
gapisp.Finish()
req.Log().WithField("count", len(g)).Trace("Got results from API")
if err != nil {
req.Log().WithError(err).Warning("failed to get groups")
return err
}
req.Log().WithField("count", len(g.Results)).Trace("Got results from API")
if !flags.CanSearch {
for i, results := range g {
for i, results := range g.Results {
// If they can't search, remove any users from the group results except the one we're looking for.
g[i].Users = []int32{flags.UserPk}
g.Results[i].Users = []int32{flags.UserPk}
for _, u := range results.UsersObj {
if u.Pk == flags.UserPk {
g[i].UsersObj = []api.GroupMember{u}
g.Results[i].UsersObj = []api.GroupMember{u}
break
}
}
}
}
groups = &g
groups = &g.Results
return nil
})
}

View File

@ -0,0 +1,63 @@
package memory
import (
"context"
"goauthentik.io/api/v3"
)
const pageSize = 100
func (ms *MemorySearcher) FetchUsers() []api.User {
fetchUsersOffset := func(page int) (*api.PaginatedUserList, error) {
users, _, err := ms.si.GetAPIClient().CoreApi.CoreUsersList(context.TODO()).Page(int32(page)).PageSize(pageSize).Execute()
if err != nil {
ms.log.WithError(err).Warning("failed to update users")
return nil, err
}
ms.log.WithField("page", page).WithField("count", len(users.Results)).Debug("fetched users")
return users, nil
}
page := 1
users := make([]api.User, 0)
for {
apiUsers, err := fetchUsersOffset(page)
if err != nil {
return users
}
users = append(users, apiUsers.Results...)
if apiUsers.Pagination.Next > 0 {
page += 1
} else {
break
}
}
return users
}
func (ms *MemorySearcher) FetchGroups() []api.Group {
fetchGroupsOffset := func(page int) (*api.PaginatedGroupList, error) {
groups, _, err := ms.si.GetAPIClient().CoreApi.CoreGroupsList(context.TODO()).Page(int32(page)).PageSize(pageSize).Execute()
if err != nil {
ms.log.WithError(err).Warning("failed to update groups")
return nil, err
}
ms.log.WithField("page", page).WithField("count", len(groups.Results)).Debug("fetched groups")
return groups, nil
}
page := 1
groups := make([]api.Group, 0)
for {
apiGroups, err := fetchGroupsOffset(page)
if err != nil {
return groups
}
groups = append(groups, apiGroups.Results...)
if apiGroups.Pagination.Next > 0 {
page += 1
} else {
break
}
}
return groups
}
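
FetchUsers and FetchGroups above differ only in the endpoint they call and the element type they accumulate; the page-walking loop is identical. A minimal sketch of sharing that loop with a Go generic helper follows; the fetchPage signature and its (items, hasNext, error) contract are assumptions modelled on the paginated responses used above, not part of this change.

package memory

// fetchAllPages walks pages starting at 1 and accumulates results until the
// callback reports that no further page exists. On an error it returns what
// has been collected so far, mirroring the behaviour of the functions above.
func fetchAllPages[T any](fetchPage func(page int) (items []T, hasNext bool, err error)) []T {
	out := make([]T, 0)
	for page := 1; ; page++ {
		items, hasNext, err := fetchPage(page)
		if err != nil {
			return out
		}
		out = append(out, items...)
		if !hasNext {
			return out
		}
	}
}

A caller would wrap each generated-client call in a closure that returns the page's Results and whether Pagination.Next is set; whether that indirection reads better than the two explicit loops above is a matter of taste, and this change keeps the explicit versions.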

View File

@ -1,7 +1,6 @@
package memory
import (
"context"
"errors"
"fmt"
"strings"
@ -17,7 +16,6 @@ import (
"goauthentik.io/internal/outpost/ldap/search"
"goauthentik.io/internal/outpost/ldap/server"
"goauthentik.io/internal/outpost/ldap/utils"
"goauthentik.io/internal/outpost/ldap/utils/paginator"
)
type MemorySearcher struct {
@ -34,8 +32,8 @@ func NewMemorySearcher(si server.LDAPServerInstance) *MemorySearcher {
log: log.WithField("logger", "authentik.outpost.ldap.searcher.memory"),
}
ms.log.Debug("initialised memory searcher")
ms.users = paginator.FetchUsers(ms.si.GetAPIClient().CoreApi.CoreUsersList(context.TODO()))
ms.groups = paginator.FetchGroups(ms.si.GetAPIClient().CoreApi.CoreGroupsList(context.TODO()))
ms.users = ms.FetchUsers()
ms.groups = ms.FetchGroups()
return ms
}

View File

@ -1,64 +0,0 @@
package paginator
import (
log "github.com/sirupsen/logrus"
"goauthentik.io/api/v3"
)
const PageSize = 100
func FetchUsers(req api.ApiCoreUsersListRequest) []api.User {
fetchUsersOffset := func(page int) (*api.PaginatedUserList, error) {
users, _, err := req.Page(int32(page)).PageSize(PageSize).Execute()
if err != nil {
log.WithError(err).Warning("failed to update users")
return nil, err
}
log.WithField("page", page).WithField("count", len(users.Results)).Debug("fetched users")
return users, nil
}
page := 1
users := make([]api.User, 0)
for {
apiUsers, err := fetchUsersOffset(page)
if err != nil {
log.WithError(err).WithField("page", page).Warn("Failed to fetch user page")
continue
}
users = append(users, apiUsers.Results...)
if apiUsers.Pagination.Next > 0 {
page += 1
} else {
break
}
}
return users
}
func FetchGroups(req api.ApiCoreGroupsListRequest) []api.Group {
fetchGroupsOffset := func(page int) (*api.PaginatedGroupList, error) {
groups, _, err := req.Page(int32(page)).PageSize(PageSize).Execute()
if err != nil {
log.WithError(err).Warning("failed to update groups")
return nil, err
}
log.WithField("page", page).WithField("count", len(groups.Results)).Debug("fetched groups")
return groups, nil
}
page := 1
groups := make([]api.Group, 0)
for {
apiGroups, err := fetchGroupsOffset(page)
if err != nil {
log.WithError(err).WithField("page", page).Warn("Failed to fetch group page")
continue
}
groups = append(groups, apiGroups.Results...)
if apiGroups.Pagination.Next > 0 {
page += 1
} else {
break
}
}
return groups
}
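
One difference from the memory-searcher variant above: when a page fetch fails here, the loop continues with the same page number, so a persistently failing API call retries that page without bound. A minimal sketch of capping those retries follows; the helper name, the retry count, and the fixed one-second backoff are assumptions for illustration, not values from this change.

package paginator

import "time"

// fetchWithRetry retries a single page a bounded number of times and then
// returns the last error to the caller instead of looping forever.
func fetchWithRetry[T any](page int, maxRetries int, fetch func(int) ([]T, error)) ([]T, error) {
	var lastErr error
	for attempt := 0; attempt < maxRetries; attempt++ {
		items, err := fetch(page)
		if err == nil {
			return items, nil
		}
		lastErr = err
		time.Sleep(time.Second)
	}
	return nil, lastErr
}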

View File

@ -18,7 +18,6 @@ import (
"goauthentik.io/internal/outpost/ak"
"goauthentik.io/internal/outpost/proxyv2/application"
"goauthentik.io/internal/outpost/proxyv2/metrics"
"goauthentik.io/internal/utils"
sentryutils "goauthentik.io/internal/utils/sentry"
"goauthentik.io/internal/utils/web"
)
@ -130,8 +129,11 @@ func (ps *ProxyServer) ServeHTTP() {
// ServeHTTPS constructs a net.Listener and starts handling HTTPS requests
func (ps *ProxyServer) ServeHTTPS() {
listenAddress := config.Get().Listen.HTTPS
tlsConfig := utils.GetTLSConfig()
tlsConfig.GetCertificate = ps.getCertificates
config := &tls.Config{
MinVersion: tls.VersionTLS12,
MaxVersion: tls.VersionTLS12,
GetCertificate: ps.getCertificates,
}
ln, err := net.Listen("tcp", listenAddress)
if err != nil {
@ -141,7 +143,7 @@ func (ps *ProxyServer) ServeHTTPS() {
proxyListener := &proxyproto.Listener{Listener: web.TCPKeepAliveListener{TCPListener: ln.(*net.TCPListener)}}
defer proxyListener.Close()
tlsListener := tls.NewListener(proxyListener, tlsConfig)
tlsListener := tls.NewListener(proxyListener, config)
ps.log.WithField("listen", listenAddress).Info("Starting HTTPS server")
ps.serve(tlsListener)
ps.log.WithField("listen", listenAddress).Info("Stopping HTTPS server")

View File

@ -1,26 +0,0 @@
package utils
import "crypto/tls"
func GetTLSConfig() *tls.Config {
tlsConfig := &tls.Config{
MinVersion: tls.VersionTLS12,
MaxVersion: tls.VersionTLS12,
}
// Insecure SWEET32 attack ciphers, TLS config uses a fallback
insecureCiphersIds := []uint16{
tls.TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA,
tls.TLS_RSA_WITH_3DES_EDE_CBC_SHA,
}
defaultSecureCiphers := []uint16{}
for _, cs := range tls.CipherSuites() {
for _, icsId := range insecureCiphersIds {
if cs.ID != icsId {
defaultSecureCiphers = append(defaultSecureCiphers, cs.ID)
}
}
}
tlsConfig.CipherSuites = defaultSecureCiphers
return tlsConfig
}
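
Note that the filter loop in GetTLSConfig appends a suite once per insecure ID it does not match, so entries are duplicated and a suite equal to one of the listed IDs would still be appended when compared against the other. A minimal corrected sketch of the intended filter (exclude the SWEET32-vulnerable 3DES suites from Go's default list) follows; it is an illustration of the idea, not code taken from this change.

package utils

import "crypto/tls"

// GetTLSConfig returns a TLS 1.2 config whose cipher list is Go's default
// secure set with the 3DES suites (vulnerable to SWEET32) filtered out.
func GetTLSConfig() *tls.Config {
	insecure := map[uint16]bool{
		tls.TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA: true,
		tls.TLS_RSA_WITH_3DES_EDE_CBC_SHA:       true,
	}
	ciphers := []uint16{}
	for _, cs := range tls.CipherSuites() {
		if !insecure[cs.ID] {
			ciphers = append(ciphers, cs.ID)
		}
	}
	return &tls.Config{
		MinVersion:   tls.VersionTLS12,
		MaxVersion:   tls.VersionTLS12,
		CipherSuites: ciphers,
	}
}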

View File

@ -7,7 +7,6 @@ import (
"github.com/pires/go-proxyproto"
"goauthentik.io/internal/config"
"goauthentik.io/internal/crypto"
"goauthentik.io/internal/utils"
"goauthentik.io/internal/utils/web"
)
@ -36,8 +35,11 @@ func (ws *WebServer) GetCertificate() func(ch *tls.ClientHelloInfo) (*tls.Certif
// ServeHTTPS constructs a net.Listener and starts handling HTTPS requests
func (ws *WebServer) listenTLS() {
tlsConfig := utils.GetTLSConfig()
tlsConfig.GetCertificate = ws.GetCertificate()
tlsConfig := &tls.Config{
MinVersion: tls.VersionTLS12,
MaxVersion: tls.VersionTLS12,
GetCertificate: ws.GetCertificate(),
}
ln, err := net.Listen("tcp", config.Get().Listen.HTTPS)
if err != nil {

View File

@ -1,5 +1,5 @@
# Stage 1: Build
FROM docker.io/golang:1.20.4-bullseye AS builder
FROM docker.io/golang:1.20.3-bullseye AS builder
WORKDIR /go/src/goauthentik.io

View File

@ -69,6 +69,7 @@ if __name__ == "__main__":
if not spec:
continue
mod = module_from_spec(spec)
# pyright: reportGeneralTypeIssues=false
spec.loader.exec_module(mod)
for name, sub in getmembers(mod, isclass):

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

poetry.lock (generated, 236 changed lines)
View File

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
# This file is automatically @generated by Poetry and should not be changed by hand.
[[package]]
name = "aiohttp"
@ -262,14 +262,14 @@ files = [
[[package]]
name = "astroid"
version = "2.15.4"
version = "2.15.2"
description = "An abstract syntax tree for Python with inference support."
category = "dev"
optional = false
python-versions = ">=3.7.2"
files = [
{file = "astroid-2.15.4-py3-none-any.whl", hash = "sha256:a1b8543ef9d36ea777194bc9b17f5f8678d2c56ee6a45b2c2f17eec96f242347"},
{file = "astroid-2.15.4.tar.gz", hash = "sha256:c81e1c7fbac615037744d067a9bb5f9aeb655edf59b63ee8b59585475d6f80d8"},
{file = "astroid-2.15.2-py3-none-any.whl", hash = "sha256:dea89d9f99f491c66ac9c04ebddf91e4acf8bd711722175fe6245c0725cc19bb"},
{file = "astroid-2.15.2.tar.gz", hash = "sha256:6e61b85c891ec53b07471aec5878f4ac6446a41e590ede0f2ce095f39f7d49dd"},
]
[package.dependencies]
@ -878,63 +878,63 @@ files = [
[[package]]
name = "coverage"
version = "7.2.5"
version = "7.2.3"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "coverage-7.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c"},
{file = "coverage-7.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a"},
{file = "coverage-7.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f"},
{file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a"},
{file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a"},
{file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11"},
{file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5"},
{file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c"},
{file = "coverage-7.2.5-cp310-cp310-win32.whl", hash = "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5"},
{file = "coverage-7.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c"},
{file = "coverage-7.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce"},
{file = "coverage-7.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88"},
{file = "coverage-7.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e"},
{file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2"},
{file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139"},
{file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8"},
{file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed"},
{file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6"},
{file = "coverage-7.2.5-cp311-cp311-win32.whl", hash = "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b"},
{file = "coverage-7.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068"},
{file = "coverage-7.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1"},
{file = "coverage-7.2.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33"},
{file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade"},
{file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5"},
{file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47"},
{file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd"},
{file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969"},
{file = "coverage-7.2.5-cp37-cp37m-win32.whl", hash = "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718"},
{file = "coverage-7.2.5-cp37-cp37m-win_amd64.whl", hash = "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0"},
{file = "coverage-7.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84"},
{file = "coverage-7.2.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790"},
{file = "coverage-7.2.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771"},
{file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045"},
{file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614"},
{file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3"},
{file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd"},
{file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1"},
{file = "coverage-7.2.5-cp38-cp38-win32.whl", hash = "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813"},
{file = "coverage-7.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212"},
{file = "coverage-7.2.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b"},
{file = "coverage-7.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200"},
{file = "coverage-7.2.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5"},
{file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e"},
{file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303"},
{file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3"},
{file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a"},
{file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1"},
{file = "coverage-7.2.5-cp39-cp39-win32.whl", hash = "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31"},
{file = "coverage-7.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252"},
{file = "coverage-7.2.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3"},
{file = "coverage-7.2.5.tar.gz", hash = "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47"},
{file = "coverage-7.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e58c0d41d336569d63d1b113bd573db8363bc4146f39444125b7f8060e4e04f5"},
{file = "coverage-7.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:344e714bd0fe921fc72d97404ebbdbf9127bac0ca1ff66d7b79efc143cf7c0c4"},
{file = "coverage-7.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974bc90d6f6c1e59ceb1516ab00cf1cdfbb2e555795d49fa9571d611f449bcb2"},
{file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0743b0035d4b0e32bc1df5de70fba3059662ace5b9a2a86a9f894cfe66569013"},
{file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d0391fb4cfc171ce40437f67eb050a340fdbd0f9f49d6353a387f1b7f9dd4fa"},
{file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a42e1eff0ca9a7cb7dc9ecda41dfc7cbc17cb1d02117214be0561bd1134772b"},
{file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:be19931a8dcbe6ab464f3339966856996b12a00f9fe53f346ab3be872d03e257"},
{file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72fcae5bcac3333a4cf3b8f34eec99cea1187acd55af723bcbd559adfdcb5535"},
{file = "coverage-7.2.3-cp310-cp310-win32.whl", hash = "sha256:aeae2aa38395b18106e552833f2a50c27ea0000122bde421c31d11ed7e6f9c91"},
{file = "coverage-7.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:83957d349838a636e768251c7e9979e899a569794b44c3728eaebd11d848e58e"},
{file = "coverage-7.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfd393094cd82ceb9b40df4c77976015a314b267d498268a076e940fe7be6b79"},
{file = "coverage-7.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182eb9ac3f2b4874a1f41b78b87db20b66da6b9cdc32737fbbf4fea0c35b23fc"},
{file = "coverage-7.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb1e77a9a311346294621be905ea8a2c30d3ad371fc15bb72e98bfcfae532df"},
{file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca0f34363e2634deffd390a0fef1aa99168ae9ed2af01af4a1f5865e362f8623"},
{file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55416d7385774285b6e2a5feca0af9652f7f444a4fa3d29d8ab052fafef9d00d"},
{file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06ddd9c0249a0546997fdda5a30fbcb40f23926df0a874a60a8a185bc3a87d93"},
{file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fff5aaa6becf2c6a1699ae6a39e2e6fb0672c2d42eca8eb0cafa91cf2e9bd312"},
{file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ea53151d87c52e98133eb8ac78f1206498c015849662ca8dc246255265d9c3c4"},
{file = "coverage-7.2.3-cp311-cp311-win32.whl", hash = "sha256:8f6c930fd70d91ddee53194e93029e3ef2aabe26725aa3c2753df057e296b925"},
{file = "coverage-7.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:fa546d66639d69aa967bf08156eb8c9d0cd6f6de84be9e8c9819f52ad499c910"},
{file = "coverage-7.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2317d5ed777bf5a033e83d4f1389fd4ef045763141d8f10eb09a7035cee774c"},
{file = "coverage-7.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be9824c1c874b73b96288c6d3de793bf7f3a597770205068c6163ea1f326e8b9"},
{file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3b2803e730dc2797a017335827e9da6da0e84c745ce0f552e66400abdfb9a1"},
{file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f69770f5ca1994cb32c38965e95f57504d3aea96b6c024624fdd5bb1aa494a1"},
{file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1127b16220f7bfb3f1049ed4a62d26d81970a723544e8252db0efde853268e21"},
{file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:aa784405f0c640940595fa0f14064d8e84aff0b0f762fa18393e2760a2cf5841"},
{file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3146b8e16fa60427e03884301bf8209221f5761ac754ee6b267642a2fd354c48"},
{file = "coverage-7.2.3-cp37-cp37m-win32.whl", hash = "sha256:1fd78b911aea9cec3b7e1e2622c8018d51c0d2bbcf8faaf53c2497eb114911c1"},
{file = "coverage-7.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f3736a5d34e091b0a611964c6262fd68ca4363df56185902528f0b75dbb9c1f"},
{file = "coverage-7.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:981b4df72c93e3bc04478153df516d385317628bd9c10be699c93c26ddcca8ab"},
{file = "coverage-7.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0045f8f23a5fb30b2eb3b8a83664d8dc4fb58faddf8155d7109166adb9f2040"},
{file = "coverage-7.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f760073fcf8f3d6933178d67754f4f2d4e924e321f4bb0dcef0424ca0215eba1"},
{file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c86bd45d1659b1ae3d0ba1909326b03598affbc9ed71520e0ff8c31a993ad911"},
{file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:172db976ae6327ed4728e2507daf8a4de73c7cc89796483e0a9198fd2e47b462"},
{file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2a3a6146fe9319926e1d477842ca2a63fe99af5ae690b1f5c11e6af074a6b5c"},
{file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f649dd53833b495c3ebd04d6eec58479454a1784987af8afb77540d6c1767abd"},
{file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c4ed4e9f3b123aa403ab424430b426a1992e6f4c8fd3cb56ea520446e04d152"},
{file = "coverage-7.2.3-cp38-cp38-win32.whl", hash = "sha256:eb0edc3ce9760d2f21637766c3aa04822030e7451981ce569a1b3456b7053f22"},
{file = "coverage-7.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:63cdeaac4ae85a179a8d6bc09b77b564c096250d759eed343a89d91bce8b6367"},
{file = "coverage-7.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:20d1a2a76bb4eb00e4d36b9699f9b7aba93271c9c29220ad4c6a9581a0320235"},
{file = "coverage-7.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ea748802cc0de4de92ef8244dd84ffd793bd2e7be784cd8394d557a3c751e21"},
{file = "coverage-7.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b154aba06df42e4b96fc915512ab39595105f6c483991287021ed95776d934"},
{file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd214917cabdd6f673a29d708574e9fbdb892cb77eb426d0eae3490d95ca7859"},
{file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2e58e45fe53fab81f85474e5d4d226eeab0f27b45aa062856c89389da2f0d9"},
{file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:87ecc7c9a1a9f912e306997ffee020297ccb5ea388421fe62a2a02747e4d5539"},
{file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:387065e420aed3c71b61af7e82c7b6bc1c592f7e3c7a66e9f78dd178699da4fe"},
{file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ea3f5bc91d7d457da7d48c7a732beaf79d0c8131df3ab278e6bba6297e23c6c4"},
{file = "coverage-7.2.3-cp39-cp39-win32.whl", hash = "sha256:ae7863a1d8db6a014b6f2ff9c1582ab1aad55a6d25bac19710a8df68921b6e30"},
{file = "coverage-7.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:3f04becd4fcda03c0160d0da9c8f0c246bc78f2f7af0feea1ec0930e7c93fa4a"},
{file = "coverage-7.2.3-pp37.pp38.pp39-none-any.whl", hash = "sha256:965ee3e782c7892befc25575fa171b521d33798132692df428a09efacaffe8d0"},
{file = "coverage-7.2.3.tar.gz", hash = "sha256:d298c2815fa4891edd9abe5ad6e6cb4207104c7dd9fd13aea3fdebf6f9b91259"},
]
[package.extras]
@ -1083,14 +1083,14 @@ bcrypt = ["bcrypt"]
[[package]]
name = "django-filter"
version = "23.2"
version = "23.1"
description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "django-filter-23.2.tar.gz", hash = "sha256:2fe15f78108475eda525692813205fa6f9e8c1caf1ae65daa5862d403c6dbf00"},
{file = "django_filter-23.2-py3-none-any.whl", hash = "sha256:d12d8e0fc6d3eb26641e553e5d53b191eb8cec611427d4bdce0becb1f7c172b5"},
{file = "django-filter-23.1.tar.gz", hash = "sha256:dee5dcf2cea4d7f767e271b6d01f767fce7500676d5e5dc58dac8154000b87df"},
{file = "django_filter-23.1-py3-none-any.whl", hash = "sha256:e3c52ad83c32fb5882125105efb5fea2a1d6a85e7dc64b04ef52edbf14451b6c"},
]
[package.dependencies]
@ -1146,14 +1146,14 @@ qrcode = ["qrcode"]
[[package]]
name = "django-prometheus"
version = "2.3.1"
version = "2.2.0"
description = "Django middlewares to monitor your application with Prometheus.io."
category = "main"
optional = false
python-versions = "*"
files = [
{file = "django-prometheus-2.3.1.tar.gz", hash = "sha256:f9c8b6c780c9419ea01043c63a437d79db2c33353451347894408184ad9c3e1e"},
{file = "django_prometheus-2.3.1-py2.py3-none-any.whl", hash = "sha256:cf9b26f7ba2e4568f08f8f91480a2882023f5908579681bcf06a4d2465f12168"},
{file = "django-prometheus-2.2.0.tar.gz", hash = "sha256:240378a1307c408bd5fc85614a3a57f1ce633d4a222c9e291e2bbf325173b801"},
{file = "django_prometheus-2.2.0-py2.py3-none-any.whl", hash = "sha256:e6616770d8820b8834762764bf1b76ec08e1b98e72a6f359d488a2e15fe3537c"},
]
[package.dependencies]
@ -1272,30 +1272,6 @@ websocket-client = ">=0.32.0"
[package.extras]
ssh = ["paramiko (>=2.4.3)"]
[[package]]
name = "drf-jsonschema-serializer"
version = "1.0.0"
description = "JSON Schema support for Django REST Framework"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "drf-jsonschema-serializer-1.0.0.tar.gz", hash = "sha256:aa58d03deba5a936bc0b0dbca4b69ee902886b7a0be130797f1d5e741b92e42b"},
{file = "drf_jsonschema_serializer-1.0.0-py3-none-any.whl", hash = "sha256:06401c94f1a2610797a26c390b701504b90b6b44683932daccbc250ea2aad3b1"},
]
[package.dependencies]
django = ">=3.2"
djangorestframework = ">=3.13"
jsonschema = ">=4.0.0"
[package.extras]
all-format-validators = ["fqdn", "idna", "isoduration", "jsonpointer", "rfc3339-validator", "rfc3987", "uri-template", "webcolors"]
coverage = ["pytest-cov"]
docs = ["sphinx", "sphinx-rtd-theme"]
release = ["bump2version", "twine"]
tests = ["black", "django-stubs[compatible-mypy]", "djangorestframework-stubs[compatible-mypy]", "flake8", "fqdn", "idna", "isoduration", "isort", "jsonpointer", "mypy", "pytest", "pytest-django", "rfc3339-validator", "rfc3987", "tox", "types-jsonschema", "uri-template", "webcolors"]
[[package]]
name = "drf-spectacular"
version = "0.26.2"
@ -1337,14 +1313,14 @@ files = [
[[package]]
name = "duo-client"
version = "5.0.1"
version = "4.7.1"
description = "Reference client for Duo Security APIs"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "duo_client-5.0.1-py2.py3-none-any.whl", hash = "sha256:8159b93a516979ae18e985bbca46ba3bf8ec27cd7ff6db464b37fa667d49c9b5"},
{file = "duo_client-5.0.1.tar.gz", hash = "sha256:a13c47b0cb91e3a0d33b2f99271a0a6c2de17f92dd0ad4c649a366a0533db0c7"},
{file = "duo_client-4.7.1-py2.py3-none-any.whl", hash = "sha256:35f1ea54111fa1304127e4d169bf553aee826db62b33c972c45f65a02756c820"},
{file = "duo_client-4.7.1.tar.gz", hash = "sha256:f859b5d5b434daf46674c74fd7c6f2d34002df3c54e519e64a284aeb491226ef"},
]
[package.dependencies]
@ -2643,18 +2619,18 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
[[package]]
name = "pylint"
version = "2.17.3"
version = "2.17.2"
description = "python code static checker"
category = "dev"
optional = false
python-versions = ">=3.7.2"
files = [
{file = "pylint-2.17.3-py3-none-any.whl", hash = "sha256:a6cbb4c6e96eab4a3c7de7c6383c512478f58f88d95764507d84c899d656a89a"},
{file = "pylint-2.17.3.tar.gz", hash = "sha256:761907349e699f8afdcd56c4fe02f3021ab5b3a0fc26d19a9bfdc66c7d0d5cd5"},
{file = "pylint-2.17.2-py3-none-any.whl", hash = "sha256:001cc91366a7df2970941d7e6bbefcbf98694e00102c1f121c531a814ddc2ea8"},
{file = "pylint-2.17.2.tar.gz", hash = "sha256:1b647da5249e7c279118f657ca28b6aaebb299f86bf92affc632acf199f7adbb"},
]
[package.dependencies]
astroid = ">=2.15.4,<=2.17.0-dev0"
astroid = ">=2.15.2,<=2.17.0-dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
dill = {version = ">=0.3.6", markers = "python_version >= \"3.11\""}
isort = ">=4.2.5,<6"
@ -2850,14 +2826,14 @@ testing = ["Django", "django-configurations (>=2.0)"]
[[package]]
name = "pytest-github-actions-annotate-failures"
version = "0.2.0"
version = "0.1.8"
description = "pytest plugin to annotate failed tests with a workflow command for GitHub Actions"
category = "dev"
optional = false
python-versions = ">=3.7"
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*"
files = [
{file = "pytest-github-actions-annotate-failures-0.2.0.tar.gz", hash = "sha256:844ab626d389496e44f960b42f0a72cce29ae06d363426d17ea9ae1b4bef2288"},
{file = "pytest_github_actions_annotate_failures-0.2.0-py3-none-any.whl", hash = "sha256:8bcef65fed503faaa0524b59cfeccc8995130972dd7b008d64193cc41b9cde85"},
{file = "pytest-github-actions-annotate-failures-0.1.8.tar.gz", hash = "sha256:2d6e6cb5f8d0aae4a27a20cc4e20fabd3199a121c57f44bc48fe28e372e0be23"},
{file = "pytest_github_actions_annotate_failures-0.1.8-py2.py3-none-any.whl", hash = "sha256:6a882ff21672fa79deae8d917eb965a6bde2b25191e7632e1adfc23ffac008ab"},
]
[package.dependencies]
@ -3110,29 +3086,29 @@ pyasn1 = ">=0.1.3"
[[package]]
name = "ruff"
version = "0.0.264"
version = "0.0.262"
description = "An extremely fast Python linter, written in Rust."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.0.264-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:ec2fa192c035b8b68cc2b91049c561cd69543e2b8c4d157d9aa7727320bedcca"},
{file = "ruff-0.0.264-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d97ba8db0fb601ffe9ee996ebb97c698e427a2fd4514fefbe7b803111354f783"},
{file = "ruff-0.0.264-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4564e0f245eb515c6ed63988c21e9c40bcfd485cd1ec63bdd790f9a81d301f15"},
{file = "ruff-0.0.264-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:323ae6c1702b26c96d0fbf939c5959c37e79021f86b70f63634df918bc77f36e"},
{file = "ruff-0.0.264-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18a29ed37bf8cfe6dce8a2db56c313a64c0804095108753621f3c3321e0c9c5f"},
{file = "ruff-0.0.264-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d628de91e2be7a83128526636097d2dd890669a06143f826f6c591d79aeefbc4"},
{file = "ruff-0.0.264-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c6eb4f979b661a2dd850d9ac803842bb7b66d4926de84f09c787af82590f73"},
{file = "ruff-0.0.264-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04ec5d75e4bca754cedd20d53e2ba4920d6259e7579abfb2e8e30c3c80e41b17"},
{file = "ruff-0.0.264-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71fd865ebacc1083259b3fb7e3eb45235a86e62e21830b8a6b067be0ec54aa2e"},
{file = "ruff-0.0.264-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:cd4f60ffc3eb15802c554a9c8581bf2117c4d3d06fbc57e0ba58f04cb1aaa47f"},
{file = "ruff-0.0.264-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:05ee163a046fc593d150179d23f4af447fb82f3e59cd34e031ea0868c65bb8e8"},
{file = "ruff-0.0.264-py3-none-musllinux_1_2_i686.whl", hash = "sha256:484e395d1984ab9e1e66bd42e7a5192decfee86998d07d36ee50b2fadccc8734"},
{file = "ruff-0.0.264-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:67326fdc9ac0a1b13e229c6e24e8d115863c52cd710faaaaa588851535281d6c"},
{file = "ruff-0.0.264-py3-none-win32.whl", hash = "sha256:5a8658ebcc37d62f72840cbdf564171c1a2b6831db482b4d917962541a2f4a44"},
{file = "ruff-0.0.264-py3-none-win_amd64.whl", hash = "sha256:068a82a29d80848a56e3d9d4308e6e0ca8b2ecdaf5ac342a292545a59b7f2c21"},
{file = "ruff-0.0.264-py3-none-win_arm64.whl", hash = "sha256:3e2c38449548e122f2612843a7c04e22b4fd491656955c57b8cb05df11639ad6"},
{file = "ruff-0.0.264.tar.gz", hash = "sha256:8fcd4b693ca1374eb7a5796581c90689f884f98f388740d94f0702fd30f8f78f"},
{file = "ruff-0.0.262-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:c26c1abd420d041592d05d63aee8c6a18feb24aed4deb6e91129e9f2c7b4914a"},
{file = "ruff-0.0.262-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b379e9765afa679316e52288a942df085e590862f8945088936a7bce3116d8f3"},
{file = "ruff-0.0.262-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b7e0ca6821aafbd2b059df3119fcd5881250721ca8e825789fd2c471f7c59be"},
{file = "ruff-0.0.262-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cca35e2aeddff72bb4379a1dabc134e0c0d25ebc754a2cb733a1f8d4dbbb5e0"},
{file = "ruff-0.0.262-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15bf5533ce169aebbafa00017987f673e879f60a625d932b464b8cdaf32a4fce"},
{file = "ruff-0.0.262-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3909e249d984c4517194005a1c30eaa0c3a6d906c789d9fc0c9c7e007fb3e759"},
{file = "ruff-0.0.262-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e2813013a19b3e147e840bdb2e42db5825b53b47364e58e7b467c5fa47ffda2"},
{file = "ruff-0.0.262-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d25a94996b2037e566c2a801c8b324c0a826194d5d4d90ad7c1ccb8cf06521fa"},
{file = "ruff-0.0.262-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ca04348372efc59f6ee808d903d35e0d352cf2c78e487757cd48b65104b83e"},
{file = "ruff-0.0.262-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:24f989363e9bb5d0283490298102a5218682e49ebf300e445d69e24bee03ac83"},
{file = "ruff-0.0.262-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:3c24e678e43ca4b67e29cc9a7a54eea05f31a5898cbf17bfec47b68f08d32a60"},
{file = "ruff-0.0.262-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0baff3c9a22227358ea109c165efe62dbdd0f2b9fd5256567dda8682b444fe23"},
{file = "ruff-0.0.262-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:083bac6e238d8b7d5ac3618666ea63b7ac661cf94c5da160070a58e190082831"},
{file = "ruff-0.0.262-py3-none-win32.whl", hash = "sha256:15bbfa2d15c137717627e0d56b0e535ae297b734551e34e03fcc25d7642cf43a"},
{file = "ruff-0.0.262-py3-none-win_amd64.whl", hash = "sha256:973ac29193f718349cf5746b7d86dfeaf7d40e9651ed97790a9b9327305888b9"},
{file = "ruff-0.0.262-py3-none-win_arm64.whl", hash = "sha256:f102904ebe395acd2a181d295b98120acd7a63f732b691672977fc688674f4af"},
{file = "ruff-0.0.262.tar.gz", hash = "sha256:faea54231c265f5349975ba6f3d855b71881a01f391b2000c47740390c6d5f68"},
]
[[package]]
@ -3155,14 +3131,14 @@ urllib3 = {version = ">=1.26,<2.0", extras = ["socks"]}
[[package]]
name = "sentry-sdk"
version = "1.21.1"
version = "1.20.0"
description = "Python client for Sentry (https://sentry.io)"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "sentry-sdk-1.21.1.tar.gz", hash = "sha256:99c15556a23621be9f18c2955f7ce63321713bf1c0ad396b27b61399bac5f458"},
{file = "sentry_sdk-1.21.1-py2.py3-none-any.whl", hash = "sha256:092888f3abf7a2ea78f0bfcefc3e0465caee2b6f0efb26f538ccc60f95dca179"},
{file = "sentry-sdk-1.20.0.tar.gz", hash = "sha256:a3410381ae769a436c0852cce140a5e5e49f566a07fb7c2ab445af1302f6ad89"},
{file = "sentry_sdk-1.20.0-py2.py3-none-any.whl", hash = "sha256:0ad6bbbe78057b8031a07de7aca6d2a83234e51adc4d436eaf8d8c697184db71"},
]
[package.dependencies]
@ -3437,14 +3413,14 @@ wsproto = ">=0.14"
[[package]]
name = "twilio"
version = "8.2.0"
version = "8.1.0"
description = "Twilio API client and TwiML generator"
category = "main"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "twilio-8.2.0-py2.py3-none-any.whl", hash = "sha256:23eceaec183995fc827e3bfad229cca6e1944bfd9604e57e2712e625b6e01223"},
{file = "twilio-8.2.0.tar.gz", hash = "sha256:0c19eb6a5b84dbcd15658e23a142df026297236e4d72ad9304fd95e7dbff2662"},
{file = "twilio-8.1.0-py2.py3-none-any.whl", hash = "sha256:19be48f21e799b9dd10e2e0a5633962438e04842864e806409f4f2dbe446a868"},
{file = "twilio-8.1.0.tar.gz", hash = "sha256:a31863119655cd3643f788099f6ea3fe74eea59ce3f65600f9a4931301311c08"},
]
[package.dependencies]
@ -3622,14 +3598,14 @@ files = [
[[package]]
name = "uvicorn"
version = "0.22.0"
version = "0.21.1"
description = "The lightning-fast ASGI server."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "uvicorn-0.22.0-py3-none-any.whl", hash = "sha256:e9434d3bbf05f310e762147f769c9f21235ee118ba2d2bf1155a7196448bd996"},
{file = "uvicorn-0.22.0.tar.gz", hash = "sha256:79277ae03db57ce7d9aa0567830bbb51d7a612f54d6e1e3e92da3ef24c2c8ed8"},
{file = "uvicorn-0.21.1-py3-none-any.whl", hash = "sha256:e47cac98a6da10cd41e6fd036d472c6f58ede6c5dbee3dbee3ef7a100ed97742"},
{file = "uvicorn-0.21.1.tar.gz", hash = "sha256:0fac9cb342ba099e0d582966005f3fdba5b0290579fed4a6266dc702ca7bb032"},
]
[package.dependencies]
@ -3788,22 +3764,22 @@ files = [
[[package]]
name = "webauthn"
version = "1.8.1"
version = "1.8.0"
description = "Pythonic WebAuthn"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "webauthn-1.8.1-py3-none-any.whl", hash = "sha256:53914b8529df3e70bec0e41438eea8d99442ad3e54e32b86ce1ba83513bcf30e"},
{file = "webauthn-1.8.1.tar.gz", hash = "sha256:caca3cc62045923c542819169c22122bdb81efa0246c9d5208d073942d806bf3"},
{file = "webauthn-1.8.0-py3-none-any.whl", hash = "sha256:1b87d55acd86234475591e2f60163232ba98d91685f235803f258fc891ebb6c0"},
{file = "webauthn-1.8.0.tar.gz", hash = "sha256:0c981eb2575d66fa0a6586c2bfb24d9812dff8e158009a9030bc867717727709"},
]
[package.dependencies]
asn1crypto = ">=1.4.0"
cbor2 = ">=5.4.2.post1"
cryptography = ">=39.0.1"
cryptography = ">=36.0.1"
pydantic = ">=1.9.0"
pyOpenSSL = ">=23.0.0"
pyOpenSSL = ">=22.0.0"
[[package]]
name = "websocket-client"
@ -4176,4 +4152,4 @@ files = [
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "da0f14183137ec5d4fcd7df877f1488860bc26f795f8aaa19c78655f77e3f409"
content-hash = "82fc267d6041997d1410a951033cdb9f6c57d91df7d48acaecdbab320daab58e"

View File

@ -7,7 +7,7 @@ ENV NODE_ENV=production
RUN cd /static && npm ci && npm run build-proxy
# Stage 2: Build
FROM docker.io/golang:1.20.4-bullseye AS builder
FROM docker.io/golang:1.20.3-bullseye AS builder
WORKDIR /go/src/goauthentik.io

View File

@ -5,9 +5,6 @@ strictParameterNoneValue = true
strictDictionaryInference = true
strictListInference = true
reportOptionalMemberAccess = false
reportOptionalContextManager = false
# rest_framework's serializer's `validated_data` is typed as optional None
reportOptionalSubscript = false
# Sadly pyright still has issues with enums, and they fall under general type issues
# so we have to disable those for now
reportGeneralTypeIssues = false
@ -179,7 +176,6 @@ bump2version = "*"
colorama = "*"
coverage = { extras = ["toml"], version = "*" }
django-silk = "*"
drf-jsonschema-serializer = "*"
importlib-metadata = "*"
pylint = "*"
pylint-django = "*"

View File

@ -1,5 +1,5 @@
# Stage 1: Build
FROM docker.io/golang:1.20.4-bullseye AS builder
FROM docker.io/golang:1.20.3-bullseye AS builder
WORKDIR /go/src/goauthentik.io

View File

@ -6806,11 +6806,11 @@ paths:
- all
- any
description: |-
* `all` - all, all policies must pass
* `any` - any, any policy must pass
* `all` - ALL, all policies must pass
* `any` - ANY, any policy must pass
* `all` - all, all policies must pass
* `any` - any, any policy must pass
* `all` - ALL, all policies must pass
* `any` - ANY, any policy must pass
- in: query
name: re_evaluate_policies
schema:
@ -17552,11 +17552,11 @@ paths:
- all
- any
description: |-
* `all` - all, all policies must pass
* `any` - any, any policy must pass
* `all` - ALL, all policies must pass
* `any` - ANY, any policy must pass
* `all` - all, all policies must pass
* `any` - any, any policy must pass
* `all` - ALL, all policies must pass
* `any` - ANY, any policy must pass
- in: query
name: profile_url
schema:
@ -17921,11 +17921,11 @@ paths:
- all
- any
description: |-
* `all` - all, all policies must pass
* `any` - any, any policy must pass
* `all` - ALL, all policies must pass
* `any` - ANY, any policy must pass
* `all` - all, all policies must pass
* `any` - any, any policy must pass
* `all` - ALL, all policies must pass
* `any` - ANY, any policy must pass
- name: search
required: false
in: query
@ -18358,11 +18358,11 @@ paths:
- all
- any
description: |-
* `all` - all, all policies must pass
* `any` - any, any policy must pass
* `all` - ALL, all policies must pass
* `any` - ANY, any policy must pass
* `all` - all, all policies must pass
* `any` - any, any policy must pass
* `all` - ALL, all policies must pass
* `any` - ANY, any policy must pass
- in: query
name: pre_authentication_flow
schema:
@ -27912,7 +27912,6 @@ components:
readOnly: true
managed:
type: string
readOnly: true
nullable: true
title: Managed by authentik
description: Objects which are managed by authentik. These objects are created
@ -27925,7 +27924,6 @@ components:
- certificate_download_url
- fingerprint_sha1
- fingerprint_sha256
- managed
- name
- pk
- private_key_available
@ -27948,6 +27946,15 @@ components:
writeOnly: true
description: Optional Private Key. If this is set, you can use this keypair
for encryption.
managed:
type: string
nullable: true
minLength: 1
title: Managed by authentik
description: Objects which are managed by authentik. These objects are created
and updated automatically. This is flag only indicates that an object
can be overwritten by migrations. You can still modify the objects via
the API, but expect changes to be overwritten in a later update.
required:
- certificate_data
- name
@ -35642,6 +35649,15 @@ components:
writeOnly: true
description: Optional Private Key. If this is set, you can use this keypair
for encryption.
managed:
type: string
nullable: true
minLength: 1
title: Managed by authentik
description: Objects which are managed by authentik. These objects are created
and updated automatically. This is flag only indicates that an object
can be overwritten by migrations. You can still modify the objects via
the API, but expect changes to be overwritten in a later update.
PatchedConsentStageRequest:
type: object
description: ConsentStage Serializer
@ -37964,8 +37980,8 @@ components:
- any
type: string
description: |-
* `all` - all, all policies must pass
* `any` - any, any policy must pass
* `all` - ALL, all policies must pass
* `any` - ANY, any policy must pass
PolicyRequest:
type: object
description: Policy Serializer
@ -38425,13 +38441,12 @@ components:
- facebook
- github
- google
- mailcow
- openidconnect
- okta
- patreon
- reddit
- twitch
- twitter
- mailcow
- twitch
type: string
description: |-
* `apple` - Apple
@ -38440,13 +38455,12 @@ components:
* `facebook` - Facebook
* `github` - GitHub
* `google` - Google
* `mailcow` - Mailcow
* `openidconnect` - OpenID Connect
* `okta` - Okta
* `patreon` - Patreon
* `reddit` - reddit
* `twitch` - Twitch
* `twitter` - Twitter
* `mailcow` - Mailcow
* `twitch` - Twitch
ProxyMode:
enum:
- proxy

View File

@ -6,7 +6,6 @@ from authentik.lib.generators import generate_id
with open("local.env.yml", "w", encoding="utf-8") as _config:
safe_dump(
{
"debug": True,
"log_level": "debug",
"secret_key": generate_id(),
"postgresql": {

View File

@ -2,13 +2,13 @@ version: '3.7'
services:
chrome:
image: docker.io/selenium/standalone-chrome:110.0
image: selenium/standalone-chrome:110.0
volumes:
- /dev/shm:/dev/shm
network_mode: host
restart: always
mailpit:
image: docker.io/axllent/mailpit:v1.6.5
mailhog:
image: mailhog/mailhog:v1.0.1
ports:
- 1025:1025
- 8025:8025

View File

@ -78,13 +78,13 @@ class TestFlowsEnroll(SeleniumTestCase):
# Wait for the success message so we know the email is sent
wait.until(ec.presence_of_element_located((By.CSS_SELECTOR, ".pf-c-form p")))
# Open Mailpit
# Open Mailhog
self.driver.get("http://localhost:8025")
# Click on first message
self.wait.until(ec.presence_of_element_located((By.CLASS_NAME, "message")))
self.driver.find_element(By.CLASS_NAME, "message").click()
self.driver.switch_to.frame(self.driver.find_element(By.ID, "preview-html"))
self.wait.until(ec.presence_of_element_located((By.CLASS_NAME, "msglist-message")))
self.driver.find_element(By.CLASS_NAME, "msglist-message").click()
self.driver.switch_to.frame(self.driver.find_element(By.CLASS_NAME, "tab-pane"))
self.driver.find_element(By.ID, "confirm").click()
self.driver.close()
self.driver.switch_to.window(self.driver.window_handles[0])

View File

@ -71,13 +71,13 @@ class TestFlowsRecovery(SeleniumTestCase):
# Wait for the success message so we know the email is sent
wait.until(ec.presence_of_element_located((By.CSS_SELECTOR, ".pf-c-form p")))
# Open mailpit
# Open Mailhog
self.driver.get("http://localhost:8025")
# Click on first message
self.wait.until(ec.presence_of_element_located((By.CLASS_NAME, "message")))
self.driver.find_element(By.CLASS_NAME, "message").click()
self.driver.switch_to.frame(self.driver.find_element(By.ID, "preview-html"))
self.wait.until(ec.presence_of_element_located((By.CLASS_NAME, "msglist-message")))
self.driver.find_element(By.CLASS_NAME, "msglist-message").click()
self.driver.switch_to.frame(self.driver.find_element(By.CLASS_NAME, "tab-pane"))
self.driver.find_element(By.ID, "confirm").click()
self.driver.close()
self.driver.switch_to.window(self.driver.window_handles[0])

View File

@ -62,6 +62,7 @@ class TestProviderOAuth2Github(SeleniumTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
@ -118,6 +119,7 @@ class TestProviderOAuth2Github(SeleniumTestCase):
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
"system/providers-oauth2.yaml",
@ -192,6 +194,7 @@ class TestProviderOAuth2Github(SeleniumTestCase):
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@reconcile_app("authentik_crypto")
def test_denied(self):

View File

@ -67,6 +67,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
@ -115,6 +116,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
@ -176,6 +178,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
@ -247,6 +250,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
"system/providers-oauth2.yaml",
@ -326,6 +330,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
"system/providers-oauth2.yaml",

View File

@ -63,6 +63,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@reconcile_app("authentik_crypto")
@ -108,6 +109,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@reconcile_app("authentik_crypto")
@ -163,6 +165,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@reconcile_app("authentik_crypto")
@apply_blueprint("system/providers-oauth2.yaml")
@ -232,6 +235,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@reconcile_app("authentik_crypto")
def test_authorization_denied(self):

View File

@ -63,6 +63,7 @@ class TestProviderOAuth2OIDCImplicit(SeleniumTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@reconcile_app("authentik_crypto")
@ -108,6 +109,7 @@ class TestProviderOAuth2OIDCImplicit(SeleniumTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@reconcile_app("authentik_crypto")
@ -158,6 +160,7 @@ class TestProviderOAuth2OIDCImplicit(SeleniumTestCase):
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@reconcile_app("authentik_crypto")
@apply_blueprint("system/providers-oauth2.yaml")
@ -223,6 +226,7 @@ class TestProviderOAuth2OIDCImplicit(SeleniumTestCase):
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@reconcile_app("authentik_crypto")
def test_authorization_denied(self):

View File

@ -60,6 +60,7 @@ class TestProviderProxy(SeleniumTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
@ -125,6 +126,7 @@ class TestProviderProxy(SeleniumTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
@ -201,6 +203,7 @@ class TestProviderProxyConnect(ChannelsLiveServerTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@reconcile_app("authentik_crypto")
@ -208,7 +211,7 @@ class TestProviderProxyConnect(ChannelsLiveServerTestCase):
"""Test proxy connectivity over websocket"""
outpost_connection_discovery() # pylint: disable=no-value-for-parameter
proxy: ProxyProvider = ProxyProvider.objects.create(
name=generate_id(),
name="proxy_provider",
authorization_flow=Flow.objects.get(
slug="default-provider-authorization-implicit-consent"
),
@ -222,7 +225,7 @@ class TestProviderProxyConnect(ChannelsLiveServerTestCase):
Application.objects.create(name="proxy", slug="proxy", provider=proxy)
service_connection = DockerServiceConnection.objects.get(local=True)
outpost: Outpost = Outpost.objects.create(
name=generate_id(),
name="proxy_outpost",
type=OutpostType.PROXY,
service_connection=service_connection,
_config=asdict(OutpostConfig(authentik_host=self.live_server_url, log_level="debug")),
@ -241,7 +244,7 @@ class TestProviderProxyConnect(ChannelsLiveServerTestCase):
sleep(0.5)
state = outpost.state
self.assertGreaterEqual(len(state), 1)
self.assertTrue(len(state) >= 1)
# Make sure to delete the outpost to remove the container
outpost.delete()

View File

@ -62,6 +62,7 @@ class TestProviderSAML(SeleniumTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
@ -131,6 +132,7 @@ class TestProviderSAML(SeleniumTestCase):
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
"system/providers-saml.yaml",
@ -214,6 +216,7 @@ class TestProviderSAML(SeleniumTestCase):
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
"system/providers-saml.yaml",
@ -296,6 +299,7 @@ class TestProviderSAML(SeleniumTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
@ -370,6 +374,7 @@ class TestProviderSAML(SeleniumTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
@ -418,6 +423,7 @@ class TestProviderSAML(SeleniumTestCase):
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(

View File

@ -146,6 +146,10 @@ class TestSourceOAuth2(SeleniumTestCase):
"default/flow-default-authentication-flow.yaml",
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
@apply_blueprint(
"default/flow-default-source-authentication.yaml",
"default/flow-default-source-enrollment.yaml",
@ -195,6 +199,14 @@ class TestSourceOAuth2(SeleniumTestCase):
self.assert_user(User(username="foo", name="admin", email="admin@example.com"))
@retry()
@apply_blueprint(
"default/flow-default-authentication-flow.yaml",
"default/flow-default-invalidation-flow.yaml",
)
@apply_blueprint(
"default/flow-default-provider-authorization-explicit-consent.yaml",
"default/flow-default-provider-authorization-implicit-consent.yaml",
)
def test_oauth_enroll_auth(self):
"""test OAuth Source With With OIDC (enroll and authenticate again)"""
self.test_oauth_enroll()

View File

@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="256px" height="247px" viewBox="0 0 256 247" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid">
<g>
<path d="M45.1355837,0 L45.1355837,246.35001 L0,246.35001 L0,0 L45.1355837,0 Z M163.657111,0 C214.65668,0 256,41.3433196 256,92.3428889 C256,143.342458 214.65668,184.685778 163.657111,184.685778 C112.657542,184.685778 71.3142222,143.342458 71.3142222,92.3428889 C71.3142222,41.3433196 112.657542,0 163.657111,0 Z" fill="#FF424D"></path>
</g>
</svg>

[Image preview omitted: deleted SVG, 587 B]

View File

@ -1,31 +0,0 @@
import { formatter } from "@lingui/format-po-gettext";
export default {
sourceLocale: "en",
locales: [
"en",
"pseudo-LOCALE",
"fr_FR",
"tr",
"es",
"pl",
"zh_TW",
"zh-Hans",
"zh-Hant",
"de",
],
pseudoLocale: "pseudo-LOCALE",
fallbackLocales: {
"pseudo-LOCALE": "en",
"default": "en",
},
compileNamespace: "ts",
catalogs: [
{
path: "src/locales/{locale}",
include: ["src"],
exclude: ["**/node_modules/**", "**/dist/**"],
},
],
format: formatter({ lineNumbers: false }),
};

web/package-lock.json (generated, 1510 changed lines)

File diff suppressed because it is too large

View File

@ -15,48 +15,84 @@
"tsc": "lingui compile && tsc --noEmit -p .",
"background-image": "npx @squoosh/cli -d src/assets/images --resize '{\"enabled\":true,\"width\":2560,\"method\":\"lanczos3\",\"fitMethod\":\"contain\",\"premultiply\":true,\"linearRGB\":true}' --mozjpeg '{\"quality\":75,\"baseline\":false,\"arithmetic\":false,\"progressive\":true,\"optimize_coding\":true,\"smoothing\":0,\"color_space\":3,\"quant_table\":3,\"trellis_multipass\":false,\"trellis_opt_zero\":false,\"trellis_opt_table\":false,\"trellis_loops\":1,\"auto_subsample\":true,\"chroma_subsample\":2,\"separate_chroma_quality\":false,\"chroma_quality\":75}' src/assets/images/flow_background.jpg"
},
"lingui": {
"sourceLocale": "en",
"locales": [
"en",
"pseudo-LOCALE",
"fr_FR",
"tr",
"es",
"pl",
"zh_TW",
"zh-Hans",
"zh-Hant",
"de"
],
"formatOptions": {
"lineNumbers": false
},
"pseudoLocale": "pseudo-LOCALE",
"fallbackLocales": {
"pseudo-LOCALE": "en",
"default": "en"
},
"format": "po-gettext",
"compileNamespace": "ts",
"catalogs": [
{
"path": "src/locales/{locale}",
"include": [
"src"
],
"exclude": [
"**/node_modules/**",
"**/dist/**"
]
}
]
},
"dependencies": {
"@babel/core": "^7.21.8",
"@babel/core": "^7.21.4",
"@babel/plugin-proposal-decorators": "^7.21.0",
"@babel/plugin-transform-runtime": "^7.21.4",
"@babel/preset-env": "^7.21.5",
"@babel/preset-typescript": "^7.21.5",
"@babel/preset-env": "^7.21.4",
"@babel/preset-typescript": "^7.21.4",
"@codemirror/lang-html": "^6.4.3",
"@codemirror/lang-javascript": "^6.1.7",
"@codemirror/lang-python": "^6.1.2",
"@codemirror/lang-xml": "^6.0.2",
"@codemirror/legacy-modes": "^6.3.2",
"@codemirror/theme-one-dark": "^6.1.2",
"@formatjs/intl-listformat": "^7.2.2",
"@formatjs/intl-listformat": "^7.2.1",
"@fortawesome/fontawesome-free": "^6.4.0",
"@goauthentik/api": "^2023.4.1-1683455546",
"@goauthentik/api": "^2023.4.1-1681914191",
"@hcaptcha/types": "^1.0.3",
"@jackfranklin/rollup-plugin-markdown": "^0.4.0",
"@lingui/cli": "^4.0.0",
"@lingui/core": "^4.0.0",
"@lingui/detect-locale": "^4.0.0",
"@lingui/format-po-gettext": "^4.0.0",
"@lingui/macro": "^4.0.0",
"@lingui/cli": "^3.17.2",
"@lingui/core": "^3.17.2",
"@lingui/detect-locale": "^3.17.2",
"@lingui/macro": "^3.17.2",
"@patternfly/patternfly": "^4.224.2",
"@rollup/plugin-babel": "^6.0.3",
"@rollup/plugin-commonjs": "^24.1.0",
"@rollup/plugin-node-resolve": "^15.0.2",
"@rollup/plugin-replace": "^5.0.2",
"@rollup/plugin-typescript": "^11.1.0",
"@sentry/browser": "^7.51.0",
"@sentry/tracing": "^7.51.0",
"@sentry/browser": "^7.49.0",
"@sentry/tracing": "^7.49.0",
"@squoosh/cli": "^0.7.3",
"@trivago/prettier-plugin-sort-imports": "^4.1.1",
"@types/chart.js": "^2.9.37",
"@types/codemirror": "5.60.7",
"@types/grecaptcha": "^3.0.4",
"@typescript-eslint/eslint-plugin": "^5.59.2",
"@typescript-eslint/parser": "^5.59.2",
"@typescript-eslint/eslint-plugin": "^5.59.0",
"@typescript-eslint/parser": "^5.59.0",
"@webcomponents/webcomponentsjs": "^2.8.0",
"babel-plugin-macros": "^3.1.0",
"babel-plugin-tsconfig-paths": "^1.0.3",
"base64-js": "^1.5.1",
"chart.js": "^4.3.0",
"chart.js": "^4.2.1",
"chartjs-adapter-moment": "^1.0.1",
"codemirror": "^6.0.1",
"construct-style-sheets-polyfill": "^3.1.0",
@ -67,11 +103,11 @@
"eslint-plugin-custom-elements": "0.0.8",
"eslint-plugin-lit": "^1.8.3",
"fuse.js": "^6.6.2",
"lit": "^2.7.4",
"lit": "^2.7.2",
"mermaid": "^10.1.0",
"moment": "^2.29.4",
"prettier": "^2.8.8",
"pyright": "^1.1.306",
"pyright": "^1.1.304",
"rapidoc": "^9.3.4",
"rollup": "^2.79.1",
"rollup-plugin-copy": "^3.4.0",
@ -81,8 +117,8 @@
"ts-lit-plugin": "^1.2.1",
"tslib": "^2.5.0",
"turnstile-types": "^1.1.2",
"typescript": "^5.0.4",
"typescript": "^4.9.5",
"webcomponent-qr-code": "^1.1.1",
"yaml": "^2.2.2"
"yaml": "^2.2.1"
}
}

View File

@ -266,7 +266,11 @@ export class AdminInterface extends Interface {
<ak-sidebar-item>
<span slot="label">${t`Directory`}</span>
<ak-sidebar-item
path="/identity/users"
path=${`/identity/users;${encodeURIComponent(
JSON.stringify({
path: "users",
}),
)}`}
.activeWhen=${[`^/identity/users/(?<id>${ID_REGEX})$`]}
>
<span slot="label">${t`Users`}</span>

View File

@ -34,14 +34,14 @@ export class ApplicationCheckAccessForm extends Form<{ forUser: number }> {
return t`Successfully sent test-request.`;
}
async send(data: { forUser: number }): Promise<PolicyTestResult> {
send = async (data: { forUser: number }): Promise<PolicyTestResult> => {
this.request = data.forUser;
const result = await new CoreApi(DEFAULT_CONFIG).coreApplicationsCheckAccessRetrieve({
slug: this.application?.slug,
forUser: data.forUser,
});
return (this.result = result);
}
};
resetForm(): void {
super.resetForm();

Some files were not shown because too many files have changed in this diff.