Compare commits
4 Commits
version/20...outposts/s

SHA1: 396925d1f0, 10a8ed164e, 445dc01dca, 441916703d
@@ -1,16 +1,16 @@
[bumpversion]
current_version = 2025.2.0-rc2
current_version = 2024.12.3
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
serialize =
serialize =
    {major}.{minor}.{patch}-{rc_t}{rc_n}
    {major}.{minor}.{patch}
message = release: {new_version}
tag_name = version/{new_version}

[bumpversion:part:rc_t]
values =
values =
    rc
    final
optional_value = final
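As a standalone illustration (not part of this compare), the `parse` pattern above can be exercised with Python's re module; the double backslash from the config file is written as a single backslash here, since that is the regex the tool ultimately compiles:

import re

# Sketch only: mirrors the bumpversion `parse` pattern from the hunk above.
PARSE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

print(PARSE.match("2025.2.0-rc2").groupdict())
# {'major': '2025', 'minor': '2', 'patch': '0', 'rc_t': 'rc', 'rc_n': '2'}
print(PARSE.match("2024.12.3").groupdict())
# {'major': '2024', 'minor': '12', 'patch': '3', 'rc_t': None, 'rc_n': None}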
Makefile (2 changes)

@@ -21,7 +21,7 @@ pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
    -I .github/codespell-words.txt \
    -S 'web/src/locales/**' \
    -S 'website/docs/developer-docs/api/reference/**' \
    -S 'website/developer-docs/api/reference/**' \
    -S '**/node_modules/**' \
    -S '**/dist/**' \
    $(PY_SOURCES) \
@@ -2,7 +2,7 @@

from os import environ

__version__ = "2025.2.0"
__version__ = "2024.12.3"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@@ -50,6 +50,7 @@ from authentik.enterprise.providers.microsoft_entra.models import (
    MicrosoftEntraProviderGroup,
    MicrosoftEntraProviderUser,
)
from authentik.enterprise.providers.rac.models import ConnectionToken
from authentik.enterprise.providers.ssf.models import StreamEvent
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
    EndpointDevice,
@@ -71,7 +72,6 @@ from authentik.providers.oauth2.models import (
    DeviceToken,
    RefreshToken,
)
from authentik.providers.rac.models import ConnectionToken
from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
from authentik.rbac.models import Role
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
@@ -4,7 +4,6 @@ from json import loads

from django.db.models import Prefetch
from django.http import Http404
from django.utils.translation import gettext as _
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
from django_filters.filterset import FilterSet
from drf_spectacular.utils import (
@@ -82,37 +81,9 @@ class GroupSerializer(ModelSerializer):
        if not self.instance or not parent:
            return parent
        if str(parent.group_uuid) == str(self.instance.group_uuid):
            raise ValidationError(_("Cannot set group as parent of itself."))
            raise ValidationError("Cannot set group as parent of itself.")
        return parent

    def validate_is_superuser(self, superuser: bool):
        """Ensure that the user creating this group has permissions to set the superuser flag"""
        request: Request = self.context.get("request", None)
        if not request:
            return superuser
        # If we're updating an instance, and the state hasn't changed, we don't need to check perms
        if self.instance and superuser == self.instance.is_superuser:
            return superuser
        user: User = request.user
        perm = (
            "authentik_core.enable_group_superuser"
            if superuser
            else "authentik_core.disable_group_superuser"
        )
        has_perm = user.has_perm(perm)
        if self.instance and not has_perm:
            has_perm = user.has_perm(perm, self.instance)
        if not has_perm:
            raise ValidationError(
                _(
                    (
                        "User does not have permission to set "
                        "superuser status to {superuser_status}."
                    ).format_map({"superuser_status": superuser})
                )
            )
        return superuser

    class Meta:
        model = Group
        fields = [
@@ -1,26 +0,0 @@
# Generated by Django 5.0.11 on 2025-01-30 23:55

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="group",
            options={
                "permissions": [
                    ("add_user_to_group", "Add user to group"),
                    ("remove_user_from_group", "Remove user from group"),
                    ("enable_group_superuser", "Enable superuser status"),
                    ("disable_group_superuser", "Disable superuser status"),
                ],
                "verbose_name": "Group",
                "verbose_name_plural": "Groups",
            },
        ),
    ]
@@ -204,8 +204,6 @@ class Group(SerializerModel, AttributesMixin):
        permissions = [
            ("add_user_to_group", _("Add user to group")),
            ("remove_user_from_group", _("Remove user from group")),
            ("enable_group_superuser", _("Enable superuser status")),
            ("disable_group_superuser", _("Disable superuser status")),
        ]

    def __str__(self):
@@ -35,7 +35,8 @@ from authentik.flows.planner import (
    FlowPlanner,
)
from authentik.flows.stage import StageView
from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET
from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET, SESSION_KEY_PLAN
from authentik.lib.utils.urls import redirect_with_qs
from authentik.lib.views import bad_request_message
from authentik.policies.denied import AccessDeniedResponse
from authentik.policies.utils import delete_none_values
@@ -46,9 +47,8 @@ from authentik.stages.user_write.stage import PLAN_CONTEXT_USER_PATH

LOGGER = get_logger()

PLAN_CONTEXT_SOURCE_GROUPS = "source_groups"
SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages"
SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token" # nosec
PLAN_CONTEXT_SOURCE_GROUPS = "source_groups"


class MessageStage(StageView):
@@ -219,28 +219,28 @@ class SourceFlowManager:
            }
        )
        flow_context.update(self.policy_context)
        if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
            token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
            self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug)
            plan = token.plan
            plan.context[PLAN_CONTEXT_IS_RESTORED] = token
            plan.context.update(flow_context)
            for stage in self.get_stages_to_append(flow):
                plan.append_stage(stage)
            if stages:
                for stage in stages:
                    plan.append_stage(stage)
            self.request.session[SESSION_KEY_PLAN] = plan
            flow_slug = token.flow.slug
            token.delete()
            return redirect_with_qs(
                "authentik_core:if-flow",
                self.request.GET,
                flow_slug=flow_slug,
            )
        flow_context.setdefault(PLAN_CONTEXT_REDIRECT, final_redirect)

        if not flow:
            # We only check for the flow token here if we don't have a flow, otherwise we rely on
            # SESSION_KEY_SOURCE_FLOW_STAGES to delegate the usage of this token and dynamically add
            # stages that deal with this token to return to another flow
            if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
                token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
                self._logger.info(
                    "Replacing source flow with overridden flow", flow=token.flow.slug
                )
                plan = token.plan
                plan.context[PLAN_CONTEXT_IS_RESTORED] = token
                plan.context.update(flow_context)
                for stage in self.get_stages_to_append(flow):
                    plan.append_stage(stage)
                if stages:
                    for stage in stages:
                        plan.append_stage(stage)
                redirect = plan.to_redirect(self.request, token.flow)
                token.delete()
                return redirect
            return bad_request_message(
                self.request,
                _("Configured flow does not exist."),
@@ -259,8 +259,6 @@ class SourceFlowManager:
        if stages:
            for stage in stages:
                plan.append_stage(stage)
        for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []):
            plan.append_stage(stage)
        return plan.to_redirect(self.request, flow)

    def handle_auth(
@@ -297,8 +295,6 @@ class SourceFlowManager:
        # When request isn't authenticated we jump straight to auth
        if not self.request.user.is_authenticated:
            return self.handle_auth(connection)
        # When an override flow token exists we actually still use a flow for link
        # to continue the existing flow we came from
        if SESSION_KEY_OVERRIDE_FLOW_TOKEN in self.request.session:
            return self._prepare_flow(None, connection)
        connection.save()
@@ -67,8 +67,6 @@ def clean_expired_models(self: SystemTask):
        raise ImproperlyConfigured(
            "Invalid session_storage setting, allowed values are db and cache"
        )
    if CONFIG.get("session_storage", "cache") == "db":
        DBSessionStore.clear_expired()
    LOGGER.debug("Expired sessions", model=AuthenticatedSession, amount=amount)

    messages.append(f"Expired {amount} {AuthenticatedSession._meta.verbose_name_plural}")
@@ -4,7 +4,7 @@ from django.urls.base import reverse
from guardian.shortcuts import assign_perm
from rest_framework.test import APITestCase

from authentik.core.models import Group
from authentik.core.models import Group, User
from authentik.core.tests.utils import create_test_admin_user, create_test_user
from authentik.lib.generators import generate_id

@@ -14,7 +14,7 @@ class TestGroupsAPI(APITestCase):

    def setUp(self) -> None:
        self.login_user = create_test_user()
        self.user = create_test_user()
        self.user = User.objects.create(username="test-user")

    def test_list_with_users(self):
        """Test listing with users"""
@@ -109,57 +109,3 @@ class TestGroupsAPI(APITestCase):
            },
        )
        self.assertEqual(res.status_code, 400)

    def test_superuser_no_perm(self):
        """Test creating a superuser group without permission"""
        assign_perm("authentik_core.add_group", self.login_user)
        self.client.force_login(self.login_user)
        res = self.client.post(
            reverse("authentik_api:group-list"),
            data={"name": generate_id(), "is_superuser": True},
        )
        self.assertEqual(res.status_code, 400)
        self.assertJSONEqual(
            res.content,
            {"is_superuser": ["User does not have permission to set superuser status to True."]},
        )

    def test_superuser_update_no_perm(self):
        """Test updating a superuser group without permission"""
        group = Group.objects.create(name=generate_id(), is_superuser=True)
        assign_perm("view_group", self.login_user, group)
        assign_perm("change_group", self.login_user, group)
        self.client.force_login(self.login_user)
        res = self.client.patch(
            reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
            data={"is_superuser": False},
        )
        self.assertEqual(res.status_code, 400)
        self.assertJSONEqual(
            res.content,
            {"is_superuser": ["User does not have permission to set superuser status to False."]},
        )

    def test_superuser_update_no_change(self):
        """Test updating a superuser group without permission
        and without changing the superuser status"""
        group = Group.objects.create(name=generate_id(), is_superuser=True)
        assign_perm("view_group", self.login_user, group)
        assign_perm("change_group", self.login_user, group)
        self.client.force_login(self.login_user)
        res = self.client.patch(
            reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
            data={"name": generate_id(), "is_superuser": True},
        )
        self.assertEqual(res.status_code, 200)

    def test_superuser_create(self):
        """Test creating a superuser group with permission"""
        assign_perm("authentik_core.add_group", self.login_user)
        assign_perm("authentik_core.enable_group_superuser", self.login_user)
        self.client.force_login(self.login_user)
        res = self.client.post(
            reverse("authentik_api:group-list"),
            data={"name": generate_id(), "is_superuser": True},
        )
        self.assertEqual(res.status_code, 201)
@@ -97,8 +97,6 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
        thread_kwargs: dict | None = None,
        **_,
    ):
        if not self.enabled:
            return super().post_save_handler(request, sender, instance, created, thread_kwargs, **_)
        if not should_log_model(instance):
            return None
        thread_kwargs = {}
@@ -124,8 +122,6 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
    ):
        thread_kwargs = {}
        m2m_field = None
        if not self.enabled:
            return super().m2m_changed_handler(request, sender, instance, action, thread_kwargs)
        # For the audit log we don't care about `pre_` or `post_` so we trim that part off
        _, _, action_direction = action.partition("_")
        # resolve the "through" model to an actual field
@@ -6,12 +6,13 @@ from rest_framework.viewsets import GenericViewSet
from authentik.core.api.groups import GroupMemberSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.providers.rac.api.endpoints import EndpointSerializer
from authentik.providers.rac.api.providers import RACProviderSerializer
from authentik.providers.rac.models import ConnectionToken
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.providers.rac.api.endpoints import EndpointSerializer
from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer
from authentik.enterprise.providers.rac.models import ConnectionToken


class ConnectionTokenSerializer(ModelSerializer):
class ConnectionTokenSerializer(EnterpriseRequiredMixin, ModelSerializer):
    """ConnectionToken Serializer"""

    provider_obj = RACProviderSerializer(source="provider", read_only=True)
@@ -14,9 +14,10 @@ from structlog.stdlib import get_logger
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import Provider
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer
from authentik.enterprise.providers.rac.models import Endpoint
from authentik.policies.engine import PolicyEngine
from authentik.providers.rac.api.providers import RACProviderSerializer
from authentik.providers.rac.models import Endpoint
from authentik.rbac.filters import ObjectFilter

LOGGER = get_logger()
@@ -27,7 +28,7 @@ def user_endpoint_cache_key(user_pk: str) -> str:
    return f"goauthentik.io/providers/rac/endpoint_access/{user_pk}"


class EndpointSerializer(ModelSerializer):
class EndpointSerializer(EnterpriseRequiredMixin, ModelSerializer):
    """Endpoint Serializer"""

    provider_obj = RACProviderSerializer(source="provider", read_only=True)
@@ -10,7 +10,7 @@ from rest_framework.viewsets import ModelViewSet
from authentik.core.api.property_mappings import PropertyMappingSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import JSONDictField
from authentik.providers.rac.models import RACPropertyMapping
from authentik.enterprise.providers.rac.models import RACPropertyMapping


class RACPropertyMappingSerializer(PropertyMappingSerializer):
@@ -5,10 +5,11 @@ from rest_framework.viewsets import ModelViewSet

from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.providers.rac.models import RACProvider
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.providers.rac.models import RACProvider


class RACProviderSerializer(ProviderSerializer):
class RACProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
    """RACProvider Serializer"""

    outpost_set = ListField(child=CharField(), read_only=True, source="outpost_set.all")
authentik/enterprise/providers/rac/apps.py (new file, 14 lines)

@@ -0,0 +1,14 @@
"""RAC app config"""

from authentik.enterprise.apps import EnterpriseConfig


class AuthentikEnterpriseProviderRAC(EnterpriseConfig):
    """authentik enterprise rac app config"""

    name = "authentik.enterprise.providers.rac"
    label = "authentik_providers_rac"
    verbose_name = "authentik Enterprise.Providers.RAC"
    default = True
    mountpoint = ""
    ws_mountpoint = "authentik.enterprise.providers.rac.urls"
@@ -7,22 +7,22 @@ from channels.generic.websocket import AsyncWebsocketConsumer
from django.http.request import QueryDict
from structlog.stdlib import BoundLogger, get_logger

from authentik.enterprise.providers.rac.models import ConnectionToken, RACProvider
from authentik.outposts.consumer import OUTPOST_GROUP_INSTANCE
from authentik.outposts.models import Outpost, OutpostState, OutpostType
from authentik.providers.rac.models import ConnectionToken, RACProvider

# Global broadcast group, which messages are sent to when the outpost connects back
# to authentik for a specific connection
# The `RACClientConsumer` consumer adds itself to this group on connection,
# and removes itself once it has been assigned a specific outpost channel
RAC_CLIENT_GROUP = "group_rac_client"
RAC_CLIENT_GROUP = "group_enterprise_rac_client"
# A group for all connections in a given authentik session ID
# A disconnect message is sent to this group when the session expires/is deleted
RAC_CLIENT_GROUP_SESSION = "group_rac_client_%(session)s"
RAC_CLIENT_GROUP_SESSION = "group_enterprise_rac_client_%(session)s"
# A group for all connections with a specific token, which in almost all cases
# is just one connection, however this is used to disconnect the connection
# when the token is deleted
RAC_CLIENT_GROUP_TOKEN = "group_rac_token_%(token)s" # nosec
RAC_CLIENT_GROUP_TOKEN = "group_enterprise_rac_token_%(token)s" # nosec

# Step 1: Client connects to this websocket endpoint
# Step 2: We prepare all the connection args for Guac
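For illustration only (the session and token values below are made up), the %-style group-name templates above are rendered per session or per token like this:

# Standalone sketch: how the channel-group name templates above are filled in.
RAC_CLIENT_GROUP_SESSION = "group_enterprise_rac_client_%(session)s"
RAC_CLIENT_GROUP_TOKEN = "group_enterprise_rac_token_%(token)s"  # nosec

print(RAC_CLIENT_GROUP_SESSION % {"session": "3f2b9c"})  # group_enterprise_rac_client_3f2b9c
print(RAC_CLIENT_GROUP_TOKEN % {"token": "a1b2c3"})      # group_enterprise_rac_token_a1b2c3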
@@ -3,7 +3,7 @@
from channels.exceptions import ChannelFull
from channels.generic.websocket import AsyncWebsocketConsumer

from authentik.providers.rac.consumer_client import RAC_CLIENT_GROUP
from authentik.enterprise.providers.rac.consumer_client import RAC_CLIENT_GROUP


class RACOutpostConsumer(AsyncWebsocketConsumer):
@@ -74,7 +74,7 @@ class RACProvider(Provider):

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.providers.rac.api.providers import RACProviderSerializer
        from authentik.enterprise.providers.rac.api.providers import RACProviderSerializer

        return RACProviderSerializer

@@ -100,7 +100,7 @@ class Endpoint(SerializerModel, PolicyBindingModel):

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.providers.rac.api.endpoints import EndpointSerializer
        from authentik.enterprise.providers.rac.api.endpoints import EndpointSerializer

        return EndpointSerializer

@@ -129,7 +129,7 @@ class RACPropertyMapping(PropertyMapping):

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.providers.rac.api.property_mappings import (
        from authentik.enterprise.providers.rac.api.property_mappings import (
            RACPropertyMappingSerializer,
        )
@@ -10,12 +10,12 @@ from django.dispatch import receiver
from django.http import HttpRequest

from authentik.core.models import User
from authentik.providers.rac.api.endpoints import user_endpoint_cache_key
from authentik.providers.rac.consumer_client import (
from authentik.enterprise.providers.rac.api.endpoints import user_endpoint_cache_key
from authentik.enterprise.providers.rac.consumer_client import (
    RAC_CLIENT_GROUP_SESSION,
    RAC_CLIENT_GROUP_TOKEN,
)
from authentik.providers.rac.models import ConnectionToken, Endpoint
from authentik.enterprise.providers.rac.models import ConnectionToken, Endpoint


@receiver(user_logged_out)
@@ -3,7 +3,7 @@
{% load authentik_core %}

{% block head %}
<script src="{% versioned_script 'dist/rac/index-%v.js' %}" type="module"></script>
<script src="{% versioned_script 'dist/enterprise/rac/index-%v.js' %}" type="module"></script>
<meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)">
<meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)">
<link rel="icon" href="{{ tenant.branding_favicon_url }}">
@@ -1,9 +1,16 @@
"""Test RAC Provider"""

from datetime import timedelta
from time import mktime
from unittest.mock import MagicMock, patch

from django.urls import reverse
from django.utils.timezone import now
from rest_framework.test import APITestCase

from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.enterprise.license import LicenseKey
from authentik.enterprise.models import License
from authentik.lib.generators import generate_id


@@ -13,8 +20,21 @@ class TestAPI(APITestCase):
    def setUp(self) -> None:
        self.user = create_test_admin_user()

    @patch(
        "authentik.enterprise.license.LicenseKey.validate",
        MagicMock(
            return_value=LicenseKey(
                aud="",
                exp=int(mktime((now() + timedelta(days=3000)).timetuple())),
                name=generate_id(),
                internal_users=100,
                external_users=100,
            )
        ),
    )
    def test_create(self):
        """Test creation of RAC Provider"""
        License.objects.create(key=generate_id())
        self.client.force_login(self.user)
        response = self.client.post(
            reverse("authentik_api:racprovider-list"),
|
||||
|
||||
from authentik.core.models import Application
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
from authentik.enterprise.providers.rac.models import Endpoint, Protocols, RACProvider
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.policies.dummy.models import DummyPolicy
|
||||
from authentik.policies.models import PolicyBinding
|
||||
from authentik.providers.rac.models import Endpoint, Protocols, RACProvider
|
||||
|
||||
|
||||
class TestEndpointsAPI(APITestCase):
|
@@ -4,14 +4,14 @@ from django.test import TransactionTestCase

from authentik.core.models import Application, AuthenticatedSession
from authentik.core.tests.utils import create_test_admin_user
from authentik.lib.generators import generate_id
from authentik.providers.rac.models import (
from authentik.enterprise.providers.rac.models import (
    ConnectionToken,
    Endpoint,
    Protocols,
    RACPropertyMapping,
    RACProvider,
)
from authentik.lib.generators import generate_id


class TestModels(TransactionTestCase):
@@ -1,17 +1,23 @@
"""RAC Views tests"""

from datetime import timedelta
from json import loads
from time import mktime
from unittest.mock import MagicMock, patch

from django.urls import reverse
from django.utils.timezone import now
from rest_framework.test import APITestCase

from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.enterprise.license import LicenseKey
from authentik.enterprise.models import License
from authentik.enterprise.providers.rac.models import Endpoint, Protocols, RACProvider
from authentik.lib.generators import generate_id
from authentik.policies.denied import AccessDeniedResponse
from authentik.policies.dummy.models import DummyPolicy
from authentik.policies.models import PolicyBinding
from authentik.providers.rac.models import Endpoint, Protocols, RACProvider


class TestRACViews(APITestCase):
@@ -33,8 +39,21 @@ class TestRACViews(APITestCase):
            provider=self.provider,
        )

    @patch(
        "authentik.enterprise.license.LicenseKey.validate",
        MagicMock(
            return_value=LicenseKey(
                aud="",
                exp=int(mktime((now() + timedelta(days=3000)).timetuple())),
                name=generate_id(),
                internal_users=100,
                external_users=100,
            )
        ),
    )
    def test_no_policy(self):
        """Test request"""
        License.objects.create(key=generate_id())
        self.client.force_login(self.user)
        response = self.client.get(
            reverse(
@@ -51,6 +70,18 @@ class TestRACViews(APITestCase):
        final_response = self.client.get(next_url)
        self.assertEqual(final_response.status_code, 200)

    @patch(
        "authentik.enterprise.license.LicenseKey.validate",
        MagicMock(
            return_value=LicenseKey(
                aud="",
                exp=int(mktime((now() + timedelta(days=3000)).timetuple())),
                name=generate_id(),
                internal_users=100,
                external_users=100,
            )
        ),
    )
    def test_app_deny(self):
        """Test request (deny on app level)"""
        PolicyBinding.objects.create(
@@ -58,6 +89,7 @@ class TestRACViews(APITestCase):
            policy=DummyPolicy.objects.create(name="deny", result=False, wait_min=1, wait_max=2),
            order=0,
        )
        License.objects.create(key=generate_id())
        self.client.force_login(self.user)
        response = self.client.get(
            reverse(
@@ -67,6 +99,18 @@ class TestRACViews(APITestCase):
        )
        self.assertIsInstance(response, AccessDeniedResponse)

    @patch(
        "authentik.enterprise.license.LicenseKey.validate",
        MagicMock(
            return_value=LicenseKey(
                aud="",
                exp=int(mktime((now() + timedelta(days=3000)).timetuple())),
                name=generate_id(),
                internal_users=100,
                external_users=100,
            )
        ),
    )
    def test_endpoint_deny(self):
        """Test request (deny on endpoint level)"""
        PolicyBinding.objects.create(
@@ -74,6 +118,7 @@ class TestRACViews(APITestCase):
            policy=DummyPolicy.objects.create(name="deny", result=False, wait_min=1, wait_max=2),
            order=0,
        )
        License.objects.create(key=generate_id())
        self.client.force_login(self.user)
        response = self.client.get(
            reverse(
@@ -4,14 +4,14 @@ from channels.auth import AuthMiddleware
from channels.sessions import CookieMiddleware
from django.urls import path

from authentik.enterprise.providers.rac.api.connection_tokens import ConnectionTokenViewSet
from authentik.enterprise.providers.rac.api.endpoints import EndpointViewSet
from authentik.enterprise.providers.rac.api.property_mappings import RACPropertyMappingViewSet
from authentik.enterprise.providers.rac.api.providers import RACProviderViewSet
from authentik.enterprise.providers.rac.consumer_client import RACClientConsumer
from authentik.enterprise.providers.rac.consumer_outpost import RACOutpostConsumer
from authentik.enterprise.providers.rac.views import RACInterface, RACStartView
from authentik.outposts.channels import TokenOutpostMiddleware
from authentik.providers.rac.api.connection_tokens import ConnectionTokenViewSet
from authentik.providers.rac.api.endpoints import EndpointViewSet
from authentik.providers.rac.api.property_mappings import RACPropertyMappingViewSet
from authentik.providers.rac.api.providers import RACProviderViewSet
from authentik.providers.rac.consumer_client import RACClientConsumer
from authentik.providers.rac.consumer_outpost import RACOutpostConsumer
from authentik.providers.rac.views import RACInterface, RACStartView
from authentik.root.asgi_middleware import SessionMiddleware
from authentik.root.middleware import ChannelsLoggingMiddleware
@@ -10,6 +10,8 @@ from django.utils.translation import gettext as _

from authentik.core.models import Application, AuthenticatedSession
from authentik.core.views.interface import InterfaceView
from authentik.enterprise.policy import EnterprisePolicyAccessView
from authentik.enterprise.providers.rac.models import ConnectionToken, Endpoint, RACProvider
from authentik.events.models import Event, EventAction
from authentik.flows.challenge import RedirectChallenge
from authentik.flows.exceptions import FlowNonApplicableException
@@ -18,11 +20,9 @@ from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, FlowPlanner
from authentik.flows.stage import RedirectStage
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.engine import PolicyEngine
from authentik.policies.views import PolicyAccessView
from authentik.providers.rac.models import ConnectionToken, Endpoint, RACProvider


class RACStartView(PolicyAccessView):
class RACStartView(EnterprisePolicyAccessView):
    """Start a RAC connection by checking access and creating a connection token"""

    endpoint: Endpoint
@@ -16,6 +16,7 @@ TENANT_APPS = [
    "authentik.enterprise.audit",
    "authentik.enterprise.providers.google_workspace",
    "authentik.enterprise.providers.microsoft_entra",
    "authentik.enterprise.providers.rac",
    "authentik.enterprise.providers.ssf",
    "authentik.enterprise.stages.authenticator_endpoint_gdtc",
    "authentik.enterprise.stages.source",
@@ -9,16 +9,13 @@ from django.utils.timezone import now
from guardian.shortcuts import get_anonymous_user

from authentik.core.models import Source, User
from authentik.core.sources.flow_manager import (
    SESSION_KEY_OVERRIDE_FLOW_TOKEN,
    SESSION_KEY_SOURCE_FLOW_STAGES,
)
from authentik.core.sources.flow_manager import SESSION_KEY_OVERRIDE_FLOW_TOKEN
from authentik.core.types import UILoginButton
from authentik.enterprise.stages.source.models import SourceStage
from authentik.flows.challenge import Challenge, ChallengeResponse
from authentik.flows.models import FlowToken, in_memory_stage
from authentik.flows.models import FlowToken
from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED
from authentik.flows.stage import ChallengeStageView, StageView
from authentik.flows.stage import ChallengeStageView
from authentik.lib.utils.time import timedelta_from_string

PLAN_CONTEXT_RESUME_TOKEN = "resume_token" # nosec
@@ -52,7 +49,6 @@ class SourceStageView(ChallengeStageView):
    def get_challenge(self, *args, **kwargs) -> Challenge:
        resume_token = self.create_flow_token()
        self.request.session[SESSION_KEY_OVERRIDE_FLOW_TOKEN] = resume_token
        self.request.session[SESSION_KEY_SOURCE_FLOW_STAGES] = [in_memory_stage(SourceStageFinal)]
        return self.login_button.challenge

    def create_flow_token(self) -> FlowToken:
@@ -81,19 +77,3 @@ class SourceStageView(ChallengeStageView):

    def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
        return self.executor.stage_ok()


class SourceStageFinal(StageView):
    """Dynamic stage injected in the source flow manager. This is injected in the
    flow the source flow manager picks (authentication or enrollment), and will run at the end.
    This stage uses the override flow token to resume execution of the initial flow the
    source stage is bound to."""

    def dispatch(self):
        token: FlowToken = self.request.session.get(SESSION_KEY_OVERRIDE_FLOW_TOKEN)
        self._logger.info("Replacing source flow with overridden flow", flow=token.flow.slug)
        plan = token.plan
        plan.context[PLAN_CONTEXT_IS_RESTORED] = token
        response = plan.to_redirect(self.request, token.flow)
        token.delete()
        return response
@@ -1,54 +0,0 @@
"""Email utility functions"""


def mask_email(email: str | None) -> str | None:
    """Mask email address for privacy

    Args:
        email: Email address to mask
    Returns:
        Masked email address or None if input is None
    Example:
        mask_email("myname@company.org")
        'm*****@c******.org'
    """
    if not email:
        return None

    # Basic email format validation
    if email.count("@") != 1:
        raise ValueError("Invalid email format: Must contain exactly one '@' symbol")

    local, domain = email.split("@")
    if not local or not domain:
        raise ValueError("Invalid email format: Local and domain parts cannot be empty")

    domain_parts = domain.split(".")
    if len(domain_parts) < 2: # noqa: PLR2004
        raise ValueError("Invalid email format: Domain must contain at least one dot")

    limit = 2

    # Mask local part (keep first char)
    if len(local) <= limit:
        masked_local = "*" * len(local)
    else:
        masked_local = local[0] + "*" * (len(local) - 1)

    # Mask each domain part except the last one (TLD)
    masked_domain_parts = []
    for _i, part in enumerate(domain_parts[:-1]): # Process all parts except TLD
        if not part: # Check for empty parts (consecutive dots)
            raise ValueError("Invalid email format: Domain parts cannot be empty")
        if len(part) <= limit:
            masked_part = "*" * len(part)
        else:
            masked_part = part[0] + "*" * (len(part) - 1)
        masked_domain_parts.append(masked_part)

    # Add TLD unchanged
    if not domain_parts[-1]: # Check if TLD is empty
        raise ValueError("Invalid email format: TLD cannot be empty")
    masked_domain_parts.append(domain_parts[-1])

    return f"{masked_local}@{'.'.join(masked_domain_parts)}"
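For reference, a self-contained sketch of the masking rule implemented by the helper removed above (this demo function is illustrative only, not repository code); the expected output matches the docstring:

def mask_email_demo(email: str) -> str:
    # Keep the first character of the local part and of every domain label except
    # the TLD, replace the rest with '*'; labels of two characters or fewer are
    # masked entirely.
    def mask(part: str) -> str:
        return part[0] + "*" * (len(part) - 1) if len(part) > 2 else "*" * len(part)

    local, domain = email.split("@")
    labels = domain.split(".")
    return f"{mask(local)}@{'.'.join([mask(p) for p in labels[:-1]] + [labels[-1]])}"

print(mask_email_demo("myname@company.org"))  # m*****@c******.org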
@@ -19,6 +19,7 @@ from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer
from authentik.core.models import Provider
from authentik.enterprise.license import LicenseKey
from authentik.enterprise.providers.rac.models import RACProvider
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
from authentik.outposts.api.service_connections import ServiceConnectionSerializer
from authentik.outposts.apps import MANAGED_OUTPOST, MANAGED_OUTPOST_NAME
@@ -30,7 +31,6 @@ from authentik.outposts.models import (
)
from authentik.providers.ldap.models import LDAPProvider
from authentik.providers.proxy.models import ProxyProvider
from authentik.providers.rac.models import RACProvider
from authentik.providers.radius.models import RadiusProvider
@@ -128,6 +128,12 @@ class OutpostConsumer(JsonWebsocketConsumer):
            state.args.update(msg.args)
        elif msg.instruction == WebsocketMessageInstruction.ACK:
            return
        elif msg.instruction == WebsocketMessageInstruction.PROVIDER_SPECIFIC:
            if "response_channel" not in msg.args:
                return
            self.logger.debug("Posted response to channel", msg=msg)
            async_to_sync(self.channel_layer.send)(msg.args.get("response_channel"), content)
            return
        GAUGE_OUTPOSTS_LAST_UPDATE.labels(
            tenant=connection.schema_name,
            outpost=self.outpost.name,
authentik/outposts/http.py (new file, 86 lines)

@@ -0,0 +1,86 @@
from base64 import b64decode
from dataclasses import asdict, dataclass
from random import choice
from typing import Any
from uuid import uuid4

from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from channels_redis.pubsub import RedisPubSubChannelLayer
from requests.adapters import BaseAdapter
from requests.models import PreparedRequest, Response
from requests.utils import CaseInsensitiveDict
from structlog.stdlib import get_logger

from authentik.outposts.models import Outpost


@dataclass
class OutpostPreparedRequest:
    uid: str
    method: str
    url: str
    headers: dict[str, str]
    body: Any
    ssl_verify: bool
    timeout: int

    @staticmethod
    def from_requests(req: PreparedRequest) -> "OutpostPreparedRequest":
        return OutpostPreparedRequest(
            uid=str(uuid4()),
            method=req.method,
            url=req.url,
            headers=req.headers._store,
            body=req.body,
            ssl_verify=True,
            timeout=0,
        )

    @property
    def response_channel(self) -> str:
        return f"authentik_outpost_http_response_{self.uid}"


class OutpostHTTPAdapter(BaseAdapter):
    """Requests Adapter that sends HTTP requests via a specified Outpost"""

    def __init__(self, outpost: Outpost, default_timeout=10):
        super().__init__()
        self.__outpost = outpost
        self.__logger = get_logger().bind()
        self.__layer: RedisPubSubChannelLayer = get_channel_layer()
        self.default_timeout = default_timeout

    def parse_response(self, raw_response: dict, req: PreparedRequest) -> Response:
        res = Response()
        res.request = req
        res.status_code = raw_response.get("status")
        res.url = raw_response.get("final_url")
        res.headers = CaseInsensitiveDict(raw_response.get("headers"))
        res._content = b64decode(raw_response.get("body"))
        return res

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        # Convert request so we can send it to the outpost
        converted = OutpostPreparedRequest.from_requests(request)
        converted.ssl_verify = verify
        converted.timeout = timeout if timeout else self.default_timeout
        # Pick one of the outpost instances
        state = choice(self.__outpost.state) # nosec
        self.__logger.debug("sending HTTP request to outpost", uid=converted.uid)
        async_to_sync(self.__layer.send)(
            state.uid,
            {
                "type": "event.provider.specific",
                "sub_type": "http_request",
                "response_channel": converted.response_channel,
                "request": asdict(converted),
            },
        )
        self.__logger.debug("receiving HTTP response from outpost", uid=converted.uid)
        raw_response = async_to_sync(self.__layer.receive)(
            converted.response_channel,
        )
        self.__logger.debug("received HTTP response from outpost", uid=converted.uid)
        return self.parse_response(raw_response.get("args", {}).get("response", {}), request)
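A usage sketch for the adapter above (the outpost lookup and the target URL are illustrative): it is mounted on a requests Session so that outgoing calls are tunnelled through a connected outpost, mirroring what the SCIM client change further down does.

from requests import Session

from authentik.outposts.http import OutpostHTTPAdapter
from authentik.outposts.models import Outpost

# Hypothetical lookup purely for illustration; any Outpost instance with a
# connected instance (non-empty .state) would do.
outpost = Outpost.objects.first()

session = Session()
adapter = OutpostHTTPAdapter(outpost, default_timeout=10)
session.mount("https://", adapter)
session.mount("http://", adapter)

# The request is serialized, sent over the outpost's channel, and the outpost's
# HTTP response is decoded back into a regular requests.Response.
response = session.get("https://scim.example.com/v2/ServiceProviderConfig")
print(response.status_code)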
@@ -98,6 +98,7 @@ class OutpostType(models.TextChoices):
    LDAP = "ldap"
    RADIUS = "radius"
    RAC = "rac"
    SCIM = "scim"


def default_outpost_config(host: str | None = None):
@@ -18,6 +18,8 @@ from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION
from structlog.stdlib import get_logger
from yaml import safe_load

from authentik.enterprise.providers.rac.controllers.docker import RACDockerController
from authentik.enterprise.providers.rac.controllers.kubernetes import RACKubernetesController
from authentik.events.models import TaskStatus
from authentik.events.system_tasks import SystemTask, prefill_task
from authentik.lib.config import CONFIG
@@ -39,17 +41,17 @@ from authentik.providers.ldap.controllers.docker import LDAPDockerController
from authentik.providers.ldap.controllers.kubernetes import LDAPKubernetesController
from authentik.providers.proxy.controllers.docker import ProxyDockerController
from authentik.providers.proxy.controllers.kubernetes import ProxyKubernetesController
from authentik.providers.rac.controllers.docker import RACDockerController
from authentik.providers.rac.controllers.kubernetes import RACKubernetesController
from authentik.providers.radius.controllers.docker import RadiusDockerController
from authentik.providers.radius.controllers.kubernetes import RadiusKubernetesController
from authentik.providers.scim.controllers.docker import SCIMDockerController
from authentik.providers.scim.controllers.kubernetes import SCIMKubernetesController
from authentik.root.celery import CELERY_APP

LOGGER = get_logger()
CACHE_KEY_OUTPOST_DOWN = "goauthentik.io/outposts/teardown/%s"


def controller_for_outpost(outpost: Outpost) -> type[BaseController] | None:
def controller_for_outpost(outpost: Outpost) -> type[BaseController] | None: # noqa: PLR0911
    """Get a controller for the outpost, when a service connection is defined"""
    if not outpost.service_connection:
        return None
@@ -74,6 +76,11 @@ def controller_for_outpost(outpost: Outpost) -> type[BaseController] | None:
            return RACDockerController
        if isinstance(service_connection, KubernetesServiceConnection):
            return RACKubernetesController
    if outpost.type == OutpostType.SCIM:
        if isinstance(service_connection, DockerServiceConnection):
            return SCIMDockerController
        if isinstance(service_connection, KubernetesServiceConnection):
            return SCIMKubernetesController
    return None
@@ -42,12 +42,6 @@ class GeoIPPolicySerializer(CountryFieldMixin, PolicySerializer):
            "asns",
            "countries",
            "countries_obj",
            "check_history_distance",
            "history_max_distance_km",
            "distance_tolerance_km",
            "history_login_count",
            "check_impossible_travel",
            "impossible_tolerance_km",
        ]
@@ -1,43 +0,0 @@
# Generated by Django 5.0.10 on 2025-01-02 20:40

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_policies_geoip", "0001_initial"),
    ]

    operations = [
        migrations.AddField(
            model_name="geoippolicy",
            name="check_history_distance",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="geoippolicy",
            name="check_impossible_travel",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="geoippolicy",
            name="distance_tolerance_km",
            field=models.PositiveIntegerField(default=50),
        ),
        migrations.AddField(
            model_name="geoippolicy",
            name="history_login_count",
            field=models.PositiveIntegerField(default=5),
        ),
        migrations.AddField(
            model_name="geoippolicy",
            name="history_max_distance_km",
            field=models.PositiveBigIntegerField(default=100),
        ),
        migrations.AddField(
            model_name="geoippolicy",
            name="impossible_tolerance_km",
            field=models.PositiveIntegerField(default=100),
        ),
    ]
@@ -4,21 +4,15 @@ from itertools import chain

from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils.timezone import now
from django.utils.translation import gettext as _
from django_countries.fields import CountryField
from geopy import distance
from rest_framework.serializers import BaseSerializer

from authentik.events.context_processors.geoip import GeoIPDict
from authentik.events.models import Event, EventAction
from authentik.policies.exceptions import PolicyException
from authentik.policies.geoip.exceptions import GeoIPNotFoundException
from authentik.policies.models import Policy
from authentik.policies.types import PolicyRequest, PolicyResult

MAX_DISTANCE_HOUR_KM = 1000


class GeoIPPolicy(Policy):
    """Ensure the user satisfies requirements of geography or network topology, based on IP
@@ -27,15 +21,6 @@ class GeoIPPolicy(Policy):
    asns = ArrayField(models.IntegerField(), blank=True, default=list)
    countries = CountryField(multiple=True, blank=True)

    distance_tolerance_km = models.PositiveIntegerField(default=50)

    check_history_distance = models.BooleanField(default=False)
    history_max_distance_km = models.PositiveBigIntegerField(default=100)
    history_login_count = models.PositiveIntegerField(default=5)

    check_impossible_travel = models.BooleanField(default=False)
    impossible_tolerance_km = models.PositiveIntegerField(default=100)

    @property
    def serializer(self) -> type[BaseSerializer]:
        from authentik.policies.geoip.api import GeoIPPolicySerializer
@@ -52,27 +37,21 @@ class GeoIPPolicy(Policy):
        - the client IP is advertised by an autonomous system with ASN in the `asns`
        - the client IP is geolocated in a country of `countries`
        """
        static_results: list[PolicyResult] = []
        dynamic_results: list[PolicyResult] = []
        results: list[PolicyResult] = []

        if self.asns:
            static_results.append(self.passes_asn(request))
            results.append(self.passes_asn(request))
        if self.countries:
            static_results.append(self.passes_country(request))
            results.append(self.passes_country(request))

        if self.check_history_distance or self.check_impossible_travel:
            dynamic_results.append(self.passes_distance(request))

        if not static_results and not dynamic_results:
        if not results:
            return PolicyResult(True)

        passing = any(r.passing for r in static_results) and all(r.passing for r in dynamic_results)
        messages = chain(
            *[r.messages for r in static_results], *[r.messages for r in dynamic_results]
        )
        passing = any(r.passing for r in results)
        messages = chain(*[r.messages for r in results])

        result = PolicyResult(passing, *messages)
        result.source_results = list(chain(static_results, dynamic_results))
        result.source_results = results

        return result

@@ -94,7 +73,7 @@ class GeoIPPolicy(Policy):

    def passes_country(self, request: PolicyRequest) -> PolicyResult:
        # This is not a single get chain because `request.context` can contain `{ "geoip": None }`.
        geoip_data: GeoIPDict | None = request.context.get("geoip")
        geoip_data = request.context.get("geoip")
        country = geoip_data.get("country") if geoip_data else None

        if not country:
@@ -108,42 +87,6 @@ class GeoIPPolicy(Policy):

        return PolicyResult(True)

    def passes_distance(self, request: PolicyRequest) -> PolicyResult:
        """Check if current policy execution is out of distance range compared
        to previous authentication requests"""
        # Get previous login event and GeoIP data
        previous_logins = Event.objects.filter(
            action=EventAction.LOGIN, user__pk=request.user.pk, context__geo__isnull=False
        ).order_by("-created")[: self.history_login_count]
        _now = now()
        geoip_data: GeoIPDict | None = request.context.get("geoip")
        if not geoip_data:
            return PolicyResult(False)
        for previous_login in previous_logins:
            previous_login_geoip: GeoIPDict = previous_login.context["geo"]

            # Figure out distance
            dist = distance.geodesic(
                (previous_login_geoip["lat"], previous_login_geoip["long"]),
                (geoip_data["lat"], geoip_data["long"]),
            )
            if self.check_history_distance and dist.km >= (
                self.history_max_distance_km + self.distance_tolerance_km
            ):
                return PolicyResult(
                    False, _("Distance from previous authentication is larger than threshold.")
                )
            # Check if distance between `previous_login` and now is more
            # than max distance per hour times the amount of hours since the previous login
            # (round down to the lowest closest time of hours)
            # clamped to be at least 1 hour
            rel_time_hours = max(int((_now - previous_login.created).total_seconds() / 3600), 1)
            if self.check_impossible_travel and dist.km >= (
                (MAX_DISTANCE_HOUR_KM * rel_time_hours) + self.distance_tolerance_km
            ):
                return PolicyResult(False, _("Distance is further than possible."))
        return PolicyResult(True)

    class Meta(Policy.PolicyMeta):
        verbose_name = _("GeoIP Policy")
        verbose_name_plural = _("GeoIP Policies")
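A standalone sketch of the distance checks above, using the same geopy call and the test coordinates from the removed tests further down (the threshold values here are illustrative defaults, not pulled from a real policy instance):

from geopy import distance

MAX_DISTANCE_HOUR_KM = 1000   # same constant as in the hunk above
distance_tolerance_km = 50    # illustrative tolerance
history_max_distance_km = 100

previous_login = (55.868351, -104.441011)  # location in Canada (from the tests below)
current_request = (50.950613, 20.363679)   # location in Poland (from the tests below)
hours_since_login = 1                      # clamped to at least one hour

dist_km = distance.geodesic(previous_login, current_request).km
fails_history = dist_km >= history_max_distance_km + distance_tolerance_km
fails_impossible = dist_km >= MAX_DISTANCE_HOUR_KM * hours_since_login + distance_tolerance_km
print(fails_history, fails_impossible)  # both True for coordinates this far apart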
@@ -1,10 +1,8 @@
"""geoip policy tests"""

from django.test import TestCase
from guardian.shortcuts import get_anonymous_user

from authentik.core.tests.utils import create_test_user
from authentik.events.models import Event, EventAction
from authentik.events.utils import get_user
from authentik.policies.engine import PolicyRequest, PolicyResult
from authentik.policies.exceptions import PolicyException
from authentik.policies.geoip.exceptions import GeoIPNotFoundException
@@ -16,8 +14,8 @@ class TestGeoIPPolicy(TestCase):

    def setUp(self):
        super().setUp()
        self.user = create_test_user()
        self.request = PolicyRequest(self.user)

        self.request = PolicyRequest(get_anonymous_user())

        self.context_disabled_geoip = {}
        self.context_unknown_ip = {"asn": None, "geoip": None}
@@ -128,70 +126,3 @@ class TestGeoIPPolicy(TestCase):
        result: PolicyResult = policy.passes(self.request)

        self.assertTrue(result.passing)

    def test_history(self):
        """Test history checks"""
        Event.objects.create(
            action=EventAction.LOGIN,
            user=get_user(self.user),
            context={
                # Random location in Canada
                "geo": {"lat": 55.868351, "long": -104.441011},
            },
        )
        # Random location in Poland
        self.request.context["geoip"] = {"lat": 50.950613, "long": 20.363679}

        policy = GeoIPPolicy.objects.create(check_history_distance=True)

        result: PolicyResult = policy.passes(self.request)
        self.assertFalse(result.passing)

    def test_history_no_data(self):
        """Test history checks (with no geoip data in context)"""
        Event.objects.create(
            action=EventAction.LOGIN,
            user=get_user(self.user),
            context={
                # Random location in Canada
                "geo": {"lat": 55.868351, "long": -104.441011},
            },
        )

        policy = GeoIPPolicy.objects.create(check_history_distance=True)

        result: PolicyResult = policy.passes(self.request)
        self.assertFalse(result.passing)

    def test_history_impossible_travel(self):
        """Test history checks"""
        Event.objects.create(
            action=EventAction.LOGIN,
            user=get_user(self.user),
            context={
                # Random location in Canada
                "geo": {"lat": 55.868351, "long": -104.441011},
            },
        )
        # Random location in Poland
        self.request.context["geoip"] = {"lat": 50.950613, "long": 20.363679}

        policy = GeoIPPolicy.objects.create(check_impossible_travel=True)

        result: PolicyResult = policy.passes(self.request)
        self.assertFalse(result.passing)

    def test_history_no_geoip(self):
        """Test history checks (previous login with no geoip data)"""
        Event.objects.create(
            action=EventAction.LOGIN,
            user=get_user(self.user),
            context={},
        )
        # Random location in Poland
        self.request.context["geoip"] = {"lat": 50.950613, "long": 20.363679}

        policy = GeoIPPolicy.objects.create(check_history_distance=True)

        result: PolicyResult = policy.passes(self.request)
        self.assertFalse(result.passing)
@@ -148,10 +148,10 @@ class PasswordPolicy(Policy):
            user_inputs.append(request.user.email)
        if request.http_request:
            user_inputs.append(request.http_request.brand.branding_title)
        # Only calculate result for the first 72 characters, as with over 100 char
        # Only calculate result for the first 100 characters, as with over 100 char
        # long passwords we can be reasonably sure that they'll surpass the score anyways
        # See https://github.com/dropbox/zxcvbn#runtime-latency
        results = zxcvbn(password[:72], user_inputs)
        results = zxcvbn(password[:100], user_inputs)
        LOGGER.debug("password failed", check="zxcvbn", score=results["score"])
        result = PolicyResult(results["score"] > self.zxcvbn_score_threshold)
        if not result.passing:
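A standalone sketch of the zxcvbn call above (the password and user inputs are illustrative): only the first 100 characters are scored, and user-related strings pull the score down when they occur in the password.

from zxcvbn import zxcvbn

password = "correct horse battery staple " * 8   # well over 100 characters
user_inputs = ["jane.doe", "jane@example.com", "Example Corp"]

# Truncate exactly like the policy does before scoring.
results = zxcvbn(password[:100], user_inputs)
print(results["score"])  # integer from 0 (weakest) to 4 (strongest)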
@@ -1,14 +0,0 @@
"""RAC app config"""

from django.apps import AppConfig


class AuthentikProviderRAC(AppConfig):
    """authentik rac app config"""

    name = "authentik.providers.rac"
    label = "authentik_providers_rac"
    verbose_name = "authentik Providers.RAC"
    default = True
    mountpoint = ""
    ws_mountpoint = "authentik.providers.rac.urls"
@@ -19,6 +19,7 @@ from authentik.lib.sync.outgoing.exceptions import (
    TransientSyncException,
)
from authentik.lib.utils.http import get_http_session
from authentik.outposts.http import OutpostHTTPAdapter
from authentik.providers.scim.clients.exceptions import SCIMRequestException
from authentik.providers.scim.clients.schema import ServiceProviderConfiguration
from authentik.providers.scim.models import SCIMProvider
@@ -41,8 +42,7 @@ class SCIMClient[TModel: "Model", TConnection: "Model", TSchema: "BaseModel"](

    def __init__(self, provider: SCIMProvider):
        super().__init__(provider)
        self._session = get_http_session()
        self._session.verify = provider.verify_certificates
        self._session = self.get_session(provider)
        self.provider = provider
        # Remove trailing slashes as we assume the URL doesn't have any
        base_url = provider.url
@@ -52,6 +52,15 @@ class SCIMClient[TModel: "Model", TConnection: "Model", TSchema: "BaseModel"](
        self.token = provider.token
        self._config = self.get_service_provider_config()

    def get_session(self, provider: SCIMProvider):
        session = get_http_session()
        if self.provider.outpost_set.exists():
            adapter = OutpostHTTPAdapter()
            session.mount("https://", adapter)
            session.mount("http://", adapter)
        session.verify = provider.verify_certificates
        return session

    def _request(self, method: str, path: str, **kwargs) -> dict:
        """Wrapper to send a request to the full URL"""
        try:
authentik/providers/scim/controllers/docker.py (new file, 12 lines)

@@ -0,0 +1,12 @@
"""SCIM Provider Docker Controller"""

from authentik.outposts.controllers.docker import DockerController
from authentik.outposts.models import DockerServiceConnection, Outpost


class SCIMDockerController(DockerController):
    """SCIM Provider Docker Controller"""

    def __init__(self, outpost: Outpost, connection: DockerServiceConnection):
        super().__init__(outpost, connection)
        self.deployment_ports = []
authentik/providers/scim/controllers/kubernetes.py (new file, 14 lines)
@ -0,0 +1,14 @@
"""SCIM Provider Kubernetes Controller"""

from authentik.outposts.controllers.k8s.service import ServiceReconciler
from authentik.outposts.controllers.kubernetes import KubernetesController
from authentik.outposts.models import KubernetesServiceConnection, Outpost


class SCIMKubernetesController(KubernetesController):
    """SCIM Provider Kubernetes Controller"""

    def __init__(self, outpost: Outpost, connection: KubernetesServiceConnection):
        super().__init__(outpost, connection)
        self.deployment_ports = []
        del self.reconcilers[ServiceReconciler.reconciler_name()]
@ -2,7 +2,7 @@

from django.apps import apps
from django.contrib.auth.models import Permission
from django.db.models import QuerySet
from django.db.models import Q, QuerySet
from django_filters.filters import ModelChoiceFilter
from django_filters.filterset import FilterSet
from django_filters.rest_framework import DjangoFilterBackend
@ -18,6 +18,7 @@ from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.permissions import IsAuthenticated
from rest_framework.viewsets import ReadOnlyModelViewSet

from authentik.blueprints.v1.importer import excluded_models
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.core.models import User
from authentik.lib.validators import RequiredTogetherValidator
@ -105,13 +106,13 @@ class RBACPermissionViewSet(ReadOnlyModelViewSet):
    ]

    def get_queryset(self) -> QuerySet:
        return (
            Permission.objects.all()
            .select_related("content_type")
            .filter(
                content_type__app_label__startswith="authentik",
            )
        )
        query = Q()
        for model in excluded_models():
            query |= Q(
                content_type__app_label=model._meta.app_label,
                content_type__model=model._meta.model_name,
            )
        return Permission.objects.all().select_related("content_type").exclude(query)


class PermissionAssignSerializer(PassiveSerializer):
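As an aside, a minimal runnable sketch of the Q-object exclusion pattern introduced in the hunk above: OR together one `Q` per (app_label, model) pair and exclude every match in a single query. The app/model pairs listed here are hypothetical.

```python
# Build one Q per excluded content type, OR them together, and exclude the union.
from django.contrib.auth.models import Permission
from django.db.models import Q

excluded = [("authentik_core", "usersourceconnection"), ("authentik_events", "systemtask")]

query = Q()
for app_label, model_name in excluded:
    query |= Q(content_type__app_label=app_label, content_type__model=model_name)

visible_permissions = Permission.objects.select_related("content_type").exclude(query)
```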
@ -87,7 +87,6 @@ TENANT_APPS = [
    "authentik.providers.ldap",
    "authentik.providers.oauth2",
    "authentik.providers.proxy",
    "authentik.providers.rac",
    "authentik.providers.radius",
    "authentik.providers.saml",
    "authentik.providers.scim",
@ -101,7 +100,6 @@ TENANT_APPS = [
    "authentik.sources.scim",
    "authentik.stages.authenticator",
    "authentik.stages.authenticator_duo",
    "authentik.stages.authenticator_email",
    "authentik.stages.authenticator_sms",
    "authentik.stages.authenticator_static",
    "authentik.stages.authenticator_totp",
@ -2,7 +2,6 @@

from typing import Any

from requests import RequestException
from structlog.stdlib import get_logger

from authentik.sources.oauth.clients.oauth2 import UserprofileHeaderAuthClient
@ -22,35 +21,10 @@ class AzureADOAuthRedirect(OAuthRedirect):
    }


class AzureADClient(UserprofileHeaderAuthClient):
    """Fetch AzureAD group information"""

    def get_profile_info(self, token):
        profile_data = super().get_profile_info(token)
        if "https://graph.microsoft.com/GroupMember.Read.All" not in self.source.additional_scopes:
            return profile_data
        group_response = self.session.request(
            "get",
            "https://graph.microsoft.com/v1.0/me/memberOf",
            headers={"Authorization": f"{token['token_type']} {token['access_token']}"},
        )
        try:
            group_response.raise_for_status()
        except RequestException as exc:
            LOGGER.warning(
                "Unable to fetch user profile",
                exc=exc,
                response=exc.response.text if exc.response else str(exc),
            )
            return None
        profile_data["raw_groups"] = group_response.json()
        return profile_data


class AzureADOAuthCallback(OpenIDConnectOAuth2Callback):
    """AzureAD OAuth2 Callback"""

    client_class = AzureADClient
    client_class = UserprofileHeaderAuthClient

    def get_user_id(self, info: dict[str, str]) -> str:
        # Default try to get `id` for the Graph API endpoint
@ -79,24 +53,8 @@ class AzureADType(SourceType):

    def get_base_user_properties(self, info: dict[str, Any], **kwargs) -> dict[str, Any]:
        mail = info.get("mail", None) or info.get("otherMails", [None])[0]
        # Format group info
        groups = []
        group_id_dict = {}
        for group in info.get("raw_groups", {}).get("value", []):
            if group["@odata.type"] != "#microsoft.graph.group":
                continue
            groups.append(group["id"])
            group_id_dict[group["id"]] = group
        info["raw_groups"] = group_id_dict
        return {
            "username": info.get("userPrincipalName"),
            "email": mail,
            "name": info.get("displayName"),
            "groups": groups,
        }

    def get_base_group_properties(self, source, group_id, **kwargs):
        raw_group = kwargs["info"]["raw_groups"][group_id]
        return {
            "name": raw_group["displayName"],
        }
@ -1,85 +0,0 @@
|
||||
"""AuthenticatorEmailStage API Views"""
|
||||
|
||||
from rest_framework import mixins
|
||||
from rest_framework.viewsets import GenericViewSet, ModelViewSet
|
||||
|
||||
from authentik.core.api.groups import GroupMemberSerializer
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import ModelSerializer
|
||||
from authentik.flows.api.stages import StageSerializer
|
||||
from authentik.stages.authenticator_email.models import AuthenticatorEmailStage, EmailDevice
|
||||
|
||||
|
||||
class AuthenticatorEmailStageSerializer(StageSerializer):
|
||||
"""AuthenticatorEmailStage Serializer"""
|
||||
|
||||
class Meta:
|
||||
model = AuthenticatorEmailStage
|
||||
fields = StageSerializer.Meta.fields + [
|
||||
"configure_flow",
|
||||
"friendly_name",
|
||||
"use_global_settings",
|
||||
"host",
|
||||
"port",
|
||||
"username",
|
||||
"password",
|
||||
"use_tls",
|
||||
"use_ssl",
|
||||
"timeout",
|
||||
"from_address",
|
||||
"subject",
|
||||
"token_expiry",
|
||||
"template",
|
||||
]
|
||||
|
||||
|
||||
class AuthenticatorEmailStageViewSet(UsedByMixin, ModelViewSet):
|
||||
"""AuthenticatorEmailStage Viewset"""
|
||||
|
||||
queryset = AuthenticatorEmailStage.objects.all()
|
||||
serializer_class = AuthenticatorEmailStageSerializer
|
||||
filterset_fields = "__all__"
|
||||
ordering = ["name"]
|
||||
search_fields = ["name"]
|
||||
|
||||
|
||||
class EmailDeviceSerializer(ModelSerializer):
|
||||
"""Serializer for email authenticator devices"""
|
||||
|
||||
user = GroupMemberSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = EmailDevice
|
||||
fields = ["name", "pk", "email", "user"]
|
||||
depth = 2
|
||||
extra_kwargs = {
|
||||
"email": {"read_only": True},
|
||||
}
|
||||
|
||||
|
||||
class EmailDeviceViewSet(
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.UpdateModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
UsedByMixin,
|
||||
mixins.ListModelMixin,
|
||||
GenericViewSet,
|
||||
):
|
||||
"""Viewset for email authenticator devices"""
|
||||
|
||||
queryset = EmailDevice.objects.all()
|
||||
serializer_class = EmailDeviceSerializer
|
||||
search_fields = ["name"]
|
||||
filterset_fields = ["name"]
|
||||
ordering = ["name"]
|
||||
owner_field = "user"
|
||||
|
||||
|
||||
class EmailAdminDeviceViewSet(ModelViewSet):
|
||||
"""Viewset for email authenticator devices (for admins)"""
|
||||
|
||||
queryset = EmailDevice.objects.all()
|
||||
serializer_class = EmailDeviceSerializer
|
||||
search_fields = ["name"]
|
||||
filterset_fields = ["name"]
|
||||
ordering = ["name"]
|
@ -1,12 +0,0 @@
"""Email Authenticator"""

from authentik.blueprints.apps import ManagedAppConfig


class AuthentikStageAuthenticatorEmailConfig(ManagedAppConfig):
    """Email Authenticator App config"""

    name = "authentik.stages.authenticator_email"
    label = "authentik_stages_authenticator_email"
    verbose_name = "authentik Stages.Authenticator.Email"
    default = True
@ -1,132 +0,0 @@
|
||||
# Generated by Django 5.0.10 on 2025-01-27 20:05
|
||||
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
import authentik.lib.utils.time
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("authentik_flows", "0027_auto_20231028_1424"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="AuthenticatorEmailStage",
|
||||
fields=[
|
||||
(
|
||||
"stage_ptr",
|
||||
models.OneToOneField(
|
||||
auto_created=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
parent_link=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
to="authentik_flows.stage",
|
||||
),
|
||||
),
|
||||
("friendly_name", models.TextField(null=True)),
|
||||
(
|
||||
"use_global_settings",
|
||||
models.BooleanField(
|
||||
default=False,
|
||||
help_text="When enabled, global Email connection settings will be used and connection settings below will be ignored.",
|
||||
),
|
||||
),
|
||||
("host", models.TextField(default="localhost")),
|
||||
("port", models.IntegerField(default=25)),
|
||||
("username", models.TextField(blank=True, default="")),
|
||||
("password", models.TextField(blank=True, default="")),
|
||||
("use_tls", models.BooleanField(default=False)),
|
||||
("use_ssl", models.BooleanField(default=False)),
|
||||
("timeout", models.IntegerField(default=10)),
|
||||
(
|
||||
"from_address",
|
||||
models.EmailField(default="system@authentik.local", max_length=254),
|
||||
),
|
||||
(
|
||||
"token_expiry",
|
||||
models.TextField(
|
||||
default="minutes=30",
|
||||
help_text="Time the token sent is valid (Format: hours=3,minutes=17,seconds=300).",
|
||||
validators=[authentik.lib.utils.time.timedelta_string_validator],
|
||||
),
|
||||
),
|
||||
("subject", models.TextField(default="authentik Sign-in code")),
|
||||
("template", models.TextField(default="email/email_otp.html")),
|
||||
(
|
||||
"configure_flow",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Flow used by an authenticated user to configure this Stage. If empty, user will not be able to configure this stage.",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
to="authentik_flows.flow",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Email Authenticator Setup Stage",
|
||||
"verbose_name_plural": "Email Authenticator Setup Stages",
|
||||
},
|
||||
bases=("authentik_flows.stage", models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="EmailDevice",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("created", models.DateTimeField(auto_now_add=True)),
|
||||
("last_updated", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"name",
|
||||
models.CharField(
|
||||
help_text="The human-readable name of this device.", max_length=64
|
||||
),
|
||||
),
|
||||
(
|
||||
"confirmed",
|
||||
models.BooleanField(default=True, help_text="Is this device ready for use?"),
|
||||
),
|
||||
("token", models.CharField(blank=True, max_length=16, null=True)),
|
||||
(
|
||||
"valid_until",
|
||||
models.DateTimeField(
|
||||
default=django.utils.timezone.now,
|
||||
help_text="The timestamp of the moment of expiry of the saved token.",
|
||||
),
|
||||
),
|
||||
("email", models.EmailField(max_length=254)),
|
||||
("last_used", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"stage",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="authentik_stages_authenticator_email.authenticatoremailstage",
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Email Device",
|
||||
"verbose_name_plural": "Email Devices",
|
||||
"unique_together": {("user", "email")},
|
||||
},
|
||||
),
|
||||
]
|
@ -1,167 +0,0 @@
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.mail.backends.base import BaseEmailBackend
|
||||
from django.core.mail.backends.smtp import EmailBackend
|
||||
from django.db import models
|
||||
from django.template import TemplateSyntaxError
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.views import View
|
||||
from rest_framework.serializers import BaseSerializer
|
||||
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.flows.exceptions import StageInvalidException
|
||||
from authentik.flows.models import ConfigurableStage, FriendlyNamedStage, Stage
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.models import SerializerModel
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.lib.utils.time import timedelta_string_validator
|
||||
from authentik.stages.authenticator.models import SideChannelDevice
|
||||
from authentik.stages.email.utils import TemplateEmailMessage
|
||||
|
||||
|
||||
class EmailTemplates(models.TextChoices):
|
||||
"""Templates used for rendering the Email"""
|
||||
|
||||
EMAIL_OTP = (
|
||||
"email/email_otp.html",
|
||||
_("Email OTP"),
|
||||
) # nosec
|
||||
|
||||
|
||||
class AuthenticatorEmailStage(ConfigurableStage, FriendlyNamedStage, Stage):
|
||||
"""Use Email-based authentication instead of authenticator-based."""
|
||||
|
||||
use_global_settings = models.BooleanField(
|
||||
default=False,
|
||||
help_text=_(
|
||||
"When enabled, global Email connection settings will be used and "
|
||||
"connection settings below will be ignored."
|
||||
),
|
||||
)
|
||||
|
||||
host = models.TextField(default="localhost")
|
||||
port = models.IntegerField(default=25)
|
||||
username = models.TextField(default="", blank=True)
|
||||
password = models.TextField(default="", blank=True)
|
||||
use_tls = models.BooleanField(default=False)
|
||||
use_ssl = models.BooleanField(default=False)
|
||||
timeout = models.IntegerField(default=10)
|
||||
from_address = models.EmailField(default="system@authentik.local")
|
||||
|
||||
token_expiry = models.TextField(
|
||||
default="minutes=30",
|
||||
validators=[timedelta_string_validator],
|
||||
help_text=_("Time the token sent is valid (Format: hours=3,minutes=17,seconds=300)."),
|
||||
)
|
||||
subject = models.TextField(default="authentik Sign-in code")
|
||||
template = models.TextField(default=EmailTemplates.EMAIL_OTP)
|
||||
|
||||
@property
|
||||
def serializer(self) -> type[BaseSerializer]:
|
||||
from authentik.stages.authenticator_email.api import AuthenticatorEmailStageSerializer
|
||||
|
||||
return AuthenticatorEmailStageSerializer
|
||||
|
||||
@property
|
||||
def view(self) -> type[View]:
|
||||
from authentik.stages.authenticator_email.stage import AuthenticatorEmailStageView
|
||||
|
||||
return AuthenticatorEmailStageView
|
||||
|
||||
@property
|
||||
def component(self) -> str:
|
||||
return "ak-stage-authenticator-email-form"
|
||||
|
||||
@property
|
||||
def backend_class(self) -> type[BaseEmailBackend]:
|
||||
"""Get the email backend class to use"""
|
||||
return EmailBackend
|
||||
|
||||
@property
|
||||
def backend(self) -> BaseEmailBackend:
|
||||
"""Get fully configured Email Backend instance"""
|
||||
if self.use_global_settings:
|
||||
CONFIG.refresh("email.password")
|
||||
return self.backend_class(
|
||||
host=CONFIG.get("email.host"),
|
||||
port=CONFIG.get_int("email.port"),
|
||||
username=CONFIG.get("email.username"),
|
||||
password=CONFIG.get("email.password"),
|
||||
use_tls=CONFIG.get_bool("email.use_tls", False),
|
||||
use_ssl=CONFIG.get_bool("email.use_ssl", False),
|
||||
timeout=CONFIG.get_int("email.timeout"),
|
||||
)
|
||||
return self.backend_class(
|
||||
host=self.host,
|
||||
port=self.port,
|
||||
username=self.username,
|
||||
password=self.password,
|
||||
use_tls=self.use_tls,
|
||||
use_ssl=self.use_ssl,
|
||||
timeout=self.timeout,
|
||||
)
|
||||
|
||||
def send(self, device: "EmailDevice"):
|
||||
# Lazy import here to avoid circular import
|
||||
from authentik.stages.email.tasks import send_mails
|
||||
|
||||
# Compose the message using templates
|
||||
message = device._compose_email()
|
||||
return send_mails(device.stage, message)
|
||||
|
||||
def __str__(self):
|
||||
return f"Email Authenticator Stage {self.name}"
|
||||
|
||||
class Meta:
|
||||
verbose_name = _("Email Authenticator Setup Stage")
|
||||
verbose_name_plural = _("Email Authenticator Setup Stages")
|
||||
|
||||
|
||||
class EmailDevice(SerializerModel, SideChannelDevice):
|
||||
"""Email Device"""
|
||||
|
||||
user = models.ForeignKey(get_user_model(), on_delete=models.CASCADE)
|
||||
email = models.EmailField()
|
||||
stage = models.ForeignKey(AuthenticatorEmailStage, on_delete=models.CASCADE)
|
||||
last_used = models.DateTimeField(auto_now=True)
|
||||
|
||||
@property
|
||||
def serializer(self) -> type[BaseSerializer]:
|
||||
from authentik.stages.authenticator_email.api import EmailDeviceSerializer
|
||||
|
||||
return EmailDeviceSerializer
|
||||
|
||||
def _compose_email(self) -> TemplateEmailMessage:
|
||||
try:
|
||||
pending_user = self.user
|
||||
stage = self.stage
|
||||
email = self.email
|
||||
|
||||
message = TemplateEmailMessage(
|
||||
subject=_(stage.subject),
|
||||
to=[(pending_user.name, email)],
|
||||
template_name=stage.template,
|
||||
template_context={
|
||||
"user": pending_user,
|
||||
"expires": self.valid_until,
|
||||
"token": self.token,
|
||||
},
|
||||
)
|
||||
return message
|
||||
except TemplateSyntaxError as exc:
|
||||
Event.new(
|
||||
EventAction.CONFIGURATION_ERROR,
|
||||
message=_("Exception occurred while rendering E-mail template"),
|
||||
error=exception_to_string(exc),
|
||||
template=stage.template,
|
||||
).from_http(self.request)
|
||||
raise StageInvalidException from exc
|
||||
|
||||
def __str__(self):
|
||||
if not self.pk:
|
||||
return "New Email Device"
|
||||
return f"Email Device for {self.user_id}"
|
||||
|
||||
class Meta:
|
||||
verbose_name = _("Email Device")
|
||||
verbose_name_plural = _("Email Devices")
|
||||
unique_together = (("user", "email"),)
|
@ -1,177 +0,0 @@
|
||||
"""Email Setup stage"""
|
||||
|
||||
from django.db.models import Q
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django.http.request import QueryDict
|
||||
from django.template.exceptions import TemplateSyntaxError
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.fields import BooleanField, CharField, IntegerField
|
||||
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.flows.challenge import (
|
||||
Challenge,
|
||||
ChallengeResponse,
|
||||
WithUserInfoChallenge,
|
||||
)
|
||||
from authentik.flows.exceptions import StageInvalidException
|
||||
from authentik.flows.stage import ChallengeStageView
|
||||
from authentik.lib.utils.email import mask_email
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
from authentik.stages.authenticator_email.models import (
|
||||
AuthenticatorEmailStage,
|
||||
EmailDevice,
|
||||
)
|
||||
from authentik.stages.email.tasks import send_mails
|
||||
from authentik.stages.email.utils import TemplateEmailMessage
|
||||
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
|
||||
|
||||
SESSION_KEY_EMAIL_DEVICE = "authentik/stages/authenticator_email/email_device"
|
||||
PLAN_CONTEXT_EMAIL = "email"
|
||||
PLAN_CONTEXT_EMAIL_SENT = "email_sent"
|
||||
PLAN_CONTEXT_EMAIL_OVERRIDE = "email"
|
||||
|
||||
|
||||
class AuthenticatorEmailChallenge(WithUserInfoChallenge):
|
||||
"""Authenticator Email Setup challenge"""
|
||||
|
||||
# Set to true if no previous prompt stage set the email
|
||||
# this stage will also check prompt_data.email
|
||||
email = CharField(default=None, allow_blank=True, allow_null=True)
|
||||
email_required = BooleanField(default=True)
|
||||
component = CharField(default="ak-stage-authenticator-email")
|
||||
|
||||
|
||||
class AuthenticatorEmailChallengeResponse(ChallengeResponse):
|
||||
"""Authenticator Email Challenge response, device is set by get_response_instance"""
|
||||
|
||||
device: EmailDevice
|
||||
|
||||
code = IntegerField(required=False)
|
||||
email = CharField(required=False)
|
||||
|
||||
component = CharField(default="ak-stage-authenticator-email")
|
||||
|
||||
def validate(self, attrs: dict) -> dict:
|
||||
"""Check"""
|
||||
if "code" not in attrs:
|
||||
if "email" not in attrs:
|
||||
raise ValidationError("email required")
|
||||
self.device.email = attrs["email"]
|
||||
self.stage.validate_and_send(attrs["email"])
|
||||
return super().validate(attrs)
|
||||
if not self.device.verify_token(str(attrs["code"])):
|
||||
raise ValidationError(_("Code does not match"))
|
||||
self.device.confirmed = True
|
||||
return super().validate(attrs)
|
||||
|
||||
|
||||
class AuthenticatorEmailStageView(ChallengeStageView):
|
||||
"""Authenticator Email Setup stage"""
|
||||
|
||||
response_class = AuthenticatorEmailChallengeResponse
|
||||
|
||||
def validate_and_send(self, email: str):
|
||||
"""Validate email and send message"""
|
||||
pending_user = self.get_pending_user()
|
||||
|
||||
stage: AuthenticatorEmailStage = self.executor.current_stage
|
||||
if EmailDevice.objects.filter(Q(email=email), stage=stage.pk).exists():
|
||||
raise ValidationError(_("Invalid email"))
|
||||
|
||||
device: EmailDevice = self.request.session[SESSION_KEY_EMAIL_DEVICE]
|
||||
|
||||
try:
|
||||
message = TemplateEmailMessage(
|
||||
subject=_(stage.subject),
|
||||
to=[(pending_user.name, email)],
|
||||
language=pending_user.locale(self.request),
|
||||
template_name=stage.template,
|
||||
template_context={
|
||||
"user": pending_user,
|
||||
"expires": device.valid_until,
|
||||
"token": device.token,
|
||||
},
|
||||
)
|
||||
|
||||
send_mails(stage, message)
|
||||
except TemplateSyntaxError as exc:
|
||||
Event.new(
|
||||
EventAction.CONFIGURATION_ERROR,
|
||||
message=_("Exception occurred while rendering E-mail template"),
|
||||
error=exception_to_string(exc),
|
||||
template=stage.template,
|
||||
).from_http(self.request)
|
||||
raise StageInvalidException from exc
|
||||
|
||||
def _has_email(self) -> str | None:
|
||||
context = self.executor.plan.context
|
||||
|
||||
# Check user's email attribute
|
||||
user = self.get_pending_user()
|
||||
if user.email:
|
||||
self.logger.debug("got email from user attributes")
|
||||
return user.email
|
||||
# Check plan context for email
|
||||
if PLAN_CONTEXT_EMAIL in context.get(PLAN_CONTEXT_PROMPT, {}):
|
||||
self.logger.debug("got email from plan context")
|
||||
return context.get(PLAN_CONTEXT_PROMPT, {}).get(PLAN_CONTEXT_EMAIL)
|
||||
# Check device for email
|
||||
if SESSION_KEY_EMAIL_DEVICE in self.request.session:
|
||||
self.logger.debug("got email from device in session")
|
||||
device: EmailDevice = self.request.session[SESSION_KEY_EMAIL_DEVICE]
|
||||
if device.email == "":
|
||||
return None
|
||||
return device.email
|
||||
return None
|
||||
|
||||
def get_challenge(self, *args, **kwargs) -> Challenge:
|
||||
email = self._has_email()
|
||||
return AuthenticatorEmailChallenge(
|
||||
data={
|
||||
"email": mask_email(email),
|
||||
"email_required": email is None,
|
||||
}
|
||||
)
|
||||
|
||||
def get_response_instance(self, data: QueryDict) -> ChallengeResponse:
|
||||
response = super().get_response_instance(data)
|
||||
response.device = self.request.session[SESSION_KEY_EMAIL_DEVICE]
|
||||
return response
|
||||
|
||||
def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
||||
user = self.get_pending_user()
|
||||
|
||||
stage: AuthenticatorEmailStage = self.executor.current_stage
|
||||
if SESSION_KEY_EMAIL_DEVICE not in self.request.session:
|
||||
device = EmailDevice(user=user, confirmed=False, stage=stage, name="Email Device")
|
||||
valid_secs: int = timedelta_from_string(stage.token_expiry).total_seconds()
|
||||
device.generate_token(valid_secs=valid_secs, commit=False)
|
||||
self.request.session[SESSION_KEY_EMAIL_DEVICE] = device
|
||||
if email := self._has_email():
|
||||
device.email = email
|
||||
try:
|
||||
self.validate_and_send(email)
|
||||
except ValidationError as exc:
|
||||
# We had an email given already (at this point only possible from flow
|
||||
# context), but an error occurred while sending (most likely)
|
||||
# due to a duplicate device, so delete the email we got given, reset the state
|
||||
# (ish) and retry
|
||||
device.email = ""
|
||||
self.executor.plan.context.get(PLAN_CONTEXT_PROMPT, {}).pop(
|
||||
PLAN_CONTEXT_EMAIL, None
|
||||
)
|
||||
self.request.session.pop(SESSION_KEY_EMAIL_DEVICE, None)
|
||||
self.logger.warning("failed to send email to pre-set address", exc=exc)
|
||||
return self.get(request, *args, **kwargs)
|
||||
return super().get(request, *args, **kwargs)
|
||||
|
||||
def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
|
||||
"""Email Token is validated by challenge"""
|
||||
device: EmailDevice = self.request.session[SESSION_KEY_EMAIL_DEVICE]
|
||||
if not device.confirmed:
|
||||
return self.challenge_invalid(response)
|
||||
device.save()
|
||||
del self.request.session[SESSION_KEY_EMAIL_DEVICE]
|
||||
return self.executor.stage_ok()
|
@ -1,44 +0,0 @@
{% extends "email/base.html" %}

{% load i18n %}
{% load humanize %}

{% block content %}
<tr>
    <td align="center">
        <h1>
            {% blocktrans with username=user.username %}
            Hi {{ username }},
            {% endblocktrans %}
        </h1>
    </td>
</tr>
<tr>
    <td align="center">
        <table border="0">
            <tr>
                <td align="center" style="max-width: 300px; padding: 20px 0; color: #212124;">
                    {% blocktrans %}
                    Email MFA code.
                    {% endblocktrans %}
                </td>
            </tr>
            <tr>
                <td align="center" class="btn btn-primary">
                    {{ token }}
                </td>
            </tr>
        </table>
    </td>
</tr>
{% endblock %}

{% block sub_content %}
<tr>
    <td style="padding: 20px; font-size: 12px; color: #212124;" align="center">
        {% blocktrans with expires=expires|timeuntil %}
        If you did not request this code, please ignore this email. The code above is valid for {{ expires }}.
        {% endblocktrans %}
    </td>
</tr>
{% endblock %}
@ -1,13 +0,0 @@
{% load i18n %}{% load humanize %}{% autoescape off %}{% blocktrans with username=user.username %}Hi {{ username }},{% endblocktrans %}

{% blocktrans %}
Email MFA code
{% endblocktrans %}
{{ token }}
{% blocktrans with expires=expires|timeuntil %}
If you did not request this code, please ignore this email. The code above is valid for {{ expires }}.
{% endblocktrans %}

--
Powered by goauthentik.io.
{% endautoescape %}
@ -1,340 +0,0 @@
|
||||
"""Test Email Authenticator API"""
|
||||
|
||||
from datetime import timedelta
|
||||
from unittest.mock import MagicMock, PropertyMock, patch
|
||||
|
||||
from django.core import mail
|
||||
from django.core.mail.backends.smtp import EmailBackend
|
||||
from django.db.utils import IntegrityError
|
||||
from django.template.exceptions import TemplateDoesNotExist
|
||||
from django.urls import reverse
|
||||
from django.utils.timezone import now
|
||||
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_user
|
||||
from authentik.flows.models import FlowStageBinding
|
||||
from authentik.flows.tests import FlowTestCase
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.utils.email import mask_email
|
||||
from authentik.stages.authenticator_email.api import (
|
||||
AuthenticatorEmailStageSerializer,
|
||||
EmailDeviceSerializer,
|
||||
)
|
||||
from authentik.stages.authenticator_email.models import AuthenticatorEmailStage, EmailDevice
|
||||
from authentik.stages.authenticator_email.stage import (
|
||||
SESSION_KEY_EMAIL_DEVICE,
|
||||
)
|
||||
from authentik.stages.email.utils import TemplateEmailMessage
|
||||
|
||||
|
||||
class TestAuthenticatorEmailStage(FlowTestCase):
|
||||
"""Test Email Authenticator stage"""
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
self.flow = create_test_flow()
|
||||
self.user = create_test_admin_user()
|
||||
self.user_noemail = create_test_user(email="")
|
||||
self.stage = AuthenticatorEmailStage.objects.create(
|
||||
name="email-authenticator",
|
||||
use_global_settings=True,
|
||||
from_address="test@authentik.local",
|
||||
configure_flow=self.flow,
|
||||
token_expiry="minutes=30",
|
||||
) # nosec
|
||||
self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=0)
|
||||
self.device = EmailDevice.objects.create(
|
||||
user=self.user,
|
||||
stage=self.stage,
|
||||
email="test@authentik.local",
|
||||
)
|
||||
self.client.force_login(self.user)
|
||||
|
||||
def test_device_str(self):
|
||||
"""Test string representation of device"""
|
||||
self.assertEqual(str(self.device), f"Email Device for {self.user.pk}")
|
||||
# Test unsaved device
|
||||
unsaved_device = EmailDevice(
|
||||
user=self.user,
|
||||
stage=self.stage,
|
||||
email="test@authentik.local",
|
||||
)
|
||||
self.assertEqual(str(unsaved_device), "New Email Device")
|
||||
|
||||
def test_stage_str(self):
|
||||
"""Test string representation of stage"""
|
||||
self.assertEqual(str(self.stage), f"Email Authenticator Stage {self.stage.name}")
|
||||
|
||||
def test_token_lifecycle(self):
|
||||
"""Test token generation, validation and expiry"""
|
||||
# Initially no token
|
||||
self.assertIsNone(self.device.token)
|
||||
|
||||
# Generate token
|
||||
self.device.generate_token()
|
||||
token = self.device.token
|
||||
self.assertIsNotNone(token)
|
||||
self.assertIsNotNone(self.device.valid_until)
|
||||
self.assertTrue(self.device.valid_until > now())
|
||||
|
||||
# Verify invalid token
|
||||
self.assertFalse(self.device.verify_token("000000"))
|
||||
|
||||
# Verify correct token (should clear token after verification)
|
||||
self.assertTrue(self.device.verify_token(token))
|
||||
self.assertIsNone(self.device.token)
|
||||
|
||||
def test_stage_no_prefill(self):
|
||||
"""Test stage without prefilled email"""
|
||||
self.client.force_login(self.user_noemail)
|
||||
with patch(
|
||||
"authentik.stages.authenticator_email.models.AuthenticatorEmailStage.backend_class",
|
||||
PropertyMock(return_value=EmailBackend),
|
||||
):
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
)
|
||||
self.assertStageResponse(
|
||||
response,
|
||||
self.flow,
|
||||
self.user_noemail,
|
||||
component="ak-stage-authenticator-email",
|
||||
email_required=True,
|
||||
)
|
||||
|
||||
def test_stage_submit(self):
|
||||
"""Test stage email submission"""
|
||||
# Initialize the flow
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
)
|
||||
self.assertStageResponse(
|
||||
response,
|
||||
self.flow,
|
||||
self.user,
|
||||
component="ak-stage-authenticator-email",
|
||||
email_required=False,
|
||||
)
|
||||
|
||||
# Test email submission with locmem backend
|
||||
def mock_send_mails(stage, *messages):
|
||||
"""Mock send_mails to send directly"""
|
||||
for message in messages:
|
||||
message.send()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"authentik.stages.authenticator_email.models.AuthenticatorEmailStage.backend_class",
|
||||
return_value=EmailBackend,
|
||||
),
|
||||
patch(
|
||||
"authentik.stages.authenticator_email.stage.send_mails",
|
||||
side_effect=mock_send_mails,
|
||||
),
|
||||
):
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
data={"component": "ak-stage-authenticator-email", "email": "test@example.com"},
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertEqual(len(mail.outbox), 1)
|
||||
sent_mail = mail.outbox[0]
|
||||
self.assertEqual(sent_mail.subject, self.stage.subject)
|
||||
self.assertEqual(sent_mail.to, [f"{self.user} <test@example.com>"])
|
||||
# Get from_address from global email config to test if global settings are being used
|
||||
from_address_global = CONFIG.get("email.from")
|
||||
self.assertEqual(sent_mail.from_email, from_address_global)
|
||||
|
||||
self.assertStageResponse(
|
||||
response,
|
||||
self.flow,
|
||||
self.user,
|
||||
component="ak-stage-authenticator-email",
|
||||
response_errors={},
|
||||
email_required=False,
|
||||
)
|
||||
|
||||
def test_email_template(self):
|
||||
"""Test email template rendering"""
|
||||
self.device.generate_token()
|
||||
message = self.device._compose_email()
|
||||
|
||||
self.assertIsInstance(message, TemplateEmailMessage)
|
||||
self.assertEqual(message.subject, self.stage.subject)
|
||||
self.assertEqual(message.to, [f"{self.user.name} <{self.device.email}>"])
|
||||
self.assertTrue(self.device.token in message.body)
|
||||
|
||||
def test_duplicate_email(self):
|
||||
"""Test attempting to use same email twice"""
|
||||
email = "test2@authentik.local"
|
||||
# First device
|
||||
EmailDevice.objects.create(
|
||||
user=self.user,
|
||||
stage=self.stage,
|
||||
email=email,
|
||||
)
|
||||
# Attempt to create second device with same email
|
||||
with self.assertRaises(IntegrityError):
|
||||
EmailDevice.objects.create(
|
||||
user=self.user,
|
||||
stage=self.stage,
|
||||
email=email,
|
||||
)
|
||||
|
||||
def test_token_expiry(self):
|
||||
"""Test token expiration behavior"""
|
||||
self.device.generate_token()
|
||||
token = self.device.token
|
||||
# Set token as expired
|
||||
self.device.valid_until = now() - timedelta(minutes=1)
|
||||
self.device.save()
|
||||
# Verify expired token fails
|
||||
self.assertFalse(self.device.verify_token(token))
|
||||
|
||||
def test_template_errors(self):
|
||||
"""Test handling of template errors"""
|
||||
self.stage.template = "{% invalid template %}"
|
||||
with self.assertRaises(TemplateDoesNotExist):
|
||||
self.stage.send(self.device)
|
||||
|
||||
def test_challenge_response_validation(self):
|
||||
"""Test challenge response validation"""
|
||||
# Initialize the flow
|
||||
self.client.force_login(self.user_noemail)
|
||||
with patch(
|
||||
"authentik.stages.authenticator_email.models.AuthenticatorEmailStage.backend_class",
|
||||
PropertyMock(return_value=EmailBackend),
|
||||
):
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
)
|
||||
|
||||
# Test missing code and email
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
data={"component": "ak-stage-authenticator-email"},
|
||||
)
|
||||
self.assertIn("email required", str(response.content))
|
||||
|
||||
# Test invalid code
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
data={"component": "ak-stage-authenticator-email", "code": "000000"},
|
||||
)
|
||||
self.assertIn("Code does not match", str(response.content))
|
||||
|
||||
# Test valid code
|
||||
self.client.force_login(self.user)
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
)
|
||||
device = self.device
|
||||
token = device.token
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
data={"component": "ak-stage-authenticator-email", "code": token},
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertTrue(device.confirmed)
|
||||
|
||||
def test_challenge_generation(self):
|
||||
"""Test challenge generation"""
|
||||
# Test with masked email
|
||||
with patch(
|
||||
"authentik.stages.authenticator_email.models.AuthenticatorEmailStage.backend_class",
|
||||
PropertyMock(return_value=EmailBackend),
|
||||
):
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
)
|
||||
self.assertStageResponse(
|
||||
response,
|
||||
self.flow,
|
||||
self.user,
|
||||
component="ak-stage-authenticator-email",
|
||||
email_required=False,
|
||||
)
|
||||
masked_email = mask_email(self.user.email)
|
||||
self.assertEqual(masked_email, response.json()["email"])
|
||||
|
||||
# Test without email
|
||||
self.client.force_login(self.user_noemail)
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
)
|
||||
self.assertStageResponse(
|
||||
response,
|
||||
self.flow,
|
||||
self.user_noemail,
|
||||
component="ak-stage-authenticator-email",
|
||||
email_required=True,
|
||||
)
|
||||
self.assertIsNone(response.json()["email"])
|
||||
|
||||
def test_session_management(self):
|
||||
"""Test session device management"""
|
||||
# Test device creation in session
|
||||
with patch(
|
||||
"authentik.stages.authenticator_email.models.AuthenticatorEmailStage.backend_class",
|
||||
PropertyMock(return_value=EmailBackend),
|
||||
):
|
||||
# Delete any existing devices for this test
|
||||
EmailDevice.objects.filter(user=self.user).delete()
|
||||
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
)
|
||||
self.assertIn(SESSION_KEY_EMAIL_DEVICE, self.client.session)
|
||||
device = self.client.session[SESSION_KEY_EMAIL_DEVICE]
|
||||
self.assertIsInstance(device, EmailDevice)
|
||||
self.assertFalse(device.confirmed)
|
||||
self.assertEqual(device.user, self.user)
|
||||
|
||||
# Test device confirmation and cleanup
|
||||
device.confirmed = True
|
||||
device.email = "new_test@authentik.local" # Use a different email
|
||||
self.client.session[SESSION_KEY_EMAIL_DEVICE] = device
|
||||
self.client.session.save()
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
data={"component": "ak-stage-authenticator-email", "code": device.token},
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertTrue(device.confirmed)
|
||||
# Session key should be removed after device is saved
|
||||
device.save()
|
||||
self.assertNotIn(SESSION_KEY_EMAIL_DEVICE, self.client.session)
|
||||
|
||||
def test_model_properties_and_methods(self):
|
||||
"""Test model properties"""
|
||||
device = self.device
|
||||
stage = self.stage
|
||||
|
||||
self.assertEqual(stage.serializer, AuthenticatorEmailStageSerializer)
|
||||
self.assertIsInstance(stage.backend, EmailBackend)
|
||||
self.assertEqual(device.serializer, EmailDeviceSerializer)
|
||||
|
||||
# Test AuthenticatorEmailStage send method
|
||||
with patch(
|
||||
"authentik.stages.authenticator_email.models.AuthenticatorEmailStage.backend_class",
|
||||
return_value=EmailBackend,
|
||||
):
|
||||
self.device.generate_token()
|
||||
# Test EmailDevice _compose_email method
|
||||
message = self.device._compose_email()
|
||||
self.assertIsInstance(message, TemplateEmailMessage)
|
||||
self.assertEqual(message.subject, self.stage.subject)
|
||||
self.assertEqual(message.to, [f"{self.user.name} <{self.device.email}>"])
|
||||
self.assertTrue(self.device.token in message.body)
|
||||
# Test AuthenticatorEmailStage send method
|
||||
self.stage.send(device)
|
||||
|
||||
def test_email_tasks(self):
|
||||
|
||||
email_send_mock = MagicMock()
|
||||
with patch(
|
||||
"authentik.stages.email.tasks.send_mails",
|
||||
email_send_mock,
|
||||
):
|
||||
# Test AuthenticatorEmailStage send method
|
||||
self.stage.send(self.device)
|
||||
email_send_mock.assert_called_once()
|
@ -1,17 +0,0 @@
"""API URLs"""

from authentik.stages.authenticator_email.api import (
    AuthenticatorEmailStageViewSet,
    EmailAdminDeviceViewSet,
    EmailDeviceViewSet,
)

api_urlpatterns = [
    ("authenticators/email", EmailDeviceViewSet),
    (
        "authenticators/admin/email",
        EmailAdminDeviceViewSet,
        "admin-emaildevice",
    ),
    ("stages/authenticator/email", AuthenticatorEmailStageViewSet),
]
@ -26,13 +26,10 @@ from authentik.events.middleware import audit_ignore
from authentik.events.models import Event, EventAction
from authentik.flows.stage import StageView
from authentik.flows.views.executor import SESSION_KEY_APPLICATION_PRE
from authentik.lib.utils.email import mask_email
from authentik.lib.utils.time import timedelta_from_string
from authentik.root.middleware import ClientIPMiddleware
from authentik.stages.authenticator import match_token
from authentik.stages.authenticator.models import Device
from authentik.stages.authenticator_duo.models import AuthenticatorDuoStage, DuoDevice
from authentik.stages.authenticator_email.models import EmailDevice
from authentik.stages.authenticator_sms.models import SMSDevice
from authentik.stages.authenticator_validate.models import AuthenticatorValidateStage, DeviceClasses
from authentik.stages.authenticator_webauthn.models import UserVerification, WebAuthnDevice
@ -57,8 +54,6 @@ def get_challenge_for_device(
    """Generate challenge for a single device"""
    if isinstance(device, WebAuthnDevice):
        return get_webauthn_challenge(request, stage, device)
    if isinstance(device, EmailDevice):
        return {"email": mask_email(device.email)}
    # Code-based challenges have no hints
    return {}

@ -108,8 +103,6 @@ def select_challenge(request: HttpRequest, device: Device):
    """Callback when the user selected a challenge in the frontend."""
    if isinstance(device, SMSDevice):
        select_challenge_sms(request, device)
    elif isinstance(device, EmailDevice):
        select_challenge_email(request, device)


def select_challenge_sms(request: HttpRequest, device: SMSDevice):
@ -118,13 +111,6 @@ def select_challenge_sms(request: HttpRequest, device: SMSDevice):
    device.stage.send(device.token, device)


def select_challenge_email(request: HttpRequest, device: EmailDevice):
    """Send Email"""
    valid_secs: int = timedelta_from_string(device.stage.token_expiry).total_seconds()
    device.generate_token(valid_secs=valid_secs)
    device.stage.send(device)


def validate_challenge_code(code: str, stage_view: StageView, user: User) -> Device:
    """Validate code-based challenges. We test against every device, on purpose, as
    the user mustn't choose between totp and static devices."""
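The `token_expiry` values referenced above ("minutes=30", or "hours=3,minutes=17,seconds=300") are parsed into seconds before `generate_token` is called. The sketch below is a simplified stand-in for that parsing, not authentik's `timedelta_from_string` itself.

```python
# Simplified stand-in: parse "hours=3,minutes=17,seconds=300"-style strings into
# seconds, mirroring how valid_secs is derived before generate_token() runs.
from datetime import timedelta

def expiry_to_seconds(expiry: str) -> int:
    kwargs = {}
    for part in expiry.split(","):
        key, _, value = part.strip().partition("=")
        kwargs[key] = int(value)
    return int(timedelta(**kwargs).total_seconds())

print(expiry_to_seconds("minutes=30"))                      # 1800
print(expiry_to_seconds("hours=3,minutes=17,seconds=300"))  # 12120
```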
@ -1,37 +0,0 @@
# Generated by Django 5.0.10 on 2025-01-16 02:48

import authentik.stages.authenticator_validate.models
import django.contrib.postgres.fields
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        (
            "authentik_stages_authenticator_validate",
            "0013_authenticatorvalidatestage_webauthn_allowed_device_types",
        ),
    ]

    operations = [
        migrations.AlterField(
            model_name="authenticatorvalidatestage",
            name="device_classes",
            field=django.contrib.postgres.fields.ArrayField(
                base_field=models.TextField(
                    choices=[
                        ("static", "Static"),
                        ("totp", "TOTP"),
                        ("webauthn", "WebAuthn"),
                        ("duo", "Duo"),
                        ("sms", "SMS"),
                        ("email", "Email"),
                    ]
                ),
                default=authentik.stages.authenticator_validate.models.default_device_classes,
                help_text="Device classes which can be used to authenticate",
                size=None,
            ),
        ),
    ]
@ -20,7 +20,6 @@ class DeviceClasses(models.TextChoices):
    WEBAUTHN = "webauthn", _("WebAuthn")
    DUO = "duo", _("Duo")
    SMS = "sms", _("SMS")
    EMAIL = "email", _("Email")


def default_device_classes() -> list:
@ -31,7 +30,6 @@ def default_device_classes() -> list:
        DeviceClasses.WEBAUTHN,
        DeviceClasses.DUO,
        DeviceClasses.SMS,
        DeviceClasses.EMAIL,
    ]

@ -23,7 +23,6 @@ from authentik.flows.stage import ChallengeStageView
from authentik.lib.utils.time import timedelta_from_string
from authentik.stages.authenticator import devices_for_user
from authentik.stages.authenticator.models import Device
from authentik.stages.authenticator_email.models import EmailDevice
from authentik.stages.authenticator_sms.models import SMSDevice
from authentik.stages.authenticator_validate.challenge import (
    DeviceChallenge,
@ -85,9 +84,7 @@ class AuthenticatorValidationChallengeResponse(ChallengeResponse):

    def validate_code(self, code: str) -> str:
        """Validate code-based response, raise error if code isn't allowed"""
        self._challenge_allowed(
            [DeviceClasses.TOTP, DeviceClasses.STATIC, DeviceClasses.SMS, DeviceClasses.EMAIL]
        )
        self._challenge_allowed([DeviceClasses.TOTP, DeviceClasses.STATIC, DeviceClasses.SMS])
        self.device = validate_challenge_code(code, self.stage, self.stage.get_pending_user())
        return code

@ -120,17 +117,12 @@ class AuthenticatorValidationChallengeResponse(ChallengeResponse):
        if not allowed:
            raise ValidationError("invalid challenge selected")

        device_class = challenge.get("device_class", "")
        if device_class == "sms":
            devices = SMSDevice.objects.filter(pk=int(challenge.get("device_uid", "0")))
            if not devices.exists():
                raise ValidationError("invalid challenge selected")
            select_challenge(self.stage.request, devices.first())
        elif device_class == "email":
            devices = EmailDevice.objects.filter(pk=int(challenge.get("device_uid", "0")))
            if not devices.exists():
                raise ValidationError("invalid challenge selected")
            select_challenge(self.stage.request, devices.first())
        if challenge.get("device_class", "") != "sms":
            return challenge
        devices = SMSDevice.objects.filter(pk=int(challenge.get("device_uid", "0")))
        if not devices.exists():
            raise ValidationError("invalid challenge selected")
        select_challenge(self.stage.request, devices.first())
        return challenge

    def validate_selected_stage(self, stage_pk: str) -> str:
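For reference, a sketch of the `selected_challenge` payload shape the branch above dispatches on; the field names come from the hunk, while the concrete values are hypothetical.

```python
# Hypothetical selected_challenge payload: device_class decides whether an SMS or
# email challenge is sent, and device_uid is the primary key used to look the device up.
selected_challenge = {
    "device_class": "email",
    "device_uid": "42",
    "challenge": {},
    "last_used": None,
}

if selected_challenge["device_class"] in ("sms", "email"):
    device_pk = int(selected_challenge.get("device_uid", "0"))
    # ...filter the matching device queryset and call select_challenge(request, device)
```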
@ -1,183 +0,0 @@
|
||||
"""Test validator stage for Email devices"""
|
||||
|
||||
from django.test.client import RequestFactory
|
||||
from django.urls.base import reverse
|
||||
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
|
||||
from authentik.flows.models import FlowStageBinding, NotConfiguredAction
|
||||
from authentik.flows.tests import FlowTestCase
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.utils.email import mask_email
|
||||
from authentik.stages.authenticator_email.models import AuthenticatorEmailStage, EmailDevice
|
||||
from authentik.stages.authenticator_validate.models import AuthenticatorValidateStage, DeviceClasses
|
||||
from authentik.stages.identification.models import IdentificationStage, UserFields
|
||||
|
||||
|
||||
class AuthenticatorValidateStageEmailTests(FlowTestCase):
|
||||
"""Test validator stage for Email devices"""
|
||||
|
||||
def setUp(self) -> None:
|
||||
self.user = create_test_admin_user()
|
||||
self.request_factory = RequestFactory()
|
||||
# Create email authenticator stage
|
||||
self.stage = AuthenticatorEmailStage.objects.create(
|
||||
name="email-authenticator",
|
||||
use_global_settings=True,
|
||||
from_address="test@authentik.local",
|
||||
)
|
||||
# Create identification stage
|
||||
self.ident_stage = IdentificationStage.objects.create(
|
||||
name=generate_id(),
|
||||
user_fields=[UserFields.USERNAME],
|
||||
)
|
||||
# Create validation stage
|
||||
self.validate_stage = AuthenticatorValidateStage.objects.create(
|
||||
name=generate_id(),
|
||||
device_classes=[DeviceClasses.EMAIL],
|
||||
)
|
||||
# Create flow with both stages
|
||||
self.flow = create_test_flow()
|
||||
FlowStageBinding.objects.create(target=self.flow, stage=self.ident_stage, order=0)
|
||||
FlowStageBinding.objects.create(target=self.flow, stage=self.validate_stage, order=1)
|
||||
|
||||
def _identify_user(self):
|
||||
"""Helper to identify user in flow"""
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
{"uid_field": self.user.username},
|
||||
follow=True,
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
return response
|
||||
|
||||
def _send_challenge(self, device):
|
||||
"""Helper to send challenge for device"""
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
{
|
||||
"component": "ak-stage-authenticator-validate",
|
||||
"selected_challenge": {
|
||||
"device_class": "email",
|
||||
"device_uid": str(device.pk),
|
||||
"challenge": {},
|
||||
"last_used": device.last_used.isoformat() if device.last_used else None,
|
||||
},
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
return response
|
||||
|
||||
def test_happy_path(self):
|
||||
"""Test validator stage with valid code"""
|
||||
# Create a device for our user
|
||||
device = EmailDevice.objects.create(
|
||||
user=self.user,
|
||||
confirmed=True,
|
||||
stage=self.stage,
|
||||
email="xx@0.co",
|
||||
) # Short email for testing purposes
|
||||
|
||||
# First identify the user
|
||||
self._identify_user()
|
||||
|
||||
# Send the challenge
|
||||
response = self._send_challenge(device)
|
||||
response_data = self.assertStageResponse(
|
||||
response,
|
||||
flow=self.flow,
|
||||
component="ak-stage-authenticator-validate",
|
||||
)
|
||||
|
||||
# Get the device challenge from the response and verify it matches
|
||||
device_challenge = response_data["device_challenges"][0]
|
||||
self.assertEqual(device_challenge["device_class"], "email")
|
||||
self.assertEqual(device_challenge["device_uid"], str(device.pk))
|
||||
self.assertEqual(device_challenge["challenge"], {"email": mask_email(device.email)})
|
||||
|
||||
# Generate a token for the device
|
||||
device.generate_token()
|
||||
|
||||
# Submit the valid code
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
{"component": "ak-stage-authenticator-validate", "code": device.token},
|
||||
)
|
||||
# Should redirect to root since this is the last stage
|
||||
self.assertStageRedirects(response, "/")
|
||||
|
||||
def test_no_device(self):
|
||||
"""Test validator stage without configured device"""
|
||||
configuration_stage = AuthenticatorEmailStage.objects.create(
|
||||
name=generate_id(),
|
||||
use_global_settings=True,
|
||||
from_address="test@authentik.local",
|
||||
)
|
||||
stage = AuthenticatorValidateStage.objects.create(
|
||||
name=generate_id(),
|
||||
not_configured_action=NotConfiguredAction.CONFIGURE,
|
||||
device_classes=[DeviceClasses.EMAIL],
|
||||
)
|
||||
stage.configuration_stages.set([configuration_stage])
|
||||
flow = create_test_flow()
|
||||
FlowStageBinding.objects.create(target=flow, stage=stage, order=2)
|
||||
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
|
||||
{"component": "ak-stage-authenticator-validate"},
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
response_data = self.assertStageResponse(
|
||||
response,
|
||||
flow=flow,
|
||||
component="ak-stage-authenticator-validate",
|
||||
)
|
||||
self.assertEqual(response_data["configuration_stages"], [])
|
||||
self.assertEqual(response_data["device_challenges"], [])
|
||||
self.assertEqual(
|
||||
response_data["response_errors"],
|
||||
{"non_field_errors": [{"code": "invalid", "string": "Empty response"}]},
|
||||
)
|
||||
|
||||
def test_invalid_code(self):
|
||||
"""Test validator stage with invalid code"""
|
||||
# Create a device for our user
|
||||
device = EmailDevice.objects.create(
|
||||
user=self.user,
|
||||
confirmed=True,
|
||||
stage=self.stage,
|
||||
email="test@authentik.local",
|
||||
)
|
||||
|
||||
# First identify the user
|
||||
self._identify_user()
|
||||
|
||||
# Send the challenge
|
||||
self._send_challenge(device)
|
||||
|
||||
# Generate a token for the device
|
||||
device.generate_token()
|
||||
|
||||
# Try invalid code and verify error message
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
{"component": "ak-stage-authenticator-validate", "code": "invalid"},
|
||||
)
|
||||
response_data = self.assertStageResponse(
|
||||
response,
|
||||
flow=self.flow,
|
||||
component="ak-stage-authenticator-validate",
|
||||
)
|
||||
self.assertEqual(
|
||||
response_data["response_errors"],
|
||||
{
|
||||
"code": [
|
||||
{
|
||||
"code": "invalid",
|
||||
"string": (
|
||||
"Invalid Token. Please ensure the time on your device "
|
||||
"is accurate and try again."
|
||||
),
|
||||
}
|
||||
],
|
||||
},
|
||||
)
|
@ -13,28 +13,17 @@ from structlog.stdlib import get_logger
from authentik.events.models import Event, EventAction, TaskStatus
from authentik.events.system_tasks import SystemTask
from authentik.root.celery import CELERY_APP
from authentik.stages.authenticator_email.models import AuthenticatorEmailStage
from authentik.stages.email.models import EmailStage
from authentik.stages.email.utils import logo_data

LOGGER = get_logger()


def send_mails(
    stage: EmailStage | AuthenticatorEmailStage, *messages: list[EmailMultiAlternatives]
):
    """Wrapper to convert EmailMessage to dict and send it from worker

    Args:
        stage: Either an EmailStage or AuthenticatorEmailStage instance
        messages: List of email messages to send
    Returns:
        Celery group promise for the email sending tasks
    """
def send_mails(stage: EmailStage, *messages: list[EmailMultiAlternatives]):
    """Wrapper to convert EmailMessage to dict and send it from worker"""
    tasks = []
    stage_class = stage.__class__
    for message in messages:
        tasks.append(send_mail.s(message.__dict__, stage_class, str(stage.pk)))
        tasks.append(send_mail.s(message.__dict__, str(stage.pk)))
    lazy_group = group(*tasks)
    promise = lazy_group()
    return promise
@ -58,28 +47,23 @@ def get_email_body(email: EmailMultiAlternatives) -> str:
    retry_backoff=True,
    base=SystemTask,
)
def send_mail(
    self: SystemTask,
    message: dict[Any, Any],
    stage_class: EmailStage | AuthenticatorEmailStage = EmailStage,
    email_stage_pk: str | None = None,
):
def send_mail(self: SystemTask, message: dict[Any, Any], email_stage_pk: str | None = None):
    """Send Email for Email Stage. Retries are scheduled automatically."""
    self.save_on_success = False
    message_id = make_msgid(domain=DNS_NAME)
    self.set_uid(slugify(message_id.replace(".", "_").replace("@", "_")))
    try:
        if not email_stage_pk:
            stage: EmailStage | AuthenticatorEmailStage = stage_class(use_global_settings=True)
            stage: EmailStage = EmailStage(use_global_settings=True)
        else:
            stages = stage_class.objects.filter(pk=email_stage_pk)
            stages = EmailStage.objects.filter(pk=email_stage_pk)
            if not stages.exists():
                self.set_status(
                    TaskStatus.WARNING,
                    "Email stage does not exist anymore. Discarding message.",
                )
                return
            stage: EmailStage | AuthenticatorEmailStage = stages.first()
            stage: EmailStage = stages.first()
        try:
            backend = stage.backend
        except ValueError as exc:
@ -1,30 +0,0 @@
version: 1
metadata:
  labels:
    blueprints.goauthentik.io/instantiate: "false"
  name: Example - Email MFA setup flow
entries:
  - attrs:
      designation: stage_configuration
      name: Default Email Authenticator Flow
      title: Setup Email Two-Factor Authentication
      authentication: require_authenticated
    identifiers:
      slug: default-authenticator-email-setup
    model: authentik_flows.flow
    id: flow
  - attrs:
      configure_flow: !KeyOf flow
      friendly_name: Email Authenticator
      use_global_settings: true
      token_expiry: minutes=30
      subject: authentik Sign-in code
    identifiers:
      name: default-authenticator-email-setup
    id: default-authenticator-email-setup
    model: authentik_stages_authenticator_email.authenticatoremailstage
  - identifiers:
      order: 0
      stage: !KeyOf default-authenticator-email-setup
      target: !KeyOf flow
    model: authentik_flows.flowstagebinding
File diff suppressed because it is too large
@ -10,7 +10,6 @@ import (

	"goauthentik.io/internal/common"
	"goauthentik.io/internal/config"
	"goauthentik.io/internal/constants"
	"goauthentik.io/internal/debug"
	"goauthentik.io/internal/outpost/ak"
	"goauthentik.io/internal/outpost/ak/healthcheck"
@ -25,8 +24,7 @@ Required environment variables:
- AUTHENTIK_INSECURE: Skip SSL Certificate verification`

var rootCmd = &cobra.Command{
	Long: helpMessage,
	Version: constants.FullVersion(),
	Long: helpMessage,
	PersistentPreRun: func(cmd *cobra.Command, args []string) {
		log.SetLevel(log.DebugLevel)
		log.SetFormatter(&log.JSONFormatter{

@ -10,7 +10,6 @@ import (

	"goauthentik.io/internal/common"
	"goauthentik.io/internal/config"
	"goauthentik.io/internal/constants"
	"goauthentik.io/internal/debug"
	"goauthentik.io/internal/outpost/ak"
	"goauthentik.io/internal/outpost/ak/healthcheck"
@ -28,8 +27,7 @@ Optionally, you can set these:
- AUTHENTIK_HOST_BROWSER: URL to use in the browser, when it differs from AUTHENTIK_HOST`

var rootCmd = &cobra.Command{
	Long: helpMessage,
	Version: constants.FullVersion(),
	Long: helpMessage,
	PersistentPreRun: func(cmd *cobra.Command, args []string) {
		log.SetLevel(log.DebugLevel)
		log.SetFormatter(&log.JSONFormatter{

@ -9,7 +9,6 @@ import (
	"github.com/spf13/cobra"

	"goauthentik.io/internal/common"
	"goauthentik.io/internal/constants"
	"goauthentik.io/internal/debug"
	"goauthentik.io/internal/outpost/ak"
	"goauthentik.io/internal/outpost/ak/healthcheck"
@ -24,8 +23,7 @@ Required environment variables:
- AUTHENTIK_INSECURE: Skip SSL Certificate verification`

var rootCmd = &cobra.Command{
	Long: helpMessage,
	Version: constants.FullVersion(),
	Long: helpMessage,
	PersistentPreRun: func(cmd *cobra.Command, args []string) {
		log.SetLevel(log.DebugLevel)
		log.SetFormatter(&log.JSONFormatter{

@ -9,7 +9,6 @@ import (
	"github.com/spf13/cobra"

	"goauthentik.io/internal/common"
	"goauthentik.io/internal/constants"
	"goauthentik.io/internal/debug"
	"goauthentik.io/internal/outpost/ak"
	"goauthentik.io/internal/outpost/ak/healthcheck"
@ -24,8 +23,7 @@ Required environment variables:
- AUTHENTIK_INSECURE: Skip SSL Certificate verification`

var rootCmd = &cobra.Command{
	Long: helpMessage,
	Version: constants.FullVersion(),
	Long: helpMessage,
	PersistentPreRun: func(cmd *cobra.Command, args []string) {
		log.SetLevel(log.DebugLevel)
		log.SetFormatter(&log.JSONFormatter{

185 cmd/scim/main.go Normal file
@ -0,0 +1,185 @@
package main

import (
	"context"
	"crypto/tls"
	"encoding/base64"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"os"
	"time"

	"github.com/mitchellh/mapstructure"
	log "github.com/sirupsen/logrus"
	"github.com/spf13/cobra"

	"goauthentik.io/internal/common"
	"goauthentik.io/internal/debug"
	"goauthentik.io/internal/outpost/ak"
	"goauthentik.io/internal/outpost/ak/healthcheck"
)

const helpMessage = `authentik SCIM

Required environment variables:
- AUTHENTIK_HOST: URL to connect to (format "http://authentik.company")
- AUTHENTIK_TOKEN: Token to authenticate with
- AUTHENTIK_INSECURE: Skip SSL Certificate verification`

var rootCmd = &cobra.Command{
	Long: helpMessage,
	PersistentPreRun: func(cmd *cobra.Command, args []string) {
		log.SetLevel(log.DebugLevel)
		log.SetFormatter(&log.JSONFormatter{
			FieldMap: log.FieldMap{
				log.FieldKeyMsg:  "event",
				log.FieldKeyTime: "timestamp",
			},
			DisableHTMLEscape: true,
		})
	},
	Run: func(cmd *cobra.Command, args []string) {
		debug.EnableDebugServer()
		akURL, found := os.LookupEnv("AUTHENTIK_HOST")
		if !found {
			fmt.Println("env AUTHENTIK_HOST not set!")
			fmt.Println(helpMessage)
			os.Exit(1)
		}
		akToken, found := os.LookupEnv("AUTHENTIK_TOKEN")
		if !found {
			fmt.Println("env AUTHENTIK_TOKEN not set!")
			fmt.Println(helpMessage)
			os.Exit(1)
		}

		akURLActual, err := url.Parse(akURL)
		if err != nil {
			fmt.Println(err)
			fmt.Println(helpMessage)
			os.Exit(1)
		}

		ex := common.Init()
		defer common.Defer()
		go func() {
			for {
				<-ex
				os.Exit(0)
			}
		}()

		ac := ak.NewAPIController(*akURLActual, akToken)
		if ac == nil {
			os.Exit(1)
		}
		defer ac.Shutdown()

		ac.Server = &SCIMOutpost{
			ac:  ac,
			log: log.WithField("logger", "authentik.outpost.scim"),
		}

		err = ac.Start()
		if err != nil {
			log.WithError(err).Panic("Failed to run server")
		}

		for {
			<-ex
		}
	},
}

type HTTPRequest struct {
	Uid       string              `mapstructure:"uid"`
	Method    string              `mapstructure:"method"`
	URL       string              `mapstructure:"url"`
	Headers   map[string][]string `mapstructure:"headers"`
	Body      interface{}         `mapstructure:"body"`
	SSLVerify bool                `mapstructure:"ssl_verify"`
	Timeout   int                 `mapstructure:"timeout"`
}

type RequestArgs struct {
	Request         HTTPRequest `mapstructure:"request"`
	ResponseChannel string      `mapstructure:"response_channel"`
}

type SCIMOutpost struct {
	ac  *ak.APIController
	log *log.Entry
}

func (s *SCIMOutpost) Type() string { return "SCIM" }
func (s *SCIMOutpost) Stop() error { return nil }
func (s *SCIMOutpost) Refresh() error { return nil }
func (s *SCIMOutpost) TimerFlowCacheExpiry(context.Context) {}

func (s *SCIMOutpost) Start() error {
	s.ac.AddWSHandler(func(ctx context.Context, args map[string]interface{}) {
		rd := RequestArgs{}
		err := mapstructure.Decode(args, &rd)
		if err != nil {
			s.log.WithError(err).Warning("failed to parse http request")
			return
		}
		s.log.WithField("rd", rd).WithField("raw", args).Debug("request data")
		ctx, canc := context.WithTimeout(ctx, time.Duration(rd.Request.Timeout)*time.Second)
		defer canc()

		req, err := http.NewRequestWithContext(ctx, rd.Request.Method, rd.Request.URL, nil)
		if err != nil {
			s.log.WithError(err).Warning("failed to create request")
			return
		}

		tr := &http.Transport{
			TLSClientConfig:       &tls.Config{InsecureSkipVerify: !rd.Request.SSLVerify},
			TLSHandshakeTimeout:   time.Duration(rd.Request.Timeout) * time.Second,
			IdleConnTimeout:       time.Duration(rd.Request.Timeout) * time.Second,
			ResponseHeaderTimeout: time.Duration(rd.Request.Timeout) * time.Second,
			ExpectContinueTimeout: time.Duration(rd.Request.Timeout) * time.Second,
		}
		c := &http.Client{
			Transport: tr,
		}
		s.log.WithField("url", req.URL.Host).Debug("sending HTTP request")
		res, err := c.Do(req)
		if err != nil {
			s.log.WithError(err).Warning("failed to send request")
			return
		}
		body, err := io.ReadAll(res.Body)
		if err != nil {
			s.log.WithError(err).Warning("failed to read body")
			return
		}
		s.log.WithField("res", res.StatusCode).Debug("sending HTTP response")
		err = s.ac.SendWS(ak.WebsocketInstructionProviderSpecific, map[string]interface{}{
			"sub_type":         "http_response",
			"response_channel": rd.ResponseChannel,
			"response": map[string]interface{}{
				"status":    res.StatusCode,
				"final_url": res.Request.URL.String(),
				"headers":   res.Header,
				"body":      base64.StdEncoding.EncodeToString(body),
			},
		})
		if err != nil {
			s.log.WithError(err).Warning("failed to send http response")
			return
		}
	})
	return nil
}

func main() {
	rootCmd.AddCommand(healthcheck.Command)
	err := rootCmd.Execute()
	if err != nil {
		os.Exit(1)
	}
}

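The new SCIM outpost above hooks into the shared outpost runtime by assigning itself to ac.Server. Judging only from the methods it implements, the server contract it satisfies looks roughly like the sketch below; the interface name and its exact definition inside goauthentik.io/internal/outpost/ak are assumptions, only the method set is taken from this diff.

package ak

import "context"

// Sketch only: an interface that the SCIMOutpost methods above would satisfy.
// The real server contract in goauthentik.io/internal/outpost/ak may differ.
type OutpostServer interface {
	Type() string
	Stop() error
	Refresh() error
	Start() error
	TimerFlowCacheExpiry(context.Context)
}
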
@ -31,7 +31,7 @@ services:
    volumes:
      - redis:/data
  server:
    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.2.0}
    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.12.3}
    restart: unless-stopped
    command: server
    environment:
@ -54,7 +54,7 @@ services:
      redis:
        condition: service_healthy
  worker:
    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.2.0}
    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.12.3}
    restart: unless-stopped
    command: worker
    environment:

6 go.mod
@ -26,10 +26,10 @@ require (
	github.com/redis/go-redis/v9 v9.7.0
	github.com/sethvargo/go-envconfig v1.1.1
	github.com/sirupsen/logrus v1.9.3
	github.com/spf13/cobra v1.9.1
	github.com/spf13/cobra v1.8.1
	github.com/stretchr/testify v1.10.0
	github.com/wwt/guac v1.3.2
	goauthentik.io/api/v3 v3.2024123.6
	goauthentik.io/api/v3 v3.2024123.4
	golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
	golang.org/x/oauth2 v0.26.0
	golang.org/x/sync v0.11.0
@ -71,7 +71,7 @@ require (
	github.com/prometheus/client_model v0.6.1 // indirect
	github.com/prometheus/common v0.55.0 // indirect
	github.com/prometheus/procfs v0.15.1 // indirect
	github.com/spf13/pflag v1.0.6 // indirect
	github.com/spf13/pflag v1.0.5 // indirect
	go.mongodb.org/mongo-driver v1.14.0 // indirect
	go.opentelemetry.io/otel v1.24.0 // indirect
	go.opentelemetry.io/otel/metric v1.24.0 // indirect

14 go.sum
@ -57,7 +57,7 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/coreos/go-oidc/v3 v3.12.0 h1:sJk+8G2qq94rDI6ehZ71Bol3oUHy63qNYmkiSjrc/Jo=
github.com/coreos/go-oidc/v3 v3.12.0/go.mod h1:gE3LgjOgFoHi9a4ce4/tJczr0Ai2/BoDhf0r5lltWI0=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@ -259,10 +259,10 @@ github.com/sethvargo/go-envconfig v1.1.1/go.mod h1:JLd0KFWQYzyENqnEPWWZ49i4vzZo/
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
@ -299,8 +299,8 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
goauthentik.io/api/v3 v3.2024123.6 h1:AGOCa7Fc/9eONCPEW4sEhTiyEBvxN57Lfqz1zm6Gy98=
goauthentik.io/api/v3 v3.2024123.6/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
goauthentik.io/api/v3 v3.2024123.4 h1:JYLsUjkJ7kT+jHO72DyFTXFwKEGAcOOlLh36SRG9BDw=
goauthentik.io/api/v3 v3.2024123.4/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=

@ -29,4 +29,4 @@ func UserAgent() string {
	return fmt.Sprintf("authentik@%s", FullVersion())
}

const VERSION = "2025.2.0"
const VERSION = "2024.12.3"

@ -95,7 +95,7 @@ func NewAPIController(akURL url.URL, token string) *APIController {
		time.Sleep(time.Second * 3)
	}
	if len(outposts.Results) < 1 {
		panic("No outposts found with given token, ensure the given token corresponds to an authenitk Outpost")
		panic("No outposts found with given token, ensure the given token corresponds to an authentik Outpost")
	}
	outpost := outposts.Results[0]

@ -233,15 +233,19 @@ func (a *APIController) AddWSHandler(handler WSHandler) {
	a.wsHandlers = append(a.wsHandlers, handler)
}

func (a *APIController) SendWS(inst WebsocketInstruction, args map[string]interface{}) error {
	msg := websocketMessage{
		Instruction: inst,
		Args:        args,
	}
	err := a.wsConn.WriteJSON(msg)
	return err
}

func (a *APIController) SendWSHello(args map[string]interface{}) error {
	allArgs := a.getWebsocketPingArgs()
	for key, value := range args {
		allArgs[key] = value
	}
	aliveMsg := websocketMessage{
		Instruction: WebsocketInstructionHello,
		Args:        allArgs,
	}
	err := a.wsConn.WriteJSON(aliveMsg)
	return err
	return a.SendWS(WebsocketInstructionHello, args)
}

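The SendWS helper added above is what the SCIM outpost in cmd/scim/main.go uses to answer HTTP requests delegated over the outpost websocket. A minimal sketch of calling it from provider code follows, assuming an *ak.APIController named ac; the helper name replyHTTPResponse is hypothetical, while the instruction constant, the SendWS signature, and the payload keys are taken from this diff.

package main

import "goauthentik.io/internal/outpost/ak"

// replyHTTPResponse pushes a provider-specific message back over the outpost
// websocket via the exported SendWS method, mirroring the "http_response"
// payload the SCIM outpost sends after proxying a request.
func replyHTTPResponse(ac *ak.APIController, responseChannel string, status int) error {
	return ac.SendWS(ak.WebsocketInstructionProviderSpecific, map[string]interface{}{
		"sub_type":         "http_response",
		"response_channel": responseChannel,
		"response": map[string]interface{}{
			"status": status,
		},
	})
}
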
@ -1,19 +1,19 @@
package ak

type websocketInstruction int
type WebsocketInstruction int

const (
	// WebsocketInstructionAck Code used to acknowledge a previous message
	WebsocketInstructionAck websocketInstruction = 0
	WebsocketInstructionAck WebsocketInstruction = 0
	// WebsocketInstructionHello Code used to send a healthcheck keepalive
	WebsocketInstructionHello websocketInstruction = 1
	WebsocketInstructionHello WebsocketInstruction = 1
	// WebsocketInstructionTriggerUpdate Code received to trigger a config update
	WebsocketInstructionTriggerUpdate websocketInstruction = 2
	WebsocketInstructionTriggerUpdate WebsocketInstruction = 2
	// WebsocketInstructionProviderSpecific Code received to trigger some provider specific function
	WebsocketInstructionProviderSpecific websocketInstruction = 3
	WebsocketInstructionProviderSpecific WebsocketInstruction = 3
)

type websocketMessage struct {
	Instruction websocketInstruction `json:"instruction"`
	Instruction WebsocketInstruction `json:"instruction"`
	Args map[string]interface{} `json:"args"`
}

8 lifecycle/aws/package-lock.json generated
@ -9,7 +9,7 @@
            "version": "0.0.0",
            "license": "MIT",
            "devDependencies": {
                "aws-cdk": "^2.179.0",
                "aws-cdk": "^2.178.2",
                "cross-env": "^7.0.3"
            },
            "engines": {
@ -17,9 +17,9 @@
            }
        },
        "node_modules/aws-cdk": {
            "version": "2.179.0",
            "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.179.0.tgz",
            "integrity": "sha512-aA2+8S2g4UBQHkUEt0mYd16VLt/ucR+QfyUJi34LDKRAhOCNDjPCZ4z9z/JEDyuni0BdzsYA55pnpDN9tMULpA==",
            "version": "2.178.2",
            "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.178.2.tgz",
            "integrity": "sha512-ojMCMnBGinvDUD6+BOOlUOB9pjsYXoQdFVbf4bvi3dy3nwn557r0j6qDUcJMeikzPJ6YWzfAdL0fYxBZg4xcOg==",
            "dev": true,
            "license": "Apache-2.0",
            "bin": {

@ -10,7 +10,7 @@
        "node": ">=20"
    },
    "devDependencies": {
        "aws-cdk": "^2.179.0",
        "aws-cdk": "^2.178.2",
        "cross-env": "^7.0.3"
    }
}

@ -26,7 +26,7 @@ Parameters:
    Description: authentik Docker image
  AuthentikVersion:
    Type: String
    Default: 2025.2.0
    Default: 2024.12.3
    Description: authentik Docker image tag
  AuthentikServerCPU:
    Type: Number

Binary file not shown.
@ -8,7 +8,7 @@
# 刘松, 2022
# Tianhao Chai <cth451@gmail.com>, 2024
# Jens L. <jens@goauthentik.io>, 2024
# deluxghost, 2025
# deluxghost, 2024
#
#, fuzzy
msgid ""
@ -17,7 +17,7 @@ msgstr ""
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-02-14 14:49+0000\n"
"PO-Revision-Date: 2022-09-26 16:47+0000\n"
"Last-Translator: deluxghost, 2025\n"
"Last-Translator: deluxghost, 2024\n"
"Language-Team: Chinese Simplified (https://app.transifex.com/authentik/teams/119923/zh-Hans/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
@ -568,39 +568,39 @@ msgstr "签名密钥"

#: authentik/enterprise/providers/ssf/models.py
msgid "Key used to sign the SSF Events."
msgstr "用于签名 SSF 时间的密钥。"
msgstr ""

#: authentik/enterprise/providers/ssf/models.py
msgid "Shared Signals Framework Provider"
msgstr "Shared Signals Framework 提供程序"
msgstr ""

#: authentik/enterprise/providers/ssf/models.py
msgid "Shared Signals Framework Providers"
msgstr "Shared Signals Framework 提供程序"
msgstr ""

#: authentik/enterprise/providers/ssf/models.py
msgid "Add stream to SSF provider"
msgstr "向 SSF 提供程序添加流"
msgstr ""

#: authentik/enterprise/providers/ssf/models.py
msgid "SSF Stream"
msgstr "SSF 流"
msgstr ""

#: authentik/enterprise/providers/ssf/models.py
msgid "SSF Streams"
msgstr "SSF 流"
msgstr ""

#: authentik/enterprise/providers/ssf/models.py
msgid "SSF Stream Event"
msgstr "SSF 流事件"
msgstr ""

#: authentik/enterprise/providers/ssf/models.py
msgid "SSF Stream Events"
msgstr "SSF 流事件"
msgstr ""

#: authentik/enterprise/providers/ssf/tasks.py
msgid "Failed to send request"
msgstr "发送请求失败"
msgstr ""

#: authentik/enterprise/stages/authenticator_endpoint_gdtc/models.py
msgid "Endpoint Authenticator Google Device Trust Connector Stage"
@ -878,7 +878,7 @@ msgstr "在流程规划过程中评估策略。"

#: authentik/flows/models.py
msgid "Evaluate policies when the Stage is presented to the user."
msgstr "在阶段呈现给用户时评估策略。"
msgstr ""

#: authentik/flows/models.py
msgid ""

@ -7,7 +7,7 @@
# Chen Zhikai, 2022
# 刘松, 2022
# Jens L. <jens@goauthentik.io>, 2024
# deluxghost, 2025
# deluxghost, 2024
#
#, fuzzy
msgid ""
@ -16,7 +16,7 @@ msgstr ""
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-02-14 14:49+0000\n"
"PO-Revision-Date: 2022-09-26 16:47+0000\n"
"Last-Translator: deluxghost, 2025\n"
"Last-Translator: deluxghost, 2024\n"
"Language-Team: Chinese (China) (https://app.transifex.com/authentik/teams/119923/zh_CN/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
@ -567,39 +567,39 @@ msgstr "签名密钥"

#: authentik/enterprise/providers/ssf/models.py
msgid "Key used to sign the SSF Events."
msgstr "用于签名 SSF 时间的密钥。"
msgstr ""

#: authentik/enterprise/providers/ssf/models.py
msgid "Shared Signals Framework Provider"
msgstr "Shared Signals Framework 提供程序"
msgstr ""

#: authentik/enterprise/providers/ssf/models.py
msgid "Shared Signals Framework Providers"
msgstr "Shared Signals Framework 提供程序"
msgstr ""

#: authentik/enterprise/providers/ssf/models.py
msgid "Add stream to SSF provider"
msgstr "向 SSF 提供程序添加流"
msgstr ""

#: authentik/enterprise/providers/ssf/models.py
msgid "SSF Stream"
msgstr "SSF 流"
msgstr ""

#: authentik/enterprise/providers/ssf/models.py
msgid "SSF Streams"
msgstr "SSF 流"
msgstr ""

#: authentik/enterprise/providers/ssf/models.py
msgid "SSF Stream Event"
msgstr "SSF 流事件"
msgstr ""

#: authentik/enterprise/providers/ssf/models.py
msgid "SSF Stream Events"
msgstr "SSF 流事件"
msgstr ""

#: authentik/enterprise/providers/ssf/tasks.py
msgid "Failed to send request"
msgstr "发送请求失败"
msgstr ""

#: authentik/enterprise/stages/authenticator_endpoint_gdtc/models.py
msgid "Endpoint Authenticator Google Device Trust Connector Stage"
@ -877,7 +877,7 @@ msgstr "在流程规划过程中评估策略。"

#: authentik/flows/models.py
msgid "Evaluate policies when the Stage is presented to the user."
msgstr "在阶段呈现给用户时评估策略。"
msgstr ""

#: authentik/flows/models.py
msgid ""

@ -1,5 +1,5 @@
{
    "name": "@goauthentik/authentik",
    "version": "2025.2.0",
    "version": "2024.12.3",
    "private": true
}

88 poetry.lock generated
@ -358,6 +358,22 @@ jsii = ">=1.105.0,<2.0.0"
publication = ">=0.0.3"
typeguard = ">=2.13.3,<4.3.0"

[[package]]
name = "aws-cdk-asset-kubectl-v20"
version = "2.1.3"
description = "A Lambda Layer that contains kubectl v1.20"
optional = false
python-versions = "~=3.8"
files = [
    {file = "aws_cdk.asset_kubectl_v20-2.1.3-py3-none-any.whl", hash = "sha256:d5612e5bd03c215a28ce53193b1144ecf4e93b3b6779563c046a8a74d83a3979"},
    {file = "aws_cdk_asset_kubectl_v20-2.1.3.tar.gz", hash = "sha256:237cd8530d9e8be0bbc7159af927dbb6b7f91bf3f4099c8ef4d9a213b34264be"},
]

[package.dependencies]
jsii = ">=1.103.1,<2.0.0"
publication = ">=0.0.3"
typeguard = ">=2.13.3,<5.0.0"

[[package]]
name = "aws-cdk-asset-node-proxy-agent-v6"
version = "2.1.0"
@ -392,17 +408,18 @@ typeguard = ">=2.13.3,<4.3.0"

[[package]]
name = "aws-cdk-lib"
version = "2.179.0"
version = "2.178.2"
description = "Version 2 of the AWS Cloud Development Kit library"
optional = false
python-versions = "~=3.8"
files = [
    {file = "aws_cdk_lib-2.179.0-py3-none-any.whl", hash = "sha256:1d7b88ee69067b8d58dac9eeb6697bbaf5d5c032a3070898389c41e7c4f3e3d7"},
    {file = "aws_cdk_lib-2.179.0.tar.gz", hash = "sha256:b653a55754f4020a4b36e4ae183d213e76e27b18b842cbf9e430e9eccb700550"},
    {file = "aws_cdk_lib-2.178.2-py3-none-any.whl", hash = "sha256:624383e57fe2b32f7d0fc098b78b4cd21d19ae3af3f24b01f32ec4795baaee25"},
    {file = "aws_cdk_lib-2.178.2.tar.gz", hash = "sha256:c00757885b74023350bb34f388f6447155e802ecf827e595bda917098a4925fe"},
]

[package.dependencies]
"aws-cdk.asset-awscli-v1" = ">=2.2.208,<3.0.0"
"aws-cdk.asset-kubectl-v20" = ">=2.1.3,<3.0.0"
"aws-cdk.asset-node-proxy-agent-v6" = ">=2.1.0,<3.0.0"
"aws-cdk.cloud-assembly-schema" = ">=39.2.0,<40.0.0"
constructs = ">=10.0.0,<11.0.0"
@ -449,13 +466,13 @@ typing-extensions = ">=4.0.0"

[[package]]
name = "bandit"
version = "1.8.3"
version = "1.8.2"
description = "Security oriented static analyser for python code."
optional = false
python-versions = ">=3.9"
files = [
    {file = "bandit-1.8.3-py3-none-any.whl", hash = "sha256:28f04dc0d258e1dd0f99dee8eefa13d1cb5e3fde1a5ab0c523971f97b289bcd8"},
    {file = "bandit-1.8.3.tar.gz", hash = "sha256:f5847beb654d309422985c36644649924e0ea4425c76dec2e89110b87506193a"},
    {file = "bandit-1.8.2-py3-none-any.whl", hash = "sha256:df6146ad73dd30e8cbda4e29689ddda48364e36ff655dbfc86998401fcf1721f"},
    {file = "bandit-1.8.2.tar.gz", hash = "sha256:e00ad5a6bc676c0954669fe13818024d66b70e42cf5adb971480cf3b671e835f"},
]

[package.dependencies]
@ -1400,13 +1417,13 @@ files = [

[[package]]
name = "django-filter"
version = "25.1"
version = "24.3"
description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically."
optional = false
python-versions = ">=3.9"
python-versions = ">=3.8"
files = [
    {file = "django_filter-25.1-py3-none-any.whl", hash = "sha256:4fa48677cf5857b9b1347fed23e355ea792464e0fe07244d1fdfb8a806215b80"},
    {file = "django_filter-25.1.tar.gz", hash = "sha256:1ec9eef48fa8da1c0ac9b411744b16c3f4c31176c867886e4c48da369c407153"},
    {file = "django_filter-24.3-py3-none-any.whl", hash = "sha256:c4852822928ce17fb699bcfccd644b3574f1a2d80aeb2b4ff4f16b02dd49dc64"},
    {file = "django_filter-24.3.tar.gz", hash = "sha256:d8ccaf6732afd21ca0542f6733b11591030fa98669f8d15599b358e24a2cd9c3"},
]

[package.dependencies]
@ -1503,13 +1520,13 @@ hiredis = ["redis[hiredis] (>=3,!=4.0.0,!=4.0.1)"]

[[package]]
name = "django-storages"
version = "1.14.5"
version = "1.14.4"
description = "Support for many storage backends in Django"
optional = false
python-versions = ">=3.7"
files = [
    {file = "django_storages-1.14.5-py3-none-any.whl", hash = "sha256:5ce9c69426f24f379821fd688442314e4aa03de87ae43183c4e16915f4c165d4"},
    {file = "django_storages-1.14.5.tar.gz", hash = "sha256:ace80dbee311258453e30cd5cfd91096b834180ccf09bc1f4d2cb6d38d68571a"},
    {file = "django-storages-1.14.4.tar.gz", hash = "sha256:69aca94d26e6714d14ad63f33d13619e697508ee33ede184e462ed766dc2a73f"},
    {file = "django_storages-1.14.4-py3-none-any.whl", hash = "sha256:d61930acb4a25e3aebebc6addaf946a3b1df31c803a6bf1af2f31c9047febaa3"},
]

[package.dependencies]
@ -1520,7 +1537,7 @@ Django = ">=3.2"
azure = ["azure-core (>=1.13)", "azure-storage-blob (>=12)"]
boto3 = ["boto3 (>=1.4.4)"]
dropbox = ["dropbox (>=7.2.1)"]
google = ["google-cloud-storage (>=1.32)"]
google = ["google-cloud-storage (>=1.27)"]
libcloud = ["apache-libcloud"]
s3 = ["boto3 (>=1.4.4)"]
sftp = ["paramiko (>=1.15)"]
@ -1867,17 +1884,6 @@ files = [
    {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"},
]

[[package]]
name = "geographiclib"
version = "2.0"
description = "The geodesic routines from GeographicLib"
optional = false
python-versions = ">=3.7"
files = [
    {file = "geographiclib-2.0-py3-none-any.whl", hash = "sha256:6b7225248e45ff7edcee32becc4e0a1504c606ac5ee163a5656d482e0cd38734"},
    {file = "geographiclib-2.0.tar.gz", hash = "sha256:f7f41c85dc3e1c2d3d935ec86660dc3b2c848c83e17f9a9e51ba9d5146a15859"},
]

[[package]]
name = "geoip2"
version = "5.0.1"
@ -1897,29 +1903,6 @@ requests = ">=2.24.0,<3.0.0"
[package.extras]
test = ["pytest-httpserver (>=1.0.10)"]

[[package]]
name = "geopy"
version = "2.4.1"
description = "Python Geocoding Toolbox"
optional = false
python-versions = ">=3.7"
files = [
    {file = "geopy-2.4.1-py3-none-any.whl", hash = "sha256:ae8b4bc5c1131820f4d75fce9d4aaaca0c85189b3aa5d64c3dcaf5e3b7b882a7"},
    {file = "geopy-2.4.1.tar.gz", hash = "sha256:50283d8e7ad07d89be5cb027338c6365a32044df3ae2556ad3f52f4840b3d0d1"},
]

[package.dependencies]
geographiclib = ">=1.52,<3"

[package.extras]
aiohttp = ["aiohttp"]
dev = ["coverage", "flake8 (>=5.0,<5.1)", "isort (>=5.10.0,<5.11.0)", "pytest (>=3.10)", "pytest-asyncio (>=0.17)", "readme-renderer", "sphinx (<=4.3.2)", "sphinx-issues", "sphinx-rtd-theme (>=0.5.0)"]
dev-docs = ["readme-renderer", "sphinx (<=4.3.2)", "sphinx-issues", "sphinx-rtd-theme (>=0.5.0)"]
dev-lint = ["flake8 (>=5.0,<5.1)", "isort (>=5.10.0,<5.11.0)"]
dev-test = ["coverage", "pytest (>=3.10)", "pytest-asyncio (>=0.17)", "sphinx (<=4.3.2)"]
requests = ["requests (>=2.16.2)", "urllib3 (>=1.24.2)"]
timezone = ["pytz"]

[[package]]
name = "google-api-core"
version = "2.19.1"
@ -4628,13 +4611,13 @@ websocket-client = ">=1.8,<2.0"

[[package]]
name = "sentry-sdk"
version = "2.22.0"
version = "2.21.0"
description = "Python client for Sentry (https://sentry.io)"
optional = false
python-versions = ">=3.6"
files = [
    {file = "sentry_sdk-2.22.0-py2.py3-none-any.whl", hash = "sha256:3d791d631a6c97aad4da7074081a57073126c69487560c6f8bffcf586461de66"},
    {file = "sentry_sdk-2.22.0.tar.gz", hash = "sha256:b4bf43bb38f547c84b2eadcefbe389b36ef75f3f38253d7a74d6b928c07ae944"},
    {file = "sentry_sdk-2.21.0-py2.py3-none-any.whl", hash = "sha256:7623cfa9e2c8150948a81ca253b8e2bfe4ce0b96ab12f8cd78e3ac9c490fd92f"},
    {file = "sentry_sdk-2.21.0.tar.gz", hash = "sha256:a6d38e0fb35edda191acf80b188ec713c863aaa5ad8d5798decb8671d02077b6"},
]

[package.dependencies]
@ -4678,7 +4661,6 @@ sanic = ["sanic (>=0.8)"]
sqlalchemy = ["sqlalchemy (>=1.2)"]
starlette = ["starlette (>=0.19.1)"]
starlite = ["starlite (>=1.48)"]
statsig = ["statsig (>=0.55.3)"]
tornado = ["tornado (>=6)"]
unleash = ["UnleashClient (>=6.0.1)"]

@ -5865,4 +5847,4 @@ files = [
[metadata]
lock-version = "2.0"
python-versions = "~3.12"
content-hash = "8a6bfd4833e415a9f4f613ab4f33e60c8332b9f5743583222cdb7190f6286216"
content-hash = "a3915ac2ef2bb53f7cd67070912cdaf717c3bf73ed972fa337a9b07fce162451"

Some files were not shown because too many files have changed in this diff.