Compare commits
22 Commits
version/20 ... version/20

Commits (SHA1):
e8b5e4c127, 81ec98b198, c46ab19e79, de9fc5de6b, eab3d9b411, 7cb40d786f,
b4fce08bbc, 8a2ba1c518, 25b4306693, 1e279950f1, 960429355f, b4f3748353,
91d2445c61, dd8f809161, 57a31b5dd1, 09125b6236, 832126c6fe, 25fe489b34,
18078fd68f, 4fa71d995d, 22cec64234, a87cc27366
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 2024.8.2
current_version = 2024.8.4
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
@@ -2,7 +2,7 @@

from os import environ

__version__ = "2024.8.2"
__version__ = "2024.8.4"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@@ -51,9 +51,11 @@ class BlueprintInstanceSerializer(ModelSerializer):
context = self.instance.context if self.instance else {}
valid, logs = Importer.from_string(content, context).validate()
if not valid:
text_logs = "\n".join([x["event"] for x in logs])
raise ValidationError(
_("Failed to validate blueprint: {logs}".format_map({"logs": text_logs}))
[
_("Failed to validate blueprint"),
*[f"- {x.event}" for x in logs],
]
)
return content
@@ -78,5 +78,5 @@ class TestBlueprintsV1API(APITestCase):
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(
res.content.decode(),
{"content": ["Failed to validate blueprint: Invalid blueprint version"]},
{"content": ["Failed to validate blueprint", "- Invalid blueprint version"]},
)
@@ -429,7 +429,7 @@ class Importer:
orig_import = deepcopy(self._import)
if self._import.version != 1:
self.logger.warning("Invalid blueprint version")
return False, [{"event": "Invalid blueprint version"}]
return False, [LogEvent("Invalid blueprint version", log_level="warning", logger=None)]
with (
transaction_rollback(),
capture_logs() as logs,
@@ -679,7 +679,10 @@ class UserViewSet(UsedByMixin, ModelViewSet):
LOGGER.debug("User attempted to impersonate", user=request.user)
return Response(status=401)
user_to_be = self.get_object()
if not request.user.has_perm("impersonate", user_to_be):
# Check both object-level perms and global perms
if not request.user.has_perm(
"authentik_core.impersonate", user_to_be
) and not request.user.has_perm("authentik_core.impersonate"):
LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
return Response(status=401)
if user_to_be.pk == self.request.user.pk:
@@ -44,6 +44,26 @@ class TestImpersonation(APITestCase):
self.assertEqual(response_body["user"]["username"], self.user.username)
self.assertNotIn("original", response_body)

def test_impersonate_global(self):
"""Test impersonation with global permissions"""
new_user = create_test_user()
assign_perm("authentik_core.impersonate", new_user)
assign_perm("authentik_core.view_user", new_user)
self.client.force_login(new_user)

response = self.client.post(
reverse(
"authentik_api:user-impersonate",
kwargs={"pk": self.other_user.pk},
)
)
self.assertEqual(response.status_code, 201)

response = self.client.get(reverse("authentik_api:user-me"))
response_body = loads(response.content.decode())
self.assertEqual(response_body["user"]["username"], self.other_user.username)
self.assertEqual(response_body["original"]["username"], new_user.username)

def test_impersonate_scoped(self):
"""Test impersonation with scoped permissions"""
new_user = create_test_user()
@@ -49,6 +49,7 @@ from authentik.policies.models import PolicyBindingModel
from authentik.root.middleware import ClientIPMiddleware
from authentik.stages.email.utils import TemplateEmailMessage
from authentik.tenants.models import Tenant
from authentik.tenants.utils import get_current_tenant

LOGGER = get_logger()
DISCORD_FIELD_LIMIT = 25

@@ -58,7 +59,11 @@ NOTIFICATION_SUMMARY_LENGTH = 75

def default_event_duration():
"""Default duration an Event is saved.
This is used as a fallback when no brand is available"""
return now() + timedelta(days=365)
try:
tenant = get_current_tenant()
return now() + timedelta_from_string(tenant.event_retention)
except Tenant.DoesNotExist:
return now() + timedelta(days=365)


def default_brand():

@@ -245,12 +250,6 @@ class Event(SerializerModel, ExpiringModel):
if QS_QUERY in self.context["http_request"]["args"]:
wrapped = self.context["http_request"]["args"][QS_QUERY]
self.context["http_request"]["args"] = cleanse_dict(QueryDict(wrapped))
if hasattr(request, "tenant"):
tenant: Tenant = request.tenant
# Because self.created only gets set on save, we can't use it's value here
# hence we set self.created to now and then use it
self.created = now()
self.expires = self.created + timedelta_from_string(tenant.event_retention)
if hasattr(request, "brand"):
brand: Brand = request.brand
self.brand = sanitize_dict(model_to_dict(brand))
@@ -6,6 +6,7 @@ from django.db.models import Model
from django.test import TestCase

from authentik.core.models import default_token_key
from authentik.events.models import default_event_duration
from authentik.lib.utils.reflection import get_apps


@@ -20,7 +21,7 @@ def model_tester_factory(test_model: type[Model]) -> Callable:
allowed = 0
# Token-like objects need to lookup the current tenant to get the default token length
for field in test_model._meta.fields:
if field.default == default_token_key:
if field.default in [default_token_key, default_event_duration]:
allowed += 1
with self.assertNumQueries(allowed):
str(test_model())
@@ -30,6 +30,11 @@ class TestHTTP(TestCase):
request = self.factory.get("/", HTTP_X_FORWARDED_FOR="127.0.0.2")
self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.2")

def test_forward_for_invalid(self):
"""Test invalid forward for"""
request = self.factory.get("/", HTTP_X_FORWARDED_FOR="foobar")
self.assertEqual(ClientIPMiddleware.get_client_ip(request), ClientIPMiddleware.default_ip)

def test_fake_outpost(self):
"""Test faked IP which is overridden by an outpost"""
token = Token.objects.create(

@@ -53,6 +58,17 @@ class TestHTTP(TestCase):
},
)
self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.1")
# Invalid, not a real IP
self.user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
self.user.save()
request = self.factory.get(
"/",
**{
ClientIPMiddleware.outpost_remote_ip_header: "foobar",
ClientIPMiddleware.outpost_token_header: token.key,
},
)
self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.1")
# Valid
self.user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
self.user.save()
@@ -21,7 +21,14 @@ class DebugSession(Session):

def send(self, req: PreparedRequest, *args, **kwargs):
request_id = str(uuid4())
LOGGER.debug("HTTP request sent", uid=request_id, path=req.path_url, headers=req.headers)
LOGGER.debug(
"HTTP request sent",
uid=request_id,
url=req.url,
method=req.method,
headers=req.headers,
body=req.body,
)
resp = super().send(req, *args, **kwargs)
LOGGER.debug(
"HTTP response received",
@@ -108,7 +108,7 @@ class EventMatcherPolicy(Policy):
result=result,
)
matches.append(result)
passing = any(x.passing for x in matches)
passing = all(x.passing for x in matches)
messages = chain(*[x.messages for x in matches])
result = PolicyResult(passing, *messages)
result.source_results = matches
@@ -77,11 +77,24 @@ class TestEventMatcherPolicy(TestCase):
request = PolicyRequest(get_anonymous_user())
request.context["event"] = event
policy: EventMatcherPolicy = EventMatcherPolicy.objects.create(
client_ip="1.2.3.5", app="bar"
client_ip="1.2.3.5", app="foo"
)
response = policy.passes(request)
self.assertFalse(response.passing)

def test_multiple(self):
"""Test multiple"""
event = Event.new(EventAction.LOGIN)
event.app = "foo"
event.client_ip = "1.2.3.4"
request = PolicyRequest(get_anonymous_user())
request.context["event"] = event
policy: EventMatcherPolicy = EventMatcherPolicy.objects.create(
client_ip="1.2.3.4", app="foo"
)
response = policy.passes(request)
self.assertTrue(response.passing)

def test_invalid(self):
"""Test passing event"""
request = PolicyRequest(get_anonymous_user())
@@ -29,7 +29,6 @@ class TesOAuth2Introspection(OAuthTestCase):
self.app = Application.objects.create(
name=generate_id(), slug=generate_id(), provider=self.provider
)
self.app.save()
self.user = create_test_admin_user()
self.auth = b64encode(
f"{self.provider.client_id}:{self.provider.client_secret}".encode()

@@ -114,6 +113,41 @@ class TesOAuth2Introspection(OAuthTestCase):
},
)

def test_introspect_invalid_provider(self):
"""Test introspection (mismatched provider and token)"""
provider: OAuth2Provider = OAuth2Provider.objects.create(
name=generate_id(),
authorization_flow=create_test_flow(),
redirect_uris="",
signing_key=create_test_cert(),
)
auth = b64encode(f"{provider.client_id}:{provider.client_secret}".encode()).decode()

token: AccessToken = AccessToken.objects.create(
provider=self.provider,
user=self.user,
token=generate_id(),
auth_time=timezone.now(),
_scope="openid user profile",
_id_token=json.dumps(
asdict(
IDToken("foo", "bar"),
)
),
)
res = self.client.post(
reverse("authentik_providers_oauth2:token-introspection"),
HTTP_AUTHORIZATION=f"Basic {auth}",
data={"token": token.token},
)
self.assertEqual(res.status_code, 200)
self.assertJSONEqual(
res.content.decode(),
{
"active": False,
},
)

def test_introspect_invalid_auth(self):
"""Test introspect (invalid auth)"""
res = self.client.post(
@@ -46,10 +46,10 @@ class TokenIntrospectionParams:
if not provider:
raise TokenIntrospectionError

access_token = AccessToken.objects.filter(token=raw_token).first()
access_token = AccessToken.objects.filter(token=raw_token, provider=provider).first()
if access_token:
return TokenIntrospectionParams(access_token, provider)
refresh_token = RefreshToken.objects.filter(token=raw_token).first()
refresh_token = RefreshToken.objects.filter(token=raw_token, provider=provider).first()
if refresh_token:
return TokenIntrospectionParams(refresh_token, provider)
LOGGER.debug("Token does not exist", token=raw_token)
@@ -439,15 +439,14 @@ class TokenParams:
# (22 chars being the length of the "template")
username=f"ak-{self.provider.name[:150-22]}-client_credentials",
defaults={
"attributes": {
USER_ATTRIBUTE_GENERATED: True,
},
"last_login": timezone.now(),
"name": f"Autogenerated user from application {app.name} (client credentials)",
"path": f"{USER_PATH_SYSTEM_PREFIX}/apps/{app.slug}",
"type": UserTypes.SERVICE_ACCOUNT,
},
)
self.user.attributes[USER_ATTRIBUTE_GENERATED] = True
self.user.save()
self.__check_policy_access(app, request)

Event.new(

@@ -471,9 +470,6 @@ class TokenParams:
self.user, created = User.objects.update_or_create(
username=f"{self.provider.name}-{token.get('sub')}",
defaults={
"attributes": {
USER_ATTRIBUTE_GENERATED: True,
},
"last_login": timezone.now(),
"name": (
f"Autogenerated user from application {app.name} (client credentials JWT)"

@@ -482,6 +478,8 @@ class TokenParams:
"type": UserTypes.SERVICE_ACCOUNT,
},
)
self.user.attributes[USER_ATTRIBUTE_GENERATED] = True
self.user.save()
exp = token.get("exp")
if created and exp:
self.user.attributes[USER_ATTRIBUTE_EXPIRES] = exp
@@ -28,7 +28,7 @@ class ProxyDockerController(DockerController):
labels = super()._get_labels()
labels["traefik.enable"] = "true"
labels[f"traefik.http.routers.{traefik_name}-router.rule"] = (
f"({' || '.join([f'Host(`{host}`)' for host in hosts])})"
f"({' || '.join([f'Host({host})' for host in hosts])})"
f" && PathPrefix(`/outpost.goauthentik.io`)"
)
labels[f"traefik.http.routers.{traefik_name}-router.tls"] = "true"
@@ -50,6 +50,7 @@ class AssertionProcessor:

_issue_instant: str
_assertion_id: str
_response_id: str

_valid_not_before: str
_session_not_on_or_after: str

@@ -62,6 +63,7 @@ class AssertionProcessor:

self._issue_instant = get_time_string()
self._assertion_id = get_random_id()
self._response_id = get_random_id()

self._valid_not_before = get_time_string(
timedelta_from_string(self.provider.assertion_valid_not_before)

@@ -130,7 +132,9 @@ class AssertionProcessor:
"""Generate AuthnStatement with AuthnContext and ContextClassRef Elements."""
auth_n_statement = Element(f"{{{NS_SAML_ASSERTION}}}AuthnStatement")
auth_n_statement.attrib["AuthnInstant"] = self._valid_not_before
auth_n_statement.attrib["SessionIndex"] = self._assertion_id
auth_n_statement.attrib["SessionIndex"] = sha256(
self.http_request.session.session_key.encode("ascii")
).hexdigest()
auth_n_statement.attrib["SessionNotOnOrAfter"] = self._session_not_on_or_after

auth_n_context = SubElement(auth_n_statement, f"{{{NS_SAML_ASSERTION}}}AuthnContext")

@@ -285,7 +289,7 @@ class AssertionProcessor:
response.attrib["Version"] = "2.0"
response.attrib["IssueInstant"] = self._issue_instant
response.attrib["Destination"] = self.provider.acs_url
response.attrib["ID"] = get_random_id()
response.attrib["ID"] = self._response_id
if self.auth_n_request.id:
response.attrib["InResponseTo"] = self.auth_n_request.id

@@ -308,7 +312,7 @@ class AssertionProcessor:
ref = xmlsec.template.add_reference(
signature_node,
digest_algorithm_transform,
uri="#" + self._assertion_id,
uri="#" + element.attrib["ID"],
)
xmlsec.template.add_transform(ref, xmlsec.constants.TransformEnveloped)
xmlsec.template.add_transform(ref, xmlsec.constants.TransformExclC14N)
@@ -180,6 +180,10 @@ class TestAuthNRequest(TestCase):
# Now create a response and convert it to string (provider)
response_proc = AssertionProcessor(self.provider, http_request, parsed_request)
response = response_proc.build_response()
# Ensure both response and assertion ID are in the response twice (once as ID attribute,
# once as ds:Reference URI)
self.assertEqual(response.count(response_proc._assertion_id), 2)
self.assertEqual(response.count(response_proc._response_id), 2)

# Now parse the response (source)
http_request.POST = QueryDict(mutable=True)
@@ -2,9 +2,10 @@

from itertools import batched

from django.db import transaction
from pydantic import ValidationError
from pydanticscim.group import GroupMember
from pydanticscim.responses import PatchOp, PatchOperation
from pydanticscim.responses import PatchOp

from authentik.core.models import Group
from authentik.lib.sync.mapper import PropertyMappingManager

@@ -19,7 +20,7 @@ from authentik.providers.scim.clients.base import SCIMClient
from authentik.providers.scim.clients.exceptions import (
SCIMRequestException,
)
from authentik.providers.scim.clients.schema import SCIM_GROUP_SCHEMA, PatchRequest
from authentik.providers.scim.clients.schema import SCIM_GROUP_SCHEMA, PatchOperation, PatchRequest
from authentik.providers.scim.clients.schema import Group as SCIMGroupSchema
from authentik.providers.scim.models import (
SCIMMapping,

@@ -104,13 +105,47 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
provider=self.provider, group=group, scim_id=scim_id
)
users = list(group.users.order_by("id").values_list("id", flat=True))
self._patch_add_users(group, users)
self._patch_add_users(connection, users)
return connection

def update(self, group: Group, connection: SCIMProviderGroup):
"""Update existing group"""
scim_group = self.to_schema(group, connection)
scim_group.id = connection.scim_id
try:
if self._config.patch.supported:
return self._update_patch(group, scim_group, connection)
return self._update_put(group, scim_group, connection)
except NotFoundSyncException:
# Resource missing is handled by self.write, which will re-create the group
raise

def _update_patch(
self, group: Group, scim_group: SCIMGroupSchema, connection: SCIMProviderGroup
):
"""Update a group via PATCH request"""
# Patch group's attributes instead of replacing it and re-adding users if we can
self._request(
"PATCH",
f"/Groups/{connection.scim_id}",
json=PatchRequest(
Operations=[
PatchOperation(
op=PatchOp.replace,
path=None,
value=scim_group.model_dump(mode="json", exclude_unset=True),
)
]
).model_dump(
mode="json",
exclude_unset=True,
exclude_none=True,
),
)
return self.patch_compare_users(group)

def _update_put(self, group: Group, scim_group: SCIMGroupSchema, connection: SCIMProviderGroup):
"""Update a group via PUT request"""
try:
self._request(
"PUT",
@@ -120,33 +155,25 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
exclude_unset=True,
),
)
users = list(group.users.order_by("id").values_list("id", flat=True))
return self._patch_add_users(group, users)
except NotFoundSyncException:
# Resource missing is handled by self.write, which will re-create the group
raise
return self.patch_compare_users(group)
except (SCIMRequestException, ObjectExistsSyncException):
# Some providers don't support PUT on groups, so this is mainly a fix for the initial
# sync, send patch add requests for all the users the group currently has
users = list(group.users.order_by("id").values_list("id", flat=True))
self._patch_add_users(group, users)
# Also update the group name
return self._patch(
scim_group.id,
PatchOperation(
op=PatchOp.replace,
path="displayName",
value=scim_group.displayName,
),
)
return self._update_patch(group, scim_group, connection)

def update_group(self, group: Group, action: Direction, users_set: set[int]):
"""Update a group, either using PUT to replace it or PATCH if supported"""
scim_group = SCIMProviderGroup.objects.filter(provider=self.provider, group=group).first()
if not scim_group:
self.logger.warning(
"could not sync group membership, group does not exist", group=group
)
return
if self._config.patch.supported:
if action == Direction.add:
return self._patch_add_users(group, users_set)
return self._patch_add_users(scim_group, users_set)
if action == Direction.remove:
return self._patch_remove_users(group, users_set)
return self._patch_remove_users(scim_group, users_set)
try:
return self.write(group)
except SCIMRequestException as exc:

@@ -154,19 +181,24 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
# Assume that provider does not support PUT and also doesn't support
# ServiceProviderConfig, so try PATCH as a fallback
if action == Direction.add:
return self._patch_add_users(group, users_set)
return self._patch_add_users(scim_group, users_set)
if action == Direction.remove:
return self._patch_remove_users(group, users_set)
return self._patch_remove_users(scim_group, users_set)
raise exc

def _patch(
def _patch_chunked(
self,
group_id: str,
*ops: PatchOperation,
):
"""Helper function that chunks patch requests based on the maxOperations attribute.
This is not strictly according to specs but there's nothing in the schema that allows the
us to know what the maximum patch operations per request should be."""
chunk_size = self._config.bulk.maxOperations
if chunk_size < 1:
chunk_size = len(ops)
if len(ops) < 1:
return
for chunk in batched(ops, chunk_size):
req = PatchRequest(Operations=list(chunk))
self._request(
@@ -177,16 +209,70 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
),
)

def _patch_add_users(self, group: Group, users_set: set[int]):
"""Add users in users_set to group"""
if len(users_set) < 1:
return
@transaction.atomic
def patch_compare_users(self, group: Group):
"""Compare users with a SCIM group and add/remove any differences"""
# Get scim group first
scim_group = SCIMProviderGroup.objects.filter(provider=self.provider, group=group).first()
if not scim_group:
self.logger.warning(
"could not sync group membership, group does not exist", group=group
)
return
# Get a list of all users in the authentik group
raw_users_should = list(group.users.order_by("id").values_list("id", flat=True))
# Lookup the SCIM IDs of the users
users_should: list[str] = list(
SCIMProviderUser.objects.filter(
user__pk__in=raw_users_should, provider=self.provider
).values_list("scim_id", flat=True)
)
if len(raw_users_should) != len(users_should):
self.logger.warning(
"User count mismatch, not all users in the group are synced to SCIM yet.",
group=group,
)
# Get current group status
current_group = SCIMGroupSchema.model_validate(
self._request("GET", f"/Groups/{scim_group.scim_id}")
)
users_to_add = []
users_to_remove = []
# Check users currently in group and if they shouldn't be in the group and remove them
for user in current_group.members or []:
if user.value not in users_should:
users_to_remove.append(user.value)
# Check users that should be in the group and add them
for user in users_should:
if len([x for x in current_group.members if x.value == user]) < 1:
users_to_add.append(user)
# Only send request if we need to make changes
if len(users_to_add) < 1 and len(users_to_remove) < 1:
return
return self._patch_chunked(
scim_group.scim_id,
*[
PatchOperation(
op=PatchOp.add,
path="members",
value=[{"value": x}],
)
for x in users_to_add
],
*[
PatchOperation(
op=PatchOp.remove,
path="members",
value=[{"value": x}],
)
for x in users_to_remove
],
)

def _patch_add_users(self, scim_group: SCIMProviderGroup, users_set: set[int]):
"""Add users in users_set to group"""
if len(users_set) < 1:
return
user_ids = list(
SCIMProviderUser.objects.filter(
user__pk__in=users_set, provider=self.provider
@@ -194,7 +280,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
)
if len(user_ids) < 1:
return
self._patch(
self._patch_chunked(
scim_group.scim_id,
*[
PatchOperation(

@@ -206,16 +292,10 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
],
)

def _patch_remove_users(self, group: Group, users_set: set[int]):
def _patch_remove_users(self, scim_group: SCIMProviderGroup, users_set: set[int]):
"""Remove users in users_set from group"""
if len(users_set) < 1:
return
scim_group = SCIMProviderGroup.objects.filter(provider=self.provider, group=group).first()
if not scim_group:
self.logger.warning(
"could not sync group membership, group does not exist", group=group
)
return
user_ids = list(
SCIMProviderUser.objects.filter(
user__pk__in=users_set, provider=self.provider

@@ -223,7 +303,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
)
if len(user_ids) < 1:
return
self._patch(
self._patch_chunked(
scim_group.scim_id,
*[
PatchOperation(
@@ -2,6 +2,7 @@

from pydantic import Field
from pydanticscim.group import Group as BaseGroup
from pydanticscim.responses import PatchOperation as BasePatchOperation
from pydanticscim.responses import PatchRequest as BasePatchRequest
from pydanticscim.responses import SCIMError as BaseSCIMError
from pydanticscim.service_provider import Bulk as BaseBulk

@@ -68,6 +69,12 @@ class PatchRequest(BasePatchRequest):
schemas: tuple[str] = ("urn:ietf:params:scim:api:messages:2.0:PatchOp",)


class PatchOperation(BasePatchOperation):
"""PatchOperation with optional path"""

path: str | None


class SCIMError(BaseSCIMError):
"""SCIM error with optional status code"""
@@ -252,3 +252,118 @@ class SCIMMembershipTests(TestCase):
],
},
)

def test_member_add_save(self):
"""Test member add + save"""
config = ServiceProviderConfiguration.default()

config.patch.supported = True
user_scim_id = generate_id()
group_scim_id = generate_id()
uid = generate_id()
group = Group.objects.create(
name=uid,
)

user = User.objects.create(username=generate_id())

# Test initial sync of group creation
with Mocker() as mocker:
mocker.get(
"https://localhost/ServiceProviderConfig",
json=config.model_dump(),
)
mocker.post(
"https://localhost/Users",
json={
"id": user_scim_id,
},
)
mocker.post(
"https://localhost/Groups",
json={
"id": group_scim_id,
},
)

self.configure()
sync_tasks.trigger_single_task(self.provider, scim_sync).get()

self.assertEqual(mocker.call_count, 6)
self.assertEqual(mocker.request_history[0].method, "GET")
self.assertEqual(mocker.request_history[1].method, "GET")
self.assertEqual(mocker.request_history[2].method, "GET")
self.assertEqual(mocker.request_history[3].method, "POST")
self.assertEqual(mocker.request_history[4].method, "GET")
self.assertEqual(mocker.request_history[5].method, "POST")
self.assertJSONEqual(
mocker.request_history[3].body,
{
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
"emails": [],
"active": True,
"externalId": user.uid,
"name": {"familyName": " ", "formatted": " ", "givenName": ""},
"displayName": "",
"userName": user.username,
},
)
self.assertJSONEqual(
mocker.request_history[5].body,
{
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:Group"],
"externalId": str(group.pk),
"displayName": group.name,
},
)
with Mocker() as mocker:
mocker.get(
"https://localhost/ServiceProviderConfig",
json=config.model_dump(),
)
mocker.get(
f"https://localhost/Groups/{group_scim_id}",
json={},
)
mocker.patch(
f"https://localhost/Groups/{group_scim_id}",
json={},
)
group.users.add(user)
group.save()
self.assertEqual(mocker.call_count, 5)
self.assertEqual(mocker.request_history[0].method, "GET")
self.assertEqual(mocker.request_history[1].method, "PATCH")
self.assertEqual(mocker.request_history[2].method, "GET")
self.assertEqual(mocker.request_history[3].method, "PATCH")
self.assertEqual(mocker.request_history[4].method, "GET")
self.assertJSONEqual(
mocker.request_history[1].body,
{
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
"Operations": [
{
"op": "add",
"path": "members",
"value": [{"value": user_scim_id}],
}
],
},
)
self.assertJSONEqual(
mocker.request_history[3].body,
{
"Operations": [
{
"op": "replace",
"value": {
"id": group_scim_id,
"displayName": group.name,
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:Group"],
"externalId": str(group.pk),
},
}
]
},
)
@@ -2,6 +2,7 @@

from collections.abc import Callable
from hashlib import sha512
from ipaddress import ip_address
from time import perf_counter, time
from typing import Any

@@ -174,6 +175,7 @@ class ClientIPMiddleware:

def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]):
self.get_response = get_response
self.logger = get_logger().bind()

def _get_client_ip_from_meta(self, meta: dict[str, Any]) -> str:
"""Attempt to get the client's IP by checking common HTTP Headers.

@@ -185,11 +187,16 @@ class ClientIPMiddleware:
"HTTP_X_FORWARDED_FOR",
"REMOTE_ADDR",
)
for _header in headers:
if _header in meta:
ips: list[str] = meta.get(_header).split(",")
return ips[0].strip()
return self.default_ip
try:
for _header in headers:
if _header in meta:
ips: list[str] = meta.get(_header).split(",")
# Ensure the IP parses as a valid IP
return str(ip_address(ips[0].strip()))
return self.default_ip
except ValueError as exc:
self.logger.debug("Invalid remote IP", exc=exc)
return self.default_ip

# FIXME: this should probably not be in `root` but rather in a middleware in `outposts`
# but for now it's fine

@@ -226,7 +233,11 @@ class ClientIPMiddleware:
Scope.get_isolation_scope().set_user(user)
# Set the outpost service account on the request
setattr(request, self.request_attr_outpost_user, user)
return delegated_ip
try:
return str(ip_address(delegated_ip))
except ValueError as exc:
self.logger.debug("Invalid remote IP from Outpost", exc=exc)
return None

def _get_client_ip(self, request: HttpRequest | None) -> str:
"""Attempt to get the client's IP by checking common HTTP Headers.
@@ -3,6 +3,7 @@
from typing import Any

from django.core.cache import cache
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema, inline_serializer
from guardian.shortcuts import get_objects_for_user
from rest_framework.decorators import action

@@ -39,9 +40,8 @@ class LDAPSourceSerializer(SourceSerializer):
"""Get cached source connectivity"""
return cache.get(CACHE_KEY_STATUS + source.slug, None)

def validate(self, attrs: dict[str, Any]) -> dict[str, Any]:
def validate_sync_users_password(self, sync_users_password: bool) -> bool:
"""Check that only a single source has password_sync on"""
sync_users_password = attrs.get("sync_users_password", True)
if sync_users_password:
sources = LDAPSource.objects.filter(sync_users_password=True)
if self.instance:

@@ -49,11 +49,31 @@ class LDAPSourceSerializer(SourceSerializer):
if sources.exists():
raise ValidationError(
{
"sync_users_password": (
"sync_users_password": _(
"Only a single LDAP Source with password synchronization is allowed"
)
}
)
return sync_users_password

def validate(self, attrs: dict[str, Any]) -> dict[str, Any]:
"""Validate property mappings with sync_ flags"""
types = ["user", "group"]
for type in types:
toggle_value = attrs.get(f"sync_{type}s", False)
mappings_field = f"{type}_property_mappings"
mappings_value = attrs.get(mappings_field, [])
if toggle_value and len(mappings_value) == 0:
raise ValidationError(
{
mappings_field: _(
(
"When 'Sync {type}s' is enabled, '{type}s property "
"mappings' cannot be empty."
).format(type=type)
)
}
)
return super().validate(attrs)

class Meta:

@@ -166,11 +186,12 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
for sync_class in SYNC_CLASSES:
class_name = sync_class.name()
all_objects.setdefault(class_name, [])
for obj in sync_class(source).get_objects(size_limit=10):
obj: dict
obj.pop("raw_attributes", None)
obj.pop("raw_dn", None)
all_objects[class_name].append(obj)
for page in sync_class(source).get_objects(size_limit=10):
for obj in page:
obj: dict
obj.pop("raw_attributes", None)
obj.pop("raw_dn", None)
all_objects[class_name].append(obj)
return Response(data=all_objects)
@@ -26,17 +26,16 @@ def sync_ldap_source_on_save(sender, instance: LDAPSource, **_):
"""Ensure that source is synced on save (if enabled)"""
if not instance.enabled:
return
ldap_connectivity_check.delay(instance.pk)
# Don't sync sources when they don't have any property mappings. This will only happen if:
# - the user forgets to set them or
# - the source is newly created, this is the first save event
# and the mappings are created with an m2m event
if (
not instance.user_property_mappings.exists()
or not instance.group_property_mappings.exists()
):
if instance.sync_users and not instance.user_property_mappings.exists():
return
if instance.sync_groups and not instance.group_property_mappings.exists():
return
ldap_sync_single.delay(instance.pk)
ldap_connectivity_check.delay(instance.pk)


@receiver(password_validate)
authentik/sources/ldap/sync/vendor/ms_ad.py (vendored, 4 changed lines)

@@ -78,7 +78,9 @@ class MicrosoftActiveDirectory(BaseLDAPSynchronizer):
# /useraccountcontrol-manipulate-account-properties
uac_bit = attributes.get("userAccountControl", 512)
uac = UserAccountControl(uac_bit)
is_active = UserAccountControl.ACCOUNTDISABLE not in uac
is_active = (
UserAccountControl.ACCOUNTDISABLE not in uac and UserAccountControl.LOCKOUT not in uac
)
if is_active != user.is_active:
user.is_active = is_active
user.save()
@@ -50,3 +50,35 @@ class LDAPAPITests(APITestCase):
}
)
self.assertFalse(serializer.is_valid())

def test_sync_users_mapping_empty(self):
"""Check that when sync_users is enabled, property mappings must be set"""
serializer = LDAPSourceSerializer(
data={
"name": "foo",
"slug": " foo",
"server_uri": "ldaps://1.2.3.4",
"bind_cn": "",
"bind_password": LDAP_PASSWORD,
"base_dn": "dc=foo",
"sync_users": True,
"user_property_mappings": [],
}
)
self.assertFalse(serializer.is_valid())

def test_sync_groups_mapping_empty(self):
"""Check that when sync_groups is enabled, property mappings must be set"""
serializer = LDAPSourceSerializer(
data={
"name": "foo",
"slug": " foo",
"server_uri": "ldaps://1.2.3.4",
"bind_cn": "",
"bind_password": LDAP_PASSWORD,
"base_dn": "dc=foo",
"sync_groups": True,
"group_property_mappings": [],
}
)
self.assertFalse(serializer.is_valid())
@@ -82,3 +82,5 @@ entries:
order: 10
target: !KeyOf default-authentication-flow-password-binding
policy: !KeyOf default-authentication-flow-password-optional
attrs:
failure_result: true
@@ -2,7 +2,7 @@
"$schema": "http://json-schema.org/draft-07/schema",
"$id": "https://goauthentik.io/blueprints/schema.json",
"type": "object",
"title": "authentik 2024.8.2 Blueprint schema",
"title": "authentik 2024.8.4 Blueprint schema",
"required": [
"version",
"entries"
@@ -31,7 +31,7 @@ services:
volumes:
- redis:/data
server:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.8.2}
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.8.4}
restart: unless-stopped
command: server
environment:

@@ -52,7 +52,7 @@ services:
- postgresql
- redis
worker:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.8.2}
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.8.4}
restart: unless-stopped
command: worker
environment:
@@ -29,4 +29,4 @@ func UserAgent() string {
return fmt.Sprintf("authentik@%s", FullVersion())
}

const VERSION = "2024.8.2"
const VERSION = "2024.8.4"
@@ -6,6 +6,7 @@ import (
"errors"
"net"
"net/http"
"strings"
"sync"

sentryhttp "github.com/getsentry/sentry-go/http"

@@ -70,12 +71,20 @@ func NewProxyServer(ac *ak.APIController) *ProxyServer {
}

func (ps *ProxyServer) HandleHost(rw http.ResponseWriter, r *http.Request) bool {
// Always handle requests for outpost paths that should answer regardless of hostname
if strings.HasPrefix(r.URL.Path, "/outpost.goauthentik.io/ping") ||
strings.HasPrefix(r.URL.Path, "/outpost.goauthentik.io/static") {
ps.mux.ServeHTTP(rw, r)
return true
}
// lookup app by hostname
a, _ := ps.lookupApp(r)
if a == nil {
return false
}
// check if the app should handle this URL, or is setup in proxy mode
if a.ShouldHandleURL(r) || a.Mode() == api.PROXYMODE_PROXY {
a.ServeHTTP(rw, r)
ps.mux.ServeHTTP(rw, r)
return true
}
return false
@@ -1,5 +1,5 @@
{
"name": "@goauthentik/authentik",
"version": "2024.8.2",
"version": "2024.8.4",
"private": true
}

@@ -1,6 +1,6 @@
[tool.poetry]
name = "authentik"
version = "2024.8.2"
version = "2024.8.4"
description = ""
authors = ["authentik Team <hello@goauthentik.io>"]

@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
title: authentik
version: 2024.8.2
version: 2024.8.4
description: Making authentication simple.
contact:
email: hello@goauthentik.io
web/package-lock.json (generated, 16 changed lines)

@@ -26750,7 +26750,9 @@
}
},
"packages/sfe/node_modules/@goauthentik/api": {
"version": "2024.6.0-1720200294"
"version": "2024.6.0-1720200294",
"resolved": "https://registry.npmjs.org/@goauthentik/api/-/api-2024.6.0-1720200294.tgz",
"integrity": "sha512-qGpI+0BpsHWlO8waj89q+6SWjVVuRtYqdmpSIrKFsZt9GLNXCvIAvgS5JI1Sq2z1uWK/8kLNZKDocI/XagqMPQ=="
},
"packages/sfe/node_modules/@swc/cli": {
"version": "0.4.0",

@@ -26783,7 +26785,9 @@
"chokidar": {
"optional": true
}
}
},
"resolved": "https://registry.npmjs.org/@swc/cli/-/cli-0.4.0.tgz",
"integrity": "sha512-4JdVrPtF/4rCMXp6Q1h5I6YkYZrCCcqod7Wk97ZQq7K8vNGzJUryBv4eHCvqx5sJOJBrbYm9fcswe1B0TygNoA=="
},
"packages/sfe/node_modules/commander": {
"version": "8.3.0",

@@ -26791,7 +26795,9 @@
"license": "MIT",
"engines": {
"node": ">= 12"
}
},
"resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz",
"integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww=="
},
"packages/sfe/node_modules/semver": {
"version": "7.6.3",

@@ -26802,7 +26808,9 @@
},
"engines": {
"node": ">=10"
}
},
"resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz",
"integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A=="
}
}
}
@@ -258,7 +258,7 @@
"lint:lockfile": {
"__comment": "The lockfile-lint package does not have an option to ensure resolved hashes are set everywhere",
"shell": true,
"command": "[ -z \"$(jq -r '.packages | to_entries[] | select((.key | startswith(\"node_modules\")) and (.value | has(\"resolved\") | not)) | .key' < package-lock.json)\" ]"
"command": "[ -z \"$(jq -r '.packages | to_entries[] | select((.key | contains(\"node_modules\")) and (.value | has(\"resolved\") | not)) | .key' < package-lock.json)\" ]"
},
"lint:lockfiles": {
"dependencies": [
@@ -8,7 +8,12 @@ import { msg } from "@lit/localize";
import { TemplateResult, html } from "lit";
import { customElement, property } from "lit/decorators.js";

import { GoogleWorkspaceProviderGroup, ProvidersApi, SyncObjectModelEnum } from "@goauthentik/api";
import {
GoogleWorkspaceProviderGroup,
ProvidersApi,
ProvidersGoogleWorkspaceSyncObjectCreateRequest,
SyncObjectModelEnum,
} from "@goauthentik/api";

@customElement("ak-provider-google-workspace-groups-list")
export class GoogleWorkspaceProviderGroupList extends Table<GoogleWorkspaceProviderGroup> {

@@ -31,8 +36,11 @@ export class GoogleWorkspaceProviderGroupList extends Table<GoogleWorkspaceProvi
<ak-sync-object-form
.provider=${this.providerId}
model=${SyncObjectModelEnum.Group}
.sync=${new ProvidersApi(DEFAULT_CONFIG)
.providersGoogleWorkspaceSyncObjectCreate}
.sync=${(data: ProvidersGoogleWorkspaceSyncObjectCreateRequest) => {
return new ProvidersApi(
DEFAULT_CONFIG,
).providersGoogleWorkspaceSyncObjectCreate(data);
}}
slot="form"
>
</ak-sync-object-form>
@@ -8,7 +8,12 @@ import { msg } from "@lit/localize";
import { TemplateResult, html } from "lit";
import { customElement, property } from "lit/decorators.js";

import { GoogleWorkspaceProviderUser, ProvidersApi, SyncObjectModelEnum } from "@goauthentik/api";
import {
GoogleWorkspaceProviderUser,
ProvidersApi,
ProvidersGoogleWorkspaceSyncObjectCreateRequest,
SyncObjectModelEnum,
} from "@goauthentik/api";

@customElement("ak-provider-google-workspace-users-list")
export class GoogleWorkspaceProviderUserList extends Table<GoogleWorkspaceProviderUser> {

@@ -31,8 +36,11 @@ export class GoogleWorkspaceProviderUserList extends Table<GoogleWorkspaceProvid
<ak-sync-object-form
.provider=${this.providerId}
model=${SyncObjectModelEnum.User}
.sync=${new ProvidersApi(DEFAULT_CONFIG)
.providersGoogleWorkspaceSyncObjectCreate}
.sync=${(data: ProvidersGoogleWorkspaceSyncObjectCreateRequest) => {
return new ProvidersApi(
DEFAULT_CONFIG,
).providersGoogleWorkspaceSyncObjectCreate(data);
}}
slot="form"
>
</ak-sync-object-form>
@@ -8,7 +8,12 @@ import { msg } from "@lit/localize";
import { TemplateResult, html } from "lit";
import { customElement, property } from "lit/decorators.js";

import { MicrosoftEntraProviderGroup, ProvidersApi, SyncObjectModelEnum } from "@goauthentik/api";
import {
MicrosoftEntraProviderGroup,
ProvidersApi,
ProvidersMicrosoftEntraSyncObjectCreateRequest,
SyncObjectModelEnum,
} from "@goauthentik/api";

@customElement("ak-provider-microsoft-entra-groups-list")
export class MicrosoftEntraProviderGroupList extends Table<MicrosoftEntraProviderGroup> {

@@ -28,8 +33,11 @@ export class MicrosoftEntraProviderGroupList extends Table<MicrosoftEntraProvide
<ak-sync-object-form
.provider=${this.providerId}
model=${SyncObjectModelEnum.Group}
.sync=${new ProvidersApi(DEFAULT_CONFIG)
.providersMicrosoftEntraSyncObjectCreate}
.sync=${(data: ProvidersMicrosoftEntraSyncObjectCreateRequest) => {
return new ProvidersApi(
DEFAULT_CONFIG,
).providersMicrosoftEntraSyncObjectCreate(data);
}}
slot="form"
>
</ak-sync-object-form>
@@ -8,7 +8,12 @@ import { msg } from "@lit/localize";
import { TemplateResult, html } from "lit";
import { customElement, property } from "lit/decorators.js";

import { MicrosoftEntraProviderUser, ProvidersApi, SyncObjectModelEnum } from "@goauthentik/api";
import {
MicrosoftEntraProviderUser,
ProvidersApi,
ProvidersMicrosoftEntraSyncObjectCreateRequest,
SyncObjectModelEnum,
} from "@goauthentik/api";

@customElement("ak-provider-microsoft-entra-users-list")
export class MicrosoftEntraProviderUserList extends Table<MicrosoftEntraProviderUser> {

@@ -31,8 +36,11 @@ export class MicrosoftEntraProviderUserList extends Table<MicrosoftEntraProvider
<ak-sync-object-form
.provider=${this.providerId}
model=${SyncObjectModelEnum.User}
.sync=${new ProvidersApi(DEFAULT_CONFIG)
.providersMicrosoftEntraSyncObjectCreate}
.sync=${(data: ProvidersMicrosoftEntraSyncObjectCreateRequest) => {
return new ProvidersApi(
DEFAULT_CONFIG,
).providersMicrosoftEntraSyncObjectCreate(data);
}}
slot="form"
>
</ak-sync-object-form>
@@ -162,7 +162,6 @@ export class OAuth2ProviderFormPage extends BaseProviderForm<OAuth2Provider> {
<ak-flow-search
flowType=${FlowsInstancesListDesignationEnum.Authentication}
.currentFlow=${provider?.authenticationFlow}
required
></ak-flow-search>
<p class="pf-c-form__helper-text">
${msg("Flow used when a user access this provider and is not authenticated.")}

@@ -171,7 +170,7 @@ export class OAuth2ProviderFormPage extends BaseProviderForm<OAuth2Provider> {
<ak-form-element-horizontal
name="authorizationFlow"
label=${msg("Authorization flow")}
?required=${true}
required
>
<ak-flow-search
flowType=${FlowsInstancesListDesignationEnum.Authorization}
@@ -38,12 +38,15 @@ export async function scimPropertyMappingsProvider(page = 1, search = "") {
};
}

export function makeSCIMPropertyMappingsSelector(instanceMappings: string[] | undefined) {
export function makeSCIMPropertyMappingsSelector(
instanceMappings: string[] | undefined,
defaultSelected: string,
) {
const localMappings = instanceMappings ? new Set(instanceMappings) : undefined;
return localMappings
? ([pk, _]: DualSelectPair) => localMappings.has(pk)
: ([_0, _1, _2, mapping]: DualSelectPair<SCIMMapping>) =>
mapping?.managed === "goauthentik.io/providers/scim/user";
mapping?.managed === defaultSelected;
}

@customElement("ak-provider-scim-form")

@@ -172,6 +175,7 @@ export class SCIMProviderFormPage extends BaseProviderForm<SCIMProvider> {
.provider=${scimPropertyMappingsProvider}
.selector=${makeSCIMPropertyMappingsSelector(
this.instance?.propertyMappings,
"goauthentik.io/providers/scim/user",
)}
available-label=${msg("Available User Property Mappings")}
selected-label=${msg("Selected User Property Mappings")}

@@ -188,6 +192,7 @@ export class SCIMProviderFormPage extends BaseProviderForm<SCIMProvider> {
.provider=${scimPropertyMappingsProvider}
.selector=${makeSCIMPropertyMappingsSelector(
this.instance?.propertyMappingsGroup,
"goauthentik.io/providers/scim/group",
)}
available-label=${msg("Available Group Property Mappings")}
selected-label=${msg("Selected Group Property Mappings")}
|
@ -8,7 +8,12 @@ import { msg } from "@lit/localize";
|
||||
import { TemplateResult, html } from "lit";
|
||||
import { customElement, property } from "lit/decorators.js";
|
||||
|
||||
import { ProvidersApi, SCIMProviderGroup, SyncObjectModelEnum } from "@goauthentik/api";
|
||||
import {
|
||||
ProvidersApi,
|
||||
ProvidersScimSyncObjectCreateRequest,
|
||||
SCIMProviderGroup,
|
||||
SyncObjectModelEnum,
|
||||
} from "@goauthentik/api";
|
||||
|
||||
@customElement("ak-provider-scim-groups-list")
|
||||
export class SCIMProviderGroupList extends Table<SCIMProviderGroup> {
|
||||
@ -29,7 +34,9 @@ export class SCIMProviderGroupList extends Table<SCIMProviderGroup> {
|
||||
<ak-sync-object-form
|
||||
.provider=${this.providerId}
|
||||
model=${SyncObjectModelEnum.Group}
|
||||
.sync=${new ProvidersApi(DEFAULT_CONFIG).providersScimSyncObjectCreate}
|
||||
.sync=${(data: ProvidersScimSyncObjectCreateRequest) => {
|
||||
return new ProvidersApi(DEFAULT_CONFIG).providersScimSyncObjectCreate(data);
|
||||
}}
|
||||
slot="form"
|
||||
>
|
||||
</ak-sync-object-form>
|
||||
|
@@ -8,7 +8,12 @@ import { msg } from "@lit/localize";
import { TemplateResult, html } from "lit";
import { customElement, property } from "lit/decorators.js";

import { ProvidersApi, SCIMProviderUser, SyncObjectModelEnum } from "@goauthentik/api";
import {
ProvidersApi,
ProvidersScimSyncObjectCreateRequest,
SCIMProviderUser,
SyncObjectModelEnum,
} from "@goauthentik/api";

@customElement("ak-provider-scim-users-list")
export class SCIMProviderUserList extends Table<SCIMProviderUser> {

@@ -29,7 +34,9 @@ export class SCIMProviderUserList extends Table<SCIMProviderUser> {
<ak-sync-object-form
.provider=${this.providerId}
model=${SyncObjectModelEnum.User}
.sync=${new ProvidersApi(DEFAULT_CONFIG).providersScimSyncObjectCreate}
.sync=${(data: ProvidersScimSyncObjectCreateRequest) => {
return new ProvidersApi(DEFAULT_CONFIG).providersScimSyncObjectCreate(data);
}}
slot="form"
>
</ak-sync-object-form>
@@ -327,7 +327,7 @@ export class LDAPSourceForm extends BaseSourceForm<LDAPSource> {
<ak-form-group>
<span slot="header"> ${msg("Additional settings")} </span>
<div slot="body" class="pf-c-form">
<ak-form-element-horizontal label=${msg("Group")} name="syncParentGroup">
<ak-form-element-horizontal label=${msg("Parent Group")} name="syncParentGroup">
<ak-search-select
.fetchObjects=${async (query?: string): Promise<Group[]> => {
const args: CoreGroupsListRequest = {
@@ -102,10 +102,14 @@ export class UserDeviceTable extends Table<Device> {
html`${item.name}`,
html`${deviceTypeName(item)}`,
html`${item.confirmed ? msg("Yes") : msg("No")}`,
html`<div>${getRelativeTime(item.created)}</div>
<small>${item.created.toLocaleString()}</small>`,
html`<div>${getRelativeTime(item.lastUpdated)}</div>
<small>${item.lastUpdated.toLocaleString()}</small>`,
html`${item.created.getTime() > 0
? html`<div>${getRelativeTime(item.created)}</div>
<small>${item.created.toLocaleString()}</small>`
: html`-`}`,
html`${item.lastUpdated
? html`<div>${getRelativeTime(item.lastUpdated)}</div>
<small>${item.lastUpdated.toLocaleString()}</small>`
: html`-`}`,
html`${item.lastUsed
? html`<div>${getRelativeTime(item.lastUsed)}</div>
<small>${item.lastUsed.toLocaleString()}</small>`
@@ -3,7 +3,7 @@ export const SUCCESS_CLASS = "pf-m-success";
export const ERROR_CLASS = "pf-m-danger";
export const PROGRESS_CLASS = "pf-m-in-progress";
export const CURRENT_CLASS = "pf-m-current";
export const VERSION = "2024.8.2";
export const VERSION = "2024.8.4";
export const TITLE_DEFAULT = "authentik";
export const ROUTE_SEPARATOR = ";";
website/.gitignore (vendored, 3 changed lines)

@@ -16,6 +16,9 @@
.env.test.local
.env.production.local

# Wireit's cache
.wireit

npm-debug.log*
yarn-debug.log*
yarn-error.log*
@@ -2,7 +2,7 @@

_Reported by [@m2a2](https://github.com/m2a2)_

## Improper Authorization for Token modification
## Insufficient Authorization for several API endpoints

### Summary
website/docs/security/CVE-2024-47070.md (new file, 35 lines)

@@ -0,0 +1,35 @@
# CVE-2024-47070

_Reported by [@efpi-bot](https://github.com/efpi-bot) from [LogicalTrust](https://logicaltrust.net/en/)_

## Password authentication bypass via X-Forwarded-For HTTP header

### Summary

The vulnerability allows bypassing policies by adding X-Forwarded-For header with unparsable IP address, e.g. "a". This results in a possibility to authenticate/authorize to any account with known login or email address.

Since the default authentication flow uses a policy to enable the password stage only when there is no password stage selected on the Identification stage, this vulnerability can be used to skip this policy and continue without the password stage.

### Am I affected

This can be exploited for the following configurations:

- An attacker can access authentik without a reverse proxy (and `AUTHENTIK_LISTEN__TRUSTED_PROXY_CIDRS` is not configured properly)
- The reverse proxy configuration does not correctly overwrite X-Forwarded-For
- Policies (User and group bindings do _not_ apply) are bound to authentication/authorization flows

### Patches

authentik 2024.6.5 and 2024.8.3 fix this issue.

### Workarounds

Ensure the X-Forwarded-For header is always set by the reverse proxy, and is always set to a correct IP.

In addition you can manually change the _Failure result_ option on policy bindings to _Pass_, which will prevent any stages from being skipped if a malicious request is received.

### For more information

If you have any questions or comments about this advisory:

- Email us at [security@goauthentik.io](mailto:security@goauthentik.io)
website/docs/security/CVE-2024-47077.md
Normal file
25
website/docs/security/CVE-2024-47077.md
Normal file
@ -0,0 +1,25 @@
|
||||
# CVE-2024-47077
|
||||
|
||||
_Reported by [@quentinmit](https://github.com/quentinmit)_
|
||||
|
||||
## Insufficient cross-provider token validation during introspection
|
||||
|
||||
### Summary
|
||||
|
||||
Access tokens issued to one application can be stolen by that application and used to impersonate the user against any other proxy provider. Also, a user can steal an access token they were legitimately issued for one application and use it to access another application that they aren't allowed to access.
|
||||
|
||||
### Details
|
||||
|
||||
The proxy provider uses `/application/o/introspect/` to validate bearer tokens provided in the `Authorization` header:
|
||||
|
||||
The implementation of this endpoint separately validates the `client_id` and `client_secret` (which are that of the proxy provider) and the `token` without validating that they correspond to the same provider.
|
||||
|
||||
### Patches
|
||||
|
||||
authentik 2024.6.5 and 2024.8.3 fix this issue.
|
||||
|
||||
### For more information
|
||||
|
||||
If you have any questions or comments about this advisory:
|
||||
|
||||
- Email us at [security@goauthentik.io](mailto:security@goauthentik.io)
|
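The corresponding fix is the provider-scoped token lookup shown in the introspection diff earlier in this compare. A minimal sketch of that check (only the filter arguments mirror the diff; the surrounding function is illustrative):

```python
def lookup_token_for_provider(raw_token: str, provider):
    """Only accept tokens issued by the provider whose client credentials authenticated the request."""
    access_token = AccessToken.objects.filter(token=raw_token, provider=provider).first()
    if access_token:
        return access_token
    refresh_token = RefreshToken.objects.filter(token=raw_token, provider=provider).first()
    if refresh_token:
        return refresh_token
    # No token belongs to this provider: report it as inactive rather than leaking its state
    return None
```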
@@ -18,13 +18,13 @@ To create or edit a source in authentik, open the Admin interface and navigate t

- **Update internal password on login**: When the user logs in to authentik using the LDAP password backend, the password is stored as a hashed value in authentik. Toggle off (default setting) if you do not want to store the hashed passwords in authentik.

- **Synch User**: Enable or disable user synchronization between authentik and the LDAP source.
- **Sync users**: Enable or disable user synchronization between authentik and the LDAP source.

- **User password writeback**: Enable this option if you want to write password changes that are made in authentik back to LDAP.

- **Synch groups**: Enable/disable group synchronization. Groups are synced in the background every 5 minutes.
- **Sync groups**: Enable/disable group synchronization. Groups are synced in the background every 5 minutes.

- **Sync parent group**: Optionally set this group as the parent group for all synced groups. An example use case of this would be to import Active Directory groups under a root `imported-from-ad` group.
- **Parent group**: Optionally set this group as the parent group for all synced groups. An example use case of this would be to import Active Directory groups under a root `imported-from-ad` group.

#### Connection settings

@@ -45,7 +45,11 @@ To create or edit a source in authentik, open the Admin interface and navigate t

#### LDAP Attribute mapping

- **User Property mappings** and **Group Property Mappings**: Define which LDAP properties map to which authentik properties. The default set of property mappings is generated for Active Directory. See also our documentation on [property mappings](#ldap-source-property-mappings).
- **User Property Mappings** and **Group Property Mappings**: Define which LDAP properties map to which authentik properties. The default set of property mappings is generated for Active Directory. See also our documentation on [property mappings](#ldap-source-property-mappings).

:::warning
When the **Sync users** and or the **Sync groups** options are enabled, their respective property mapping options must have at least one mapping selected, otherwise the sync will not start.
:::

#### Additional Settings
website/package-lock.json (generated, 6323 changed lines; diff suppressed because it is too large)

@@ -8,7 +8,7 @@
"build-bundled": "cp ../schema.yml static/schema.yaml && docusaurus gen-api-docs all && cross-env NODE_OPTIONS='--max_old_space_size=65536' docusaurus build",
"deploy": "docusaurus deploy",
"docusaurus": "docusaurus",
"lint:lockfile": "lockfile-lint --path package.json --type npm --allowed-hosts npm --validate-https",
"lint:lockfile": "wireit",
"prettier": "prettier --write .",
"prettier-check": "prettier --check .",
"serve": "docusaurus serve",

@@ -56,9 +56,16 @@
"@docusaurus/types": "^3.3.2",
"@types/react": "^18.3.4",
"cross-env": "^7.0.3",
"lockfile-lint": "^4.14.0",
"prettier": "3.3.3",
"typescript": "~5.5.4"
"typescript": "~5.6.2",
"wireit": "^0.14.9"
},
"wireit": {
"lint:lockfile": {
"__comment": "The lockfile-lint package does not have an option to ensure resolved hashes are set everywhere",
"shell": true,
"command": "[ -z \"$(jq -r '.packages | to_entries[] | select((.key | contains(\"node_modules\")) and (.value | has(\"resolved\") | not)) | .key' < package-lock.json)\" ]"
}
},
"engines": {
"node": ">=20"
@@ -521,6 +521,8 @@ const docsSidebar = {
items: [
"security/security-hardening",
"security/policy",
"security/CVE-2024-47077",
"security/CVE-2024-47070",
"security/CVE-2024-42490",
"security/CVE-2024-38371",
"security/CVE-2024-37905",