Compare commits
10 Commits
expiring-m ... version/20
| Author | SHA1 | Date |
| --- | --- | --- |
| | 2fb097061d | |
| | 8962d17e03 | |
| | 8326e1490c | |
| | 091e4d3e4c | |
| | 6ee77edcbb | |
| | 763e2288bf | |
| | 9cdb177ca7 | |
| | 6070508058 | |
| | ec13a5d84d | |
| | 057de82b01 | |
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 2024.8.3
current_version = 2024.8.0
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
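The parse line above is a standard named-group version pattern. A minimal sketch of how such a pattern splits a release string, assuming a single-backslash `\d` when the pattern is used directly from Python (the doubled backslash above is kept verbatim from the config):

```python
import re

# Hedged sketch: the named groups mirror the bumpversion parse pattern shown above.
VERSION_PATTERN = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\d*))?"
)

match = VERSION_PATTERN.match("2024.8.3")
print(match.groupdict())
# {'major': '2024', 'minor': '8', 'patch': '3', 'rc_t': None, 'rc_n': None}
```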
.github/dependabot.yml (7 changes, vendored)

@@ -23,6 +23,7 @@ updates:
- package-ecosystem: npm
directories:
- "/web"
- "/tests/wdio"
- "/web/sfe"
schedule:
interval: daily
@@ -43,11 +44,9 @@ updates:
- "babel-*"
eslint:
patterns:
- "@eslint/*"
- "@typescript-eslint/*"
- "eslint-*"
- "eslint"
- "typescript-eslint"
- "eslint-*"
storybook:
patterns:
- "@storybook/*"
@@ -55,12 +54,10 @@ updates:
esbuild:
patterns:
- "@esbuild/*"
- "esbuild*"
rollup:
patterns:
- "@rollup/*"
- "rollup-*"
- "rollup*"
swc:
patterns:
- "@swc/*"
.github/pull_request_template.md (2 changes, vendored)

@@ -1,7 +1,7 @@
<!--
👋 Hi there! Welcome.

Please check the Contributing guidelines: https://docs.goauthentik.io/docs/developer-docs/#how-can-i-contribute
Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute
-->

## Details
.github/workflows/api-ts-publish.yml (2 changes, vendored)

@@ -40,7 +40,7 @@ jobs:
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- uses: peter-evans/create-pull-request@v7
- uses: peter-evans/create-pull-request@v6
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
.github/workflows/ci-main.yml (18 changes, vendored)

@@ -120,12 +120,6 @@ jobs:
with:
flags: unit
token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: unit
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
test-integration:
runs-on: ubuntu-latest
timeout-minutes: 30
@@ -144,12 +138,6 @@ jobs:
with:
flags: integration
token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: integration
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
test-e2e:
name: test-e2e (${{ matrix.job.name }})
runs-on: ubuntu-latest
@@ -202,12 +190,6 @@ jobs:
with:
flags: e2e
token: ${{ secrets.CODECOV_TOKEN }}
- if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
flags: e2e
file: unittest.xml
token: ${{ secrets.CODECOV_TOKEN }}
ci-core-mark:
needs:
- lint
.github/workflows/ci-web.yml (22 changes, vendored)

@@ -24,11 +24,17 @@ jobs:
- prettier-check
project:
- web
- tests/wdio
include:
- command: tsc
project: web
- command: lit-analyse
project: web
exclude:
- command: lint:lockfile
project: tests/wdio
- command: tsc
project: tests/wdio
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
@@ -39,12 +45,21 @@ jobs:
- working-directory: ${{ matrix.project }}/
run: |
npm ci
${{ matrix.extra_setup }}
- name: Generate API
run: make gen-client-ts
- name: Lint
working-directory: ${{ matrix.project }}/
run: npm run ${{ matrix.command }}
ci-web-mark:
needs:
- lint
runs-on: ubuntu-latest
steps:
- run: echo mark
build:
needs:
- ci-web-mark
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -60,13 +75,6 @@ jobs:
- name: build
working-directory: web/
run: npm run build
ci-web-mark:
needs:
- build
- lint
runs-on: ubuntu-latest
steps:
- run: echo mark
test:
needs:
- ci-web-mark
@@ -24,7 +24,7 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- run: poetry run ak update_webauthn_mds
- uses: peter-evans/create-pull-request@v7
- uses: peter-evans/create-pull-request@v6
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
.github/workflows/image-compress.yml (2 changes, vendored)

@@ -42,7 +42,7 @@ jobs:
with:
githubToken: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }}
- uses: peter-evans/create-pull-request@v7
- uses: peter-evans/create-pull-request@v6
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
id: cpr
with:
@@ -32,7 +32,7 @@ jobs:
poetry run ak compilemessages
make web-check-compile
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v6
with:
token: ${{ steps.generate_token.outputs.token }}
branch: extract-compile-backend-translation
@@ -94,7 +94,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

# Stage 5: Python dependencies
FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS python-deps
FROM ghcr.io/goauthentik/fips-python:3.12.5-slim-bookworm-fips-full AS python-deps

ARG TARGETARCH
ARG TARGETVARIANT
@@ -124,7 +124,7 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
pip install --force-reinstall /wheels/*"

# Stage 6: Run
FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS final-image
FROM ghcr.io/goauthentik/fips-python:3.12.5-slim-bookworm-fips-full AS final-image

ARG VERSION
ARG GIT_BUILD_HASH
Makefile (5 changes)

@@ -19,13 +19,14 @@ pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
-I .github/codespell-words.txt \
-S 'web/src/locales/**' \
-S 'website/docs/developer-docs/api/reference/**' \
-S 'website/developer-docs/api/reference/**' \
authentik \
internal \
cmd \
web/src \
website/src \
website/blog \
website/developer-docs \
website/docs \
website/integrations \
website/src
@@ -204,7 +205,7 @@ gen: gen-build gen-client-ts
web-build: web-install ## Build the Authentik UI
cd web && npm run build

web: web-lint-fix web-lint web-check-compile ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
web: web-lint-fix web-lint web-check-compile web-test ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it

web-install: ## Install the necessary libraries to build the Authentik UI
cd web && npm ci
@@ -34,7 +34,7 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h

## Development

See [Developer Documentation](https://docs.goauthentik.io/docs/developer-docs/?utm_source=github)
See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github)

## Security
@@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni

| Version | Supported |
| -------- | --------- |
| 2024.4.x | ✅ |
| 2024.6.x | ✅ |
| 2024.8.x | ✅ |

## Reporting a Vulnerability
@@ -2,7 +2,7 @@

from os import environ

__version__ = "2024.8.3"
__version__ = "2024.8.0"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@@ -1,8 +1,10 @@
"""authentik admin tasks"""

import re

from django.core.cache import cache
from django.core.validators import URLValidator
from django.db import DatabaseError, InternalError, ProgrammingError
from django.utils.translation import gettext_lazy as _
from packaging.version import parse
from requests import RequestException
from structlog.stdlib import get_logger
@@ -19,6 +21,8 @@ LOGGER = get_logger()
VERSION_NULL = "0.0.0"
VERSION_CACHE_KEY = "authentik_latest_version"
VERSION_CACHE_TIMEOUT = 8 * 60 * 60 # 8 hours
# Chop of the first ^ because we want to search the entire string
URL_FINDER = URLValidator.regex.pattern[1:]
LOCAL_VERSION = parse(__version__)

@@ -74,16 +78,10 @@ def update_latest_version(self: SystemTask):
context__new_version=upstream_version,
).exists():
return
Event.new(
EventAction.UPDATE_AVAILABLE,
message=_(
"New version {version} available!".format(
version=upstream_version,
)
),
new_version=upstream_version,
changelog=data.get("stable", {}).get("changelog_url"),
).save()
event_dict = {"new_version": upstream_version}
if match := re.search(URL_FINDER, data.get("stable", {}).get("changelog", "")):
event_dict["message"] = f"Changelog: {match.group()}"
Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
except (RequestException, IndexError) as exc:
cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
self.set_error(exc)
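One side of the hunk above extracts the changelog link from free text instead of relying on a dedicated `changelog_url` field. A minimal sketch of that lookup, reusing the same `URLValidator` pattern with its leading `^` stripped (standalone illustration, not the actual task code):

```python
import re

from django.core.validators import URLValidator

# Drop the leading "^" so the URL can be found anywhere in the string.
URL_FINDER = URLValidator.regex.pattern[1:]

changelog = "See https://goauthentik.io/test"
event_dict = {"new_version": "99999999.9999999"}
if match := re.search(URL_FINDER, changelog):
    event_dict["message"] = f"Changelog: {match.group()}"
# event_dict["message"] == "Changelog: https://goauthentik.io/test"
```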
@@ -17,7 +17,6 @@ RESPONSE_VALID = {
"stable": {
"version": "99999999.9999999",
"changelog": "See https://goauthentik.io/test",
"changelog_url": "https://goauthentik.io/test",
"reason": "bugfix",
},
}
@@ -36,7 +35,7 @@ class TestAdminTasks(TestCase):
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999",
context__message="New version 99999999.9999999 available!",
context__message="Changelog: https://goauthentik.io/test",
).exists()
)
# test that a consecutive check doesn't create a duplicate event
@@ -46,7 +45,7 @@ class TestAdminTasks(TestCase):
Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE,
context__new_version="99999999.9999999",
context__message="New version 99999999.9999999 available!",
context__message="Changelog: https://goauthentik.io/test",
)
),
1,
@@ -51,11 +51,9 @@ class BlueprintInstanceSerializer(ModelSerializer):
context = self.instance.context if self.instance else {}
valid, logs = Importer.from_string(content, context).validate()
if not valid:
text_logs = "\n".join([x["event"] for x in logs])
raise ValidationError(
[
_("Failed to validate blueprint"),
*[f"- {x.event}" for x in logs],
]
_("Failed to validate blueprint: {logs}".format_map({"logs": text_logs}))
)
return content
@@ -29,7 +29,9 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
if version != 1:
return
blueprint_file.seek(0)
instance = BlueprintInstance.objects.using(db_alias).filter(path=path).first()
instance: BlueprintInstance = (
BlueprintInstance.objects.using(db_alias).filter(path=path).first()
)
rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
meta = None
if metadata:
@@ -78,5 +78,5 @@ class TestBlueprintsV1API(APITestCase):
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(
res.content.decode(),
{"content": ["Failed to validate blueprint", "- Invalid blueprint version"]},
{"content": ["Failed to validate blueprint: Invalid blueprint version"]},
)
@@ -69,7 +69,7 @@ from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
from authentik.tenants.models import Tenant

# Context set when the serializer is created in a blueprint context
# Update website/docs/customize/blueprints/v1/models.md when used
# Update website/developer-docs/blueprints/v1/models.md when used
SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"

@@ -429,7 +429,7 @@ class Importer:
orig_import = deepcopy(self._import)
if self._import.version != 1:
self.logger.warning("Invalid blueprint version")
return False, [LogEvent("Invalid blueprint version", log_level="warning", logger=None)]
return False, [{"event": "Invalid blueprint version"}]
with (
transaction_rollback(),
capture_logs() as logs,
@@ -30,10 +30,8 @@ from authentik.core.api.utils import (
PassiveSerializer,
)
from authentik.core.expression.evaluator import PropertyMappingEvaluator
from authentik.core.expression.exceptions import PropertyMappingExpressionException
from authentik.core.models import Group, PropertyMapping, User
from authentik.events.utils import sanitize_item
from authentik.lib.utils.errors import exception_to_string
from authentik.policies.api.exec import PolicyTestSerializer
from authentik.rbac.decorators import permission_required

@@ -164,15 +162,12 @@ class PropertyMappingViewSet(

response_data = {"successful": True, "result": ""}
try:
result = mapping.evaluate(dry_run=True, **context)
result = mapping.evaluate(**context)
response_data["result"] = dumps(
sanitize_item(result), indent=(4 if format_result else None)
)
except PropertyMappingExpressionException as exc:
response_data["result"] = exception_to_string(exc.exc)
response_data["successful"] = False
except Exception as exc:
response_data["result"] = exception_to_string(exc)
response_data["result"] = str(exc)
response_data["successful"] = False
response = PropertyMappingTestResultSerializer(response_data)
return Response(response.data)
@@ -38,7 +38,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
"name",
"authentication_flow",
"authorization_flow",
"invalidation_flow",
"property_mappings",
"component",
"assigned_application_slug",
@@ -51,7 +50,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
]
extra_kwargs = {
"authorization_flow": {"required": True, "allow_null": False},
"invalidation_flow": {"required": True, "allow_null": False},
}
@@ -678,13 +678,10 @@ class UserViewSet(UsedByMixin, ModelViewSet):
if not request.tenant.impersonation:
LOGGER.debug("User attempted to impersonate", user=request.user)
return Response(status=401)
user_to_be = self.get_object()
# Check both object-level perms and global perms
if not request.user.has_perm(
"authentik_core.impersonate", user_to_be
) and not request.user.has_perm("authentik_core.impersonate"):
if not request.user.has_perm("impersonate"):
LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
return Response(status=401)
user_to_be = self.get_object()
if user_to_be.pk == self.request.user.pk:
LOGGER.debug("User attempted to impersonate themselves", user=request.user)
return Response(status=401)
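One side of the hunk above accepts either an object-level grant on the specific target user or a global grant. A minimal hedged sketch of that check (standalone illustration; assumes an object-permission backend such as django-guardian, which the impersonation tests later in this diff also rely on):

```python
def can_impersonate(actor, target) -> bool:
    # Object-level permission on this specific user, or the global permission.
    return actor.has_perm("authentik_core.impersonate", target) or actor.has_perm(
        "authentik_core.impersonate"
    )
```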
@@ -9,11 +9,10 @@ class Command(TenantCommand):

def add_arguments(self, parser):
parser.add_argument("--type", type=str, required=True)
parser.add_argument("--all", action="store_true", default=False)
parser.add_argument("usernames", nargs="*", type=str)
parser.add_argument("--all", action="store_true")
parser.add_argument("usernames", nargs="+", type=str)

def handle_per_tenant(self, **options):
print(options)
new_type = UserTypes(options["type"])
qs = (
User.objects.exclude_anonymous()
@@ -23,9 +22,6 @@ class Command(TenantCommand):
if options["usernames"] and options["all"]:
self.stderr.write("--all and usernames specified, only one can be specified")
return
if not options["usernames"] and not options["all"]:
self.stderr.write("--all or usernames must be specified")
return
if options["usernames"] and not options["all"]:
qs = qs.filter(username__in=options["usernames"])
updated = qs.update(type=new_type)
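With `nargs="*"` the positional usernames become optional, so the command has to validate the usernames/`--all` combination itself. A short hedged sketch of that validation in plain argparse (the `--type` value and username are illustrative; this is not the actual management command):

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--type", type=str, required=True)
parser.add_argument("--all", action="store_true", default=False)
parser.add_argument("usernames", nargs="*", type=str)
args = parser.parse_args(["--type", "internal", "alice"])

# Either explicit usernames or --all must be given, but not both.
if args.usernames and args.all:
    raise SystemExit("--all and usernames specified, only one can be specified")
if not args.usernames and not args.all:
    raise SystemExit("--all or usernames must be specified")
```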
@@ -1,55 +0,0 @@
# Generated by Django 5.0.9 on 2024-10-02 11:35

import django.db.models.deletion
from django.db import migrations, models

from django.apps.registry import Apps
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor


def migrate_invalidation_flow_default(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
from authentik.flows.models import FlowDesignation, FlowAuthenticationRequirement

db_alias = schema_editor.connection.alias

Flow = apps.get_model("authentik_flows", "Flow")
Provider = apps.get_model("authentik_core", "Provider")

# So this flow is managed via a blueprint, bue we're in a migration so we don't want to rely on that
# since the blueprint is just an empty flow we can just create it here
# and let it be managed by the blueprint later
flow, _ = Flow.objects.using(db_alias).update_or_create(
slug="default-provider-invalidation-flow",
defaults={
"name": "Logged out of application",
"title": "You've logged out of %(app)s.",
"authentication": FlowAuthenticationRequirement.NONE,
"designation": FlowDesignation.INVALIDATION,
},
)
Provider.objects.using(db_alias).filter(invalidation_flow=None).update(invalidation_flow=flow)


class Migration(migrations.Migration):

dependencies = [
("authentik_core", "0039_source_group_matching_mode_alter_group_name_and_more"),
("authentik_flows", "0027_auto_20231028_1424"),
]

operations = [
migrations.AddField(
model_name="provider",
name="invalidation_flow",
field=models.ForeignKey(
default=None,
help_text="Flow used ending the session from a provider.",
null=True,
on_delete=django.db.models.deletion.SET_DEFAULT,
related_name="provider_invalidation",
to="authentik_flows.flow",
),
),
migrations.RunPython(migrate_invalidation_flow_default),
]
@@ -391,23 +391,14 @@ class Provider(SerializerModel):
),
related_name="provider_authentication",
)

authorization_flow = models.ForeignKey(
"authentik_flows.Flow",
# Set to cascade even though null is allowed, since most providers
# still require an authorization flow set
on_delete=models.CASCADE,
null=True,
help_text=_("Flow used when authorizing this provider."),
related_name="provider_authorization",
)
invalidation_flow = models.ForeignKey(
"authentik_flows.Flow",
on_delete=models.SET_DEFAULT,
default=None,
null=True,
help_text=_("Flow used ending the session from a provider."),
related_name="provider_invalidation",
)

property_mappings = models.ManyToManyField("PropertyMapping", default=None, blank=True)

@@ -475,6 +466,8 @@ class ApplicationQuerySet(QuerySet):
def with_provider(self) -> "QuerySet[Application]":
qs = self.select_related("provider")
for subclass in Provider.objects.get_queryset()._get_subclasses_recurse(Provider):
if LOOKUP_SEP in subclass:
continue
qs = qs.select_related(f"provider__{subclass}")
return qs
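The `with_provider()` helper above eagerly joins each direct `Provider` subclass so that resolving the concrete provider later does not cost one query per application. A simplified, standalone sketch of the same idea (assumes `Provider` from authentik.core.models is in scope; not the exact model code):

```python
from django.db.models.constants import LOOKUP_SEP


def with_provider(qs, base=Provider):
    """Select-related every direct subclass of Provider onto the queryset."""
    qs = qs.select_related("provider")
    for subclass in base.objects.get_queryset()._get_subclasses_recurse(base):
        if LOOKUP_SEP in subclass:
            # Nested (doubly-inherited) providers are resolved separately.
            continue
        qs = qs.select_related(f"provider__{subclass}")
    return qs
```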
@@ -552,24 +545,15 @@ class Application(SerializerModel, PolicyBindingModel):
if not self.provider:
return None

candidates = []
base_class = Provider
for subclass in base_class.objects.get_queryset()._get_subclasses_recurse(base_class):
parent = self.provider
for level in subclass.split(LOOKUP_SEP):
try:
parent = getattr(parent, level)
except AttributeError:
break
if parent in candidates:
for subclass in Provider.objects.get_queryset()._get_subclasses_recurse(Provider):
# We don't care about recursion, skip nested models
if LOOKUP_SEP in subclass:
continue
idx = subclass.count(LOOKUP_SEP)
if type(parent) is not base_class:
idx += 1
candidates.insert(idx, parent)
if not candidates:
return None
return candidates[-1]
try:
return getattr(self.provider, subclass)
except AttributeError:
pass
return None

def __str__(self):
return str(self.name)
@@ -802,25 +786,12 @@ class ExpiringModel(models.Model):
return self.delete(*args, **kwargs)

@classmethod
def _not_expired_filter(cls):
return Q(expires__gt=now(), expiring=True) | Q(expiring=False)

@classmethod
def filter_not_expired(cls, delete_expired=False, **kwargs) -> QuerySet["ExpiringModel"]:
def filter_not_expired(cls, **kwargs) -> QuerySet["Token"]:
"""Filer for tokens which are not expired yet or are not expiring,
and match filters in `kwargs`"""
if delete_expired:
cls.delete_expired(**kwargs)
return cls.objects.filter(cls._not_expired_filter()).filter(**kwargs)

@classmethod
def delete_expired(cls, **kwargs) -> int:
objects = cls.objects.all().exclude(cls._not_expired_filter()).filter(**kwargs)
amount = 0
for obj in objects:
obj.expire_action()
amount += 1
return amount
for obj in cls.objects.filter(**kwargs).filter(Q(expires__lt=now(), expiring=True)):
obj.delete()
return cls.objects.filter(**kwargs)

@property
def is_expired(self) -> bool:
@@ -930,7 +901,7 @@ class PropertyMapping(SerializerModel, ManagedModel):
except ControlFlowException as exc:
raise exc
except Exception as exc:
raise PropertyMappingExpressionException(exc, self) from exc
raise PropertyMappingExpressionException(self, exc) from exc

def __str__(self):
return f"Property Mapping {self.name}"
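The ExpiringModel hunk above centralises the "still valid" predicate in one Q expression so that the keep-filter and the cleanup path can share it. A hedged, standalone sketch of that predicate and how each side would use it (Token is one ExpiringModel subclass in this codebase; the usage lines are illustrative comments):

```python
from django.db.models import Q
from django.utils.timezone import now


def not_expired_filter() -> Q:
    # Valid if the object never expires, or its expiry lies in the future.
    return Q(expires__gt=now(), expiring=True) | Q(expiring=False)

# Usage sketch:
# live = Token.objects.filter(not_expired_filter())
# expired = Token.objects.exclude(not_expired_filter())  # expire_action() each
```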
@@ -30,7 +30,12 @@ def clean_expired_models(self: SystemTask):
messages = []
for cls in ExpiringModel.__subclasses__():
cls: ExpiringModel
amount = cls.delete_expired()
objects = (
cls.objects.all().exclude(expiring=False).exclude(expiring=True, expires__gt=now())
)
amount = objects.count()
for obj in objects:
obj.expire_action()
LOGGER.debug("Expired models", model=cls, amount=amount)
messages.append(f"Expired {amount} {cls._meta.verbose_name_plural}")
# Special case
authentik/core/templates/if/end_session.html (43 changes, new file)

@@ -0,0 +1,43 @@
{% extends 'login/base_full.html' %}

{% load static %}
{% load i18n %}

{% block title %}
{% trans 'End session' %} - {{ brand.branding_title }}
{% endblock %}

{% block card_title %}
{% blocktrans with application=application.name %}
You've logged out of {{ application }}.
{% endblocktrans %}
{% endblock %}

{% block card %}
<form method="POST" class="pf-c-form">
<p>
{% blocktrans with application=application.name branding_title=brand.branding_title %}
You've logged out of {{ application }}. You can go back to the overview to launch another application, or log out of your {{ branding_title }} account.
{% endblocktrans %}
</p>

<a id="ak-back-home" href="{% url 'authentik_core:root-redirect' %}" class="pf-c-button pf-m-primary">
{% trans 'Go back to overview' %}
</a>

<a id="logout" href="{% url 'authentik_flows:default-invalidation' %}" class="pf-c-button pf-m-secondary">
{% blocktrans with branding_title=brand.branding_title %}
Log out of {{ branding_title }}
{% endblocktrans %}
</a>

{% if application.get_launch_url %}
<a href="{{ application.get_launch_url }}" class="pf-c-button pf-m-secondary">
{% blocktrans with application=application.name %}
Log back into {{ application }}
{% endblocktrans %}
</a>
{% endif %}

</form>
{% endblock %}
@@ -9,12 +9,9 @@ from rest_framework.test import APITestCase

from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.lib.generators import generate_id
from authentik.policies.dummy.models import DummyPolicy
from authentik.policies.models import PolicyBinding
from authentik.providers.oauth2.models import OAuth2Provider
from authentik.providers.proxy.models import ProxyProvider
from authentik.providers.saml.models import SAMLProvider


class TestApplicationsAPI(APITestCase):
@@ -134,7 +131,6 @@ class TestApplicationsAPI(APITestCase):
"assigned_application_name": "allowed",
"assigned_application_slug": "allowed",
"authentication_flow": None,
"invalidation_flow": None,
"authorization_flow": str(self.provider.authorization_flow.pk),
"component": "ak-provider-oauth2-form",
"meta_model_name": "authentik_providers_oauth2.oauth2provider",
@@ -187,7 +183,6 @@ class TestApplicationsAPI(APITestCase):
"assigned_application_name": "allowed",
"assigned_application_slug": "allowed",
"authentication_flow": None,
"invalidation_flow": None,
"authorization_flow": str(self.provider.authorization_flow.pk),
"component": "ak-provider-oauth2-form",
"meta_model_name": "authentik_providers_oauth2.oauth2provider",
@@ -227,31 +222,3 @@ class TestApplicationsAPI(APITestCase):
],
},
)

def test_get_provider(self):
"""Ensure that proxy providers (at the time of writing that is the only provider
that inherits from another proxy type (OAuth) instead of inheriting from the root
provider class) is correctly looked up and selected from the database"""
slug = generate_id()
provider = ProxyProvider.objects.create(name=generate_id())
Application.objects.create(
name=generate_id(),
slug=slug,
provider=provider,
)
self.assertEqual(Application.objects.get(slug=slug).get_provider(), provider)
self.assertEqual(
Application.objects.with_provider().get(slug=slug).get_provider(), provider
)

slug = generate_id()
provider = SAMLProvider.objects.create(name=generate_id())
Application.objects.create(
name=generate_id(),
slug=slug,
provider=provider,
)
self.assertEqual(Application.objects.get(slug=slug).get_provider(), provider)
self.assertEqual(
Application.objects.with_provider().get(slug=slug).get_provider(), provider
)
@@ -3,10 +3,10 @@
from json import loads

from django.urls import reverse
from guardian.shortcuts import assign_perm
from rest_framework.test import APITestCase

from authentik.core.tests.utils import create_test_admin_user, create_test_user
from authentik.core.models import User
from authentik.core.tests.utils import create_test_admin_user
from authentik.tenants.utils import get_current_tenant

@@ -15,7 +15,7 @@ class TestImpersonation(APITestCase):

def setUp(self) -> None:
super().setUp()
self.other_user = create_test_user()
self.other_user = User.objects.create(username="to-impersonate")
self.user = create_test_admin_user()

def test_impersonate_simple(self):
@@ -44,46 +44,6 @@ class TestImpersonation(APITestCase):
self.assertEqual(response_body["user"]["username"], self.user.username)
self.assertNotIn("original", response_body)

def test_impersonate_global(self):
"""Test impersonation with global permissions"""
new_user = create_test_user()
assign_perm("authentik_core.impersonate", new_user)
assign_perm("authentik_core.view_user", new_user)
self.client.force_login(new_user)

response = self.client.post(
reverse(
"authentik_api:user-impersonate",
kwargs={"pk": self.other_user.pk},
)
)
self.assertEqual(response.status_code, 201)

response = self.client.get(reverse("authentik_api:user-me"))
response_body = loads(response.content.decode())
self.assertEqual(response_body["user"]["username"], self.other_user.username)
self.assertEqual(response_body["original"]["username"], new_user.username)

def test_impersonate_scoped(self):
"""Test impersonation with scoped permissions"""
new_user = create_test_user()
assign_perm("authentik_core.impersonate", new_user, self.other_user)
assign_perm("authentik_core.view_user", new_user, self.other_user)
self.client.force_login(new_user)

response = self.client.post(
reverse(
"authentik_api:user-impersonate",
kwargs={"pk": self.other_user.pk},
)
)
self.assertEqual(response.status_code, 201)

response = self.client.get(reverse("authentik_api:user-me"))
response_body = loads(response.content.decode())
self.assertEqual(response_body["user"]["username"], self.other_user.username)
self.assertEqual(response_body["original"]["username"], new_user.username)

def test_impersonate_denied(self):
"""test impersonation without permissions"""
self.client.force_login(self.other_user)
@@ -19,6 +19,7 @@ class TestTransactionalApplicationsAPI(APITestCase):
"""Test transactional Application + provider creation"""
self.client.force_login(self.user)
uid = generate_id()
authorization_flow = create_test_flow()
response = self.client.put(
reverse("authentik_api:core-transactional-application"),
data={
@@ -29,8 +30,7 @@ class TestTransactionalApplicationsAPI(APITestCase):
"provider_model": "authentik_providers_oauth2.oauth2provider",
"provider": {
"name": uid,
"authorization_flow": str(create_test_flow().pk),
"invalidation_flow": str(create_test_flow().pk),
"authorization_flow": str(authorization_flow.pk),
},
},
)
@@ -56,16 +56,10 @@ class TestTransactionalApplicationsAPI(APITestCase):
"provider": {
"name": uid,
"authorization_flow": "",
"invalidation_flow": "",
},
},
)
self.assertJSONEqual(
response.content.decode(),
{
"provider": {
"authorization_flow": ["This field may not be null."],
"invalidation_flow": ["This field may not be null."],
}
},
{"provider": {"authorization_flow": ["This field may not be null."]}},
)
@@ -24,6 +24,7 @@ from authentik.core.views.interface import (
InterfaceView,
RootRedirectView,
)
from authentik.core.views.session import EndSessionView
from authentik.flows.views.interface import FlowInterfaceView
from authentik.root.asgi_middleware import SessionMiddleware
from authentik.root.messages.consumer import MessageConsumer
@@ -59,6 +60,11 @@ urlpatterns = [
ensure_csrf_cookie(FlowInterfaceView.as_view()),
name="if-flow",
),
path(
"if/session-end/<slug:application_slug>/",
ensure_csrf_cookie(EndSessionView.as_view()),
name="if-session-end",
),
# Fallback for WS
path("ws/outpost/<uuid:pk>/", InterfaceView.as_view(template_name="if/admin.html")),
path(
authentik/core/views/session.py (23 changes, new file)

@@ -0,0 +1,23 @@
"""authentik Session Views"""

from typing import Any

from django.shortcuts import get_object_or_404
from django.views.generic.base import TemplateView

from authentik.core.models import Application
from authentik.policies.views import PolicyAccessView


class EndSessionView(TemplateView, PolicyAccessView):
"""Allow the client to end the Session"""

template_name = "if/end_session.html"

def resolve_provider_application(self):
self.application = get_object_or_404(Application, slug=self.kwargs["application_slug"])

def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
context = super().get_context_data(**kwargs)
context["application"] = self.application
return context
@@ -18,7 +18,7 @@ from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.core.models import User, UserTypes
from authentik.enterprise.license import LicenseKey, LicenseSummarySerializer
from authentik.enterprise.models import License
from authentik.enterprise.models import License, LicenseUsageStatus
from authentik.rbac.decorators import permission_required
from authentik.tenants.utils import get_unique_identifier

@@ -29,7 +29,7 @@ class EnterpriseRequiredMixin:

def validate(self, attrs: dict) -> dict:
"""Check that a valid license exists"""
if not LicenseKey.cached_summary().status.is_valid:
if LicenseKey.cached_summary().status != LicenseUsageStatus.UNLICENSED:
raise ValidationError(_("Enterprise is required to create/update this object."))
return super().validate(attrs)

@@ -121,9 +121,6 @@ class LicenseKey:
),
)
except PyJWTError:
unverified = decode(jwt, options={"verify_signature": False})
if unverified["aud"] != get_license_aud():
raise ValidationError("Invalid Install ID in license") from None
raise ValidationError("Unable to verify license") from None
return body
@@ -68,7 +68,6 @@ class TestEndpointsAPI(APITestCase):
"name": self.provider.name,
"authentication_flow": None,
"authorization_flow": None,
"invalidation_flow": None,
"property_mappings": [],
"connection_expiry": "hours=8",
"delete_token_on_disconnect": False,
@@ -121,7 +120,6 @@ class TestEndpointsAPI(APITestCase):
"name": self.provider.name,
"authentication_flow": None,
"authorization_flow": None,
"invalidation_flow": None,
"property_mappings": [],
"component": "ak-provider-rac-form",
"assigned_application_slug": self.app.slug,
@@ -151,7 +149,6 @@ class TestEndpointsAPI(APITestCase):
"name": self.provider.name,
"authentication_flow": None,
"authorization_flow": None,
"invalidation_flow": None,
"property_mappings": [],
"component": "ak-provider-rac-form",
"assigned_application_slug": self.app.slug,
@@ -3,7 +3,7 @@
from datetime import datetime

from django.core.cache import cache
from django.db.models.signals import post_delete, post_save, pre_save
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from django.utils.timezone import get_current_timezone

@@ -27,9 +27,3 @@ def post_save_license(sender: type[License], instance: License, **_):
"""Trigger license usage calculation when license is saved"""
cache.delete(CACHE_KEY_ENTERPRISE_LICENSE)
enterprise_update_usage.delay()


@receiver(post_delete, sender=License)
def post_delete_license(sender: type[License], instance: License, **_):
"""Clear license cache when license is deleted"""
cache.delete(CACHE_KEY_ENTERPRISE_LICENSE)
@@ -69,5 +69,8 @@ class NotificationViewSet(
@action(detail=False, methods=["post"])
def mark_all_seen(self, request: Request) -> Response:
"""Mark all the user's notifications as seen"""
Notification.objects.filter(user=request.user, seen=False).update(seen=True)
notifications = Notification.objects.filter(user=request.user)
for notification in notifications:
notification.seen = True
Notification.objects.bulk_update(notifications, ["seen"])
return Response({}, status=204)
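The two variants in the hunk above differ mainly in how many rows they touch. A hedged, side-by-side sketch of the design choice (standalone; not the exact viewset code):

```python
# Single UPDATE statement, only touching rows that are actually unseen:
Notification.objects.filter(user=request.user, seen=False).update(seen=True)

# Load-and-write-back variant: fetch all of the user's notifications,
# flip the flag in Python, then persist them in one bulk query.
notifications = list(Notification.objects.filter(user=request.user))
for notification in notifications:
    notification.seen = True
Notification.objects.bulk_update(notifications, ["seen"])
```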
@@ -50,7 +50,7 @@ class ASNContextProcessor(MMDBContextProcessor):
"""Wrapper for Reader.asn"""
with start_span(
op="authentik.events.asn.asn",
name=ip_address,
description=ip_address,
):
if not self.configured():
return None
@@ -51,7 +51,7 @@ class GeoIPContextProcessor(MMDBContextProcessor):
"""Wrapper for Reader.city"""
with start_span(
op="authentik.events.geo.city",
name=ip_address,
description=ip_address,
):
if not self.configured():
return None
@@ -49,7 +49,6 @@ from authentik.policies.models import PolicyBindingModel
from authentik.root.middleware import ClientIPMiddleware
from authentik.stages.email.utils import TemplateEmailMessage
from authentik.tenants.models import Tenant
from authentik.tenants.utils import get_current_tenant

LOGGER = get_logger()
DISCORD_FIELD_LIMIT = 25
@@ -59,11 +58,7 @@ NOTIFICATION_SUMMARY_LENGTH = 75

def default_event_duration():
"""Default duration an Event is saved.
This is used as a fallback when no brand is available"""
try:
tenant = get_current_tenant()
return now() + timedelta_from_string(tenant.event_retention)
except Tenant.DoesNotExist:
return now() + timedelta(days=365)
return now() + timedelta(days=365)


def default_brand():
@@ -250,6 +245,12 @@ class Event(SerializerModel, ExpiringModel):
if QS_QUERY in self.context["http_request"]["args"]:
wrapped = self.context["http_request"]["args"][QS_QUERY]
self.context["http_request"]["args"] = cleanse_dict(QueryDict(wrapped))
if hasattr(request, "tenant"):
tenant: Tenant = request.tenant
# Because self.created only gets set on save, we can't use it's value here
# hence we set self.created to now and then use it
self.created = now()
self.expires = self.created + timedelta_from_string(tenant.event_retention)
if hasattr(request, "brand"):
brand: Brand = request.brand
self.brand = sanitize_dict(model_to_dict(brand))
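Both halves of the hunk above lean on `timedelta_from_string`, which turns a retention setting such as "days=365" into a timedelta (the same string format appears as "hours=8" for connection_expiry elsewhere in this diff). A brief hedged sketch of deriving an event expiry from a tenant retention value (the import path and retention string are assumptions for illustration):

```python
from django.utils.timezone import now

from authentik.lib.utils.time import timedelta_from_string  # assumed import path

retention = "days=365"  # illustrative tenant.event_retention value
created = now()
expires = created + timedelta_from_string(retention)
```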
@@ -13,7 +13,7 @@ from authentik.events.apps import SYSTEM_TASK_STATUS
from authentik.events.models import Event, EventAction, SystemTask
from authentik.events.tasks import event_notification_handler, gdpr_cleanup
from authentik.flows.models import Stage
from authentik.flows.planner import PLAN_CONTEXT_OUTPOST, PLAN_CONTEXT_SOURCE, FlowPlan
from authentik.flows.planner import PLAN_CONTEXT_SOURCE, FlowPlan
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.root.monitoring import monitoring_set
from authentik.stages.invitation.models import Invitation
@@ -38,9 +38,6 @@ def on_user_logged_in(sender, request: HttpRequest, user: User, **_):
# Save the login method used
kwargs[PLAN_CONTEXT_METHOD] = flow_plan.context[PLAN_CONTEXT_METHOD]
kwargs[PLAN_CONTEXT_METHOD_ARGS] = flow_plan.context.get(PLAN_CONTEXT_METHOD_ARGS, {})
if PLAN_CONTEXT_OUTPOST in flow_plan.context:
# Save outpost context
kwargs[PLAN_CONTEXT_OUTPOST] = flow_plan.context[PLAN_CONTEXT_OUTPOST]
event = Event.new(EventAction.LOGIN, **kwargs).from_http(request, user=user)
request.session[SESSION_LOGIN_EVENT] = event
@@ -6,7 +6,6 @@ from django.db.models import Model
from django.test import TestCase

from authentik.core.models import default_token_key
from authentik.events.models import default_event_duration
from authentik.lib.utils.reflection import get_apps

@@ -21,7 +20,7 @@ def model_tester_factory(test_model: type[Model]) -> Callable:
allowed = 0
# Token-like objects need to lookup the current tenant to get the default token length
for field in test_model._meta.fields:
if field.default in [default_token_key, default_event_duration]:
if field.default == default_token_key:
allowed += 1
with self.assertNumQueries(allowed):
str(test_model())
@@ -2,8 +2,7 @@

from unittest.mock import MagicMock, patch

from django.urls import reverse
from rest_framework.test import APITestCase
from django.test import TestCase

from authentik.core.models import Group, User
from authentik.events.models import (
@@ -11,7 +10,6 @@ from authentik.events.models import (
EventAction,
Notification,
NotificationRule,
NotificationSeverity,
NotificationTransport,
NotificationWebhookMapping,
TransportMode,
@@ -22,7 +20,7 @@ from authentik.policies.exceptions import PolicyException
from authentik.policies.models import PolicyBinding


class TestEventsNotifications(APITestCase):
class TestEventsNotifications(TestCase):
"""Test Event Notifications"""

def setUp(self) -> None:
@@ -133,15 +131,3 @@ class TestEventsNotifications(APITestCase):
Notification.objects.all().delete()
Event.new(EventAction.CUSTOM_PREFIX).save()
self.assertEqual(Notification.objects.first().body, "foo")

def test_api_mark_all_seen(self):
"""Test mark_all_seen"""
self.client.force_login(self.user)

Notification.objects.create(
severity=NotificationSeverity.NOTICE, body="foo", user=self.user, seen=False
)

response = self.client.post(reverse("authentik_api:notification-mark-all-seen"))
self.assertEqual(response.status_code, 204)
self.assertFalse(Notification.objects.filter(body="foo", seen=False).exists())
@@ -110,21 +110,8 @@ class FlowErrorChallenge(Challenge):
class AccessDeniedChallenge(WithUserInfoChallenge):
"""Challenge when a flow's active stage calls `stage_invalid()`."""

component = CharField(default="ak-stage-access-denied")

error_message = CharField(required=False)


class SessionEndChallenge(WithUserInfoChallenge):
"""Challenge for ending a session"""

component = CharField(default="ak-stage-session-end")

application_name = CharField(required=False)
application_launch_url = CharField(required=False)

invalidation_flow_url = CharField(required=False)
brand_name = CharField(required=True)
component = CharField(default="ak-stage-access-denied")


class PermissionDict(TypedDict):
@@ -6,18 +6,20 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor


def set_oobe_flow_authentication(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
from guardian.conf import settings as guardian_settings
from guardian.shortcuts import get_anonymous_user

Flow = apps.get_model("authentik_flows", "Flow")
User = apps.get_model("authentik_core", "User")

db_alias = schema_editor.connection.alias

users = (
User.objects.using(db_alias)
.exclude(username="akadmin")
.exclude(username=guardian_settings.ANONYMOUS_USER_NAME)
)
users = User.objects.using(db_alias).exclude(username="akadmin")
try:
users = users.exclude(pk=get_anonymous_user().pk)

except Exception: # nosec
pass

if users.exists():
Flow.objects.using(db_alias).filter(slug="initial-setup").update(
authentication="require_superuser"
@@ -107,9 +107,7 @@ class Stage(SerializerModel):


def in_memory_stage(view: type["StageView"], **kwargs) -> Stage:
"""Creates an in-memory stage instance, based on a `view` as view.
Any key-word arguments are set as attributes on the stage object,
accessible via `self.executor.current_stage`."""
"""Creates an in-memory stage instance, based on a `view` as view."""
stage = Stage()
# Because we can't pickle a locally generated function,
# we set the view as a separate property and reference a generic function
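The longer docstring above spells out the contract: keyword arguments given to `in_memory_stage` become attributes of the returned stage and are reachable from the running stage view. A minimal hedged sketch of that usage (`MyStageView` and the attribute name are hypothetical):

```python
# Build an ad-hoc stage and stash extra data on it.
stage = in_memory_stage(MyStageView, error_message="Access denied")

# Later, inside MyStageView (a StageView subclass), the value is read back via:
# message = self.executor.current_stage.error_message
```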
@@ -23,7 +23,6 @@ from authentik.flows.models import (
in_memory_stage,
)
from authentik.lib.config import CONFIG
from authentik.outposts.models import Outpost
from authentik.policies.engine import PolicyEngine
from authentik.root.middleware import ClientIPMiddleware

@@ -33,7 +32,6 @@ PLAN_CONTEXT_SSO = "is_sso"
PLAN_CONTEXT_REDIRECT = "redirect"
PLAN_CONTEXT_APPLICATION = "application"
PLAN_CONTEXT_SOURCE = "source"
PLAN_CONTEXT_OUTPOST = "outpost"
# Is set by the Flow Planner when a FlowToken was used, and the currently active flow plan
# was restored.
PLAN_CONTEXT_IS_RESTORED = "is_restored"
@@ -145,28 +143,15 @@ class FlowPlanner:
and not request.user.is_superuser
):
raise FlowNonApplicableException()
outpost_user = ClientIPMiddleware.get_outpost_user(request)
if self.flow.authentication == FlowAuthenticationRequirement.REQUIRE_OUTPOST:
outpost_user = ClientIPMiddleware.get_outpost_user(request)
if not outpost_user:
raise FlowNonApplicableException()
if outpost_user:
outpost = Outpost.objects.filter(
# TODO: Since Outpost and user are not directly connected, we have to look up a user
# like this. This should ideally by in authentik/outposts/models.py
pk=outpost_user.username.replace("ak-outpost-", "")
).first()
if outpost:
return {
PLAN_CONTEXT_OUTPOST: {
"instance": outpost,
}
}
return {}

def plan(self, request: HttpRequest, default_context: dict[str, Any] | None = None) -> FlowPlan:
"""Check each of the flows' policies, check policies for each stage with PolicyBinding
and return ordered list"""
with start_span(op="authentik.flow.planner.plan", name=self.flow.slug) as span:
with start_span(op="authentik.flow.planner.plan", description=self.flow.slug) as span:
span: Span
span.set_data("flow", self.flow)
span.set_data("request", request)
@@ -174,12 +159,11 @@ class FlowPlanner:
self._logger.debug(
"f(plan): starting planning process",
)
context = default_context or {}
# Bit of a workaround here, if there is a pending user set in the default context
# we use that user for our cache key
# to make sure they don't get the generic response
if context and PLAN_CONTEXT_PENDING_USER in context:
user = context[PLAN_CONTEXT_PENDING_USER]
if default_context and PLAN_CONTEXT_PENDING_USER in default_context:
user = default_context[PLAN_CONTEXT_PENDING_USER]
else:
user = request.user
# We only need to check the flow authentication if it's planned without a user
@@ -187,13 +171,14 @@ class FlowPlanner:
# or if a flow is restarted due to `invalid_response_action` being set to
# `restart_with_context`, which can only happen if the user was already authorized
# to use the flow
context.update(self._check_authentication(request))
self._check_authentication(request)
# First off, check the flow's direct policy bindings
# to make sure the user even has access to the flow
engine = PolicyEngine(self.flow, user, request)
engine.use_cache = self.use_cache
span.set_data("context", cleanse_dict(context))
engine.request.context.update(context)
if default_context:
span.set_data("default_context", cleanse_dict(default_context))
engine.request.context.update(default_context)
engine.build()
result = engine.result
if not result.passing:
@@ -210,12 +195,12 @@ class FlowPlanner:
key=cached_plan_key,
)
# Reset the context as this isn't factored into caching
cached_plan.context = context
cached_plan.context = default_context or {}
return cached_plan
self._logger.debug(
"f(plan): building plan",
)
plan = self._build_plan(user, request, context)
plan = self._build_plan(user, request, default_context)
if self.use_cache:
cache.set(cache_key(self.flow, user), plan, CACHE_TIMEOUT)
if not plan.bindings and not self.allow_empty_flows:
@@ -233,7 +218,7 @@ class FlowPlanner:
with (
start_span(
op="authentik.flow.planner.build_plan",
name=self.flow.slug,
description=self.flow.slug,
) as span,
HIST_FLOWS_PLAN_TIME.labels(flow_slug=self.flow.slug).time(),
):
@@ -13,7 +13,7 @@ from rest_framework.request import Request
from sentry_sdk import start_span
from structlog.stdlib import BoundLogger, get_logger

from authentik.core.models import Application, User
from authentik.core.models import User
from authentik.flows.challenge import (
AccessDeniedChallenge,
Challenge,
@@ -21,7 +21,6 @@ from authentik.flows.challenge import (
ContextualFlowInfo,
HttpChallengeResponse,
RedirectChallenge,
SessionEndChallenge,
WithUserInfoChallenge,
)
from authentik.flows.exceptions import StageInvalidException
@@ -126,7 +125,7 @@ class ChallengeStageView(StageView):
with (
start_span(
op="authentik.flow.stage.challenge_invalid",
name=self.__class__.__name__,
description=self.__class__.__name__,
),
HIST_FLOWS_STAGE_TIME.labels(
stage_type=self.__class__.__name__, method="challenge_invalid"
@@ -136,7 +135,7 @@ class ChallengeStageView(StageView):
with (
start_span(
op="authentik.flow.stage.challenge_valid",
name=self.__class__.__name__,
description=self.__class__.__name__,
),
HIST_FLOWS_STAGE_TIME.labels(
stage_type=self.__class__.__name__, method="challenge_valid"
@@ -162,7 +161,7 @@ class ChallengeStageView(StageView):
with (
start_span(
op="authentik.flow.stage.get_challenge",
name=self.__class__.__name__,
description=self.__class__.__name__,
),
HIST_FLOWS_STAGE_TIME.labels(
stage_type=self.__class__.__name__, method="get_challenge"
@@ -175,7 +174,7 @@ class ChallengeStageView(StageView):
return self.executor.stage_invalid()
with start_span(
op="authentik.flow.stage._get_challenge",
name=self.__class__.__name__,
description=self.__class__.__name__,
):
if not hasattr(challenge, "initial_data"):
challenge.initial_data = {}
@@ -231,7 +230,7 @@ class ChallengeStageView(StageView):
return HttpChallengeResponse(challenge_response)


class AccessDeniedStage(ChallengeStageView):
class AccessDeniedChallengeView(ChallengeStageView):
"""Used internally by FlowExecutor's stage_invalid()"""

error_message: str | None
@@ -269,31 +268,3 @@ class RedirectStage(ChallengeStageView):

def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
return HttpChallengeResponse(self.get_challenge())


class SessionEndStage(ChallengeStageView):
"""Stage inserted when a flow is used as invalidation flow. By default shows actions
that the user is likely to take after signing out of a provider."""

def get_challenge(self, *args, **kwargs) -> Challenge:
application: Application | None = self.executor.plan.context.get(PLAN_CONTEXT_APPLICATION)
data = {
"component": "ak-stage-session-end",
"brand_name": self.request.brand.branding_title,
}
if application:
data["application_name"] = application.name
data["application_launch_url"] = application.get_launch_url(self.get_pending_user())
if self.request.brand.flow_invalidation:
data["invalidation_flow_url"] = reverse(
"authentik_core:if-flow",
kwargs={
"flow_slug": self.request.brand.flow_invalidation.slug,
},
)
return SessionEndChallenge(data=data)

# This can never be reached since this challenge is created on demand and only the
# .get() method is called
def challenge_valid(self, response: ChallengeResponse) -> HttpResponse: # pragma: no cover
return self.executor.cancel()
@@ -54,7 +54,7 @@ from authentik.flows.planner import (
FlowPlan,
FlowPlanner,
)
from authentik.flows.stage import AccessDeniedStage, StageView
from authentik.flows.stage import AccessDeniedChallengeView, StageView
from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.errors import exception_to_string
from authentik.lib.utils.reflection import all_subclasses, class_to_path
@@ -153,7 +153,7 @@ class FlowExecutorView(APIView):
return plan

def dispatch(self, request: HttpRequest, flow_slug: str) -> HttpResponse:
with start_span(op="authentik.flow.executor.dispatch", name=self.flow.slug) as span:
with start_span(op="authentik.flow.executor.dispatch", description=self.flow.slug) as span:
span.set_data("authentik Flow", self.flow.slug)
get_params = QueryDict(request.GET.get(QS_QUERY, ""))
if QS_KEY_TOKEN in get_params:
@@ -273,7 +273,7 @@ class FlowExecutorView(APIView):
with (
start_span(
op="authentik.flow.executor.stage",
name=class_path,
description=class_path,
) as span,
HIST_FLOW_EXECUTION_STAGE_TIME.labels(
method=request.method.upper(),
@@ -324,7 +324,7 @@ class FlowExecutorView(APIView):
with (
start_span(
op="authentik.flow.executor.stage",
name=class_path,
description=class_path,
) as span,
HIST_FLOW_EXECUTION_STAGE_TIME.labels(
method=request.method.upper(),
@@ -441,7 +441,7 @@ class FlowExecutorView(APIView):
)
return self.restart_flow(keep_context)
self.cancel()
challenge_view = AccessDeniedStage(self, error_message)
challenge_view = AccessDeniedChallengeView(self, error_message)
challenge_view.request = self.request
return to_stage_response(self.request, challenge_view.get(self.request))
@@ -1,4 +1,4 @@
# update website/docs/install-config/configuration/configuration.mdx
# update website/docs/installation/configuration.mdx
# This is the default configuration file
postgresql:
  host: localhost
@ -2,6 +2,7 @@
|
||||
|
||||
import re
|
||||
import socket
|
||||
from collections.abc import Iterable
|
||||
from ipaddress import ip_address, ip_network
|
||||
from textwrap import indent
|
||||
from types import CodeType
|
||||
@ -27,12 +28,6 @@ from authentik.stages.authenticator import devices_for_user
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
ARG_SANITIZE = re.compile(r"[:.-]")
|
||||
|
||||
|
||||
def sanitize_arg(arg_name: str) -> str:
|
||||
return re.sub(ARG_SANITIZE, "_", arg_name)
|
||||
|
||||
|
||||
class BaseEvaluator:
|
||||
"""Validate and evaluate python-based expressions"""
|
||||
@ -182,9 +177,9 @@ class BaseEvaluator:
|
||||
proc = PolicyProcess(PolicyBinding(policy=policy), request=req, connection=None)
|
||||
return proc.profiling_wrapper()
|
||||
|
||||
def wrap_expression(self, expression: str) -> str:
|
||||
def wrap_expression(self, expression: str, params: Iterable[str]) -> str:
|
||||
"""Wrap expression in a function, call it, and save the result as `result`"""
|
||||
handler_signature = ",".join(sanitize_arg(x) for x in self._context.keys())
|
||||
handler_signature = ",".join(params)
|
||||
full_expression = ""
|
||||
full_expression += f"def handler({handler_signature}):\n"
|
||||
full_expression += indent(expression, " ")
|
||||
@ -193,8 +188,8 @@ class BaseEvaluator:
|
||||
|
||||
def compile(self, expression: str) -> CodeType:
|
||||
"""Parse expression. Raises SyntaxError or ValueError if the syntax is incorrect."""
|
||||
expression = self.wrap_expression(expression)
|
||||
return compile(expression, self._filename, "exec")
|
||||
param_keys = self._context.keys()
|
||||
return compile(self.wrap_expression(expression, param_keys), self._filename, "exec")
|
||||
|
||||
def evaluate(self, expression_source: str) -> Any:
|
||||
"""Parse and evaluate expression. If the syntax is incorrect, a SyntaxError is raised.
|
||||
@ -210,7 +205,7 @@ class BaseEvaluator:
|
||||
self.handle_error(exc, expression_source)
|
||||
raise exc
|
||||
try:
|
||||
_locals = {sanitize_arg(x): y for x, y in self._context.items()}
|
||||
_locals = self._context
|
||||
# Yes this is an exec, yes it is potentially bad. Since we limit what variables are
|
||||
# available here, and these policies can only be edited by admins, this is a risk
|
||||
# we're willing to take.
|
||||
|
@ -30,11 +30,6 @@ class TestHTTP(TestCase):
|
||||
request = self.factory.get("/", HTTP_X_FORWARDED_FOR="127.0.0.2")
|
||||
self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.2")
|
||||
|
||||
def test_forward_for_invalid(self):
|
||||
"""Test invalid forward for"""
|
||||
request = self.factory.get("/", HTTP_X_FORWARDED_FOR="foobar")
|
||||
self.assertEqual(ClientIPMiddleware.get_client_ip(request), ClientIPMiddleware.default_ip)
|
||||
|
||||
def test_fake_outpost(self):
|
||||
"""Test faked IP which is overridden by an outpost"""
|
||||
token = Token.objects.create(
|
||||
@ -58,17 +53,6 @@ class TestHTTP(TestCase):
|
||||
},
|
||||
)
|
||||
self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.1")
|
||||
# Invalid, not a real IP
|
||||
self.user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
|
||||
self.user.save()
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
**{
|
||||
ClientIPMiddleware.outpost_remote_ip_header: "foobar",
|
||||
ClientIPMiddleware.outpost_token_header: token.key,
|
||||
},
|
||||
)
|
||||
self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.1")
|
||||
# Valid
|
||||
self.user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
|
||||
self.user.save()
|
||||
|
@ -9,7 +9,7 @@ from uuid import uuid4
|
||||
from dacite.core import from_dict
|
||||
from django.contrib.auth.models import Permission
|
||||
from django.core.cache import cache
|
||||
from django.db import models, transaction
|
||||
from django.db import IntegrityError, models, transaction
|
||||
from django.db.models.base import Model
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from guardian.models import UserObjectPermission
|
||||
@ -53,7 +53,7 @@ class ServiceConnectionInvalid(SentryIgnoredException):
|
||||
class OutpostConfig:
|
||||
"""Configuration an outpost uses to configure it self"""
|
||||
|
||||
# update website/docs/add-secure-apps/outposts/_config.md
|
||||
# update website/docs/outposts/_config.md
|
||||
|
||||
authentik_host: str = ""
|
||||
authentik_host_insecure: bool = False
|
||||
@ -380,22 +380,26 @@ class Outpost(SerializerModel, ManagedModel):
|
||||
"""Get/create token for auto-generated user"""
|
||||
managed = f"goauthentik.io/outpost/{self.token_identifier}"
|
||||
tokens = Token.filter_not_expired(
|
||||
delete_expired=True,
|
||||
identifier=self.token_identifier,
|
||||
intent=TokenIntents.INTENT_API,
|
||||
managed=managed,
|
||||
)
|
||||
token: Token | None = tokens.first()
|
||||
if token:
|
||||
return token
|
||||
return Token.objects.create(
|
||||
user=self.user,
|
||||
identifier=self.token_identifier,
|
||||
intent=TokenIntents.INTENT_API,
|
||||
description=f"Autogenerated by authentik for Outpost {self.name}",
|
||||
expiring=False,
|
||||
managed=managed,
|
||||
)
|
||||
if tokens.exists():
|
||||
return tokens.first()
|
||||
try:
|
||||
return Token.objects.create(
|
||||
user=self.user,
|
||||
identifier=self.token_identifier,
|
||||
intent=TokenIntents.INTENT_API,
|
||||
description=f"Autogenerated by authentik for Outpost {self.name}",
|
||||
expiring=False,
|
||||
managed=managed,
|
||||
)
|
||||
except IntegrityError:
|
||||
# Integrity error happens mostly when managed is reused
|
||||
Token.objects.filter(managed=managed).delete()
|
||||
Token.objects.filter(identifier=self.token_identifier).delete()
|
||||
return self.token
|
||||
|
||||
def get_required_objects(self) -> Iterable[models.Model | str]:
|
||||
"""Get an iterator of all objects the user needs read access to"""
|
||||
|
@ -113,7 +113,7 @@ class PolicyEngine:
|
||||
with (
|
||||
start_span(
|
||||
op="authentik.policy.engine.build",
|
||||
name=self.__pbm,
|
||||
description=self.__pbm,
|
||||
) as span,
|
||||
HIST_POLICIES_ENGINE_TOTAL_TIME.labels(
|
||||
obj_type=class_to_path(self.__pbm.__class__),
|
||||
|
@ -87,7 +87,6 @@ class LDAPOutpostConfigSerializer(ModelSerializer):
|
||||
|
||||
application_slug = SerializerMethodField()
|
||||
bind_flow_slug = CharField(source="authorization_flow.slug")
|
||||
unbind_flow_slug = SerializerMethodField()
|
||||
|
||||
def get_application_slug(self, instance: LDAPProvider) -> str:
|
||||
"""Prioritise backchannel slug over direct application slug"""
|
||||
@ -95,16 +94,6 @@ class LDAPOutpostConfigSerializer(ModelSerializer):
|
||||
return instance.backchannel_application.slug
|
||||
return instance.application.slug
|
||||
|
||||
def get_unbind_flow_slug(self, instance: LDAPProvider) -> str | None:
|
||||
"""Get slug for unbind flow, defaulting to brand's default flow."""
|
||||
flow = instance.invalidation_flow
|
||||
if not flow and "request" in self.context:
|
||||
request = self.context.get("request")
|
||||
flow = request.brand.flow_invalidation
|
||||
if not flow:
|
||||
return None
|
||||
return flow.slug
|
||||
|
||||
class Meta:
|
||||
model = LDAPProvider
|
||||
fields = [
|
||||
@ -112,7 +101,6 @@ class LDAPOutpostConfigSerializer(ModelSerializer):
|
||||
"name",
|
||||
"base_dn",
|
||||
"bind_flow_slug",
|
||||
"unbind_flow_slug",
|
||||
"application_slug",
|
||||
"certificate",
|
||||
"tls_server_name",
|
||||
|
@ -4,13 +4,13 @@ from django.apps.registry import Apps
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
|
||||
from django.db import migrations
|
||||
from django.contrib.auth.management import create_permissions
|
||||
|
||||
|
||||
def migrate_search_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
from guardian.shortcuts import assign_perm
|
||||
from authentik.core.models import User
|
||||
from django.apps import apps as real_apps
|
||||
from django.contrib.auth.management import create_permissions
|
||||
from guardian.shortcuts import UserObjectPermission
|
||||
|
||||
db_alias = schema_editor.connection.alias
|
||||
|
||||
@ -20,25 +20,14 @@ def migrate_search_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
create_permissions(real_apps.get_app_config("authentik_providers_ldap"), using=db_alias)
|
||||
|
||||
LDAPProvider = apps.get_model("authentik_providers_ldap", "ldapprovider")
|
||||
Permission = apps.get_model("auth", "Permission")
|
||||
UserObjectPermission = apps.get_model("guardian", "UserObjectPermission")
|
||||
ContentType = apps.get_model("contenttypes", "ContentType")
|
||||
|
||||
new_prem = Permission.objects.using(db_alias).get(codename="search_full_directory")
|
||||
ct = ContentType.objects.using(db_alias).get(
|
||||
app_label="authentik_providers_ldap",
|
||||
model="ldapprovider",
|
||||
)
|
||||
|
||||
for provider in LDAPProvider.objects.using(db_alias).all():
|
||||
if not provider.search_group:
|
||||
continue
|
||||
for user in provider.search_group.users.using(db_alias).all():
|
||||
UserObjectPermission.objects.using(db_alias).create(
|
||||
user=user,
|
||||
permission=new_prem,
|
||||
object_pk=provider.pk,
|
||||
content_type=ct,
|
||||
for user_pk in (
|
||||
provider.search_group.users.using(db_alias).all().values_list("pk", flat=True)
|
||||
):
|
||||
# We need the correct user model instance to assign the permission
|
||||
assign_perm(
|
||||
"search_full_directory", User.objects.using(db_alias).get(pk=user_pk), provider
|
||||
)
|
||||
|
||||
|
||||
@ -46,7 +35,6 @@ class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_providers_ldap", "0003_ldapprovider_mfa_support_and_more"),
|
||||
("guardian", "0002_generic_permissions_index"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
|
@ -1,23 +0,0 @@
|
||||
# Generated by Django 5.0.9 on 2024-09-26 16:25
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_providers_oauth2", "0018_alter_accesstoken_expires_and_more"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddIndex(
|
||||
model_name="accesstoken",
|
||||
index=models.Index(fields=["token"], name="authentik_p_token_4bc870_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="refreshtoken",
|
||||
index=models.Index(fields=["token"], name="authentik_p_token_1a841f_idx"),
|
||||
),
|
||||
]
|
@ -1,31 +0,0 @@
|
||||
# Generated by Django 5.0.9 on 2024-09-27 14:50
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_providers_oauth2", "0019_accesstoken_authentik_p_token_4bc870_idx_and_more"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveIndex(
|
||||
model_name="accesstoken",
|
||||
name="authentik_p_token_4bc870_idx",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="refreshtoken",
|
||||
name="authentik_p_token_1a841f_idx",
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="accesstoken",
|
||||
index=models.Index(fields=["token", "provider"], name="authentik_p_token_f99422_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="refreshtoken",
|
||||
index=models.Index(fields=["token", "provider"], name="authentik_p_token_a1d921_idx"),
|
||||
),
|
||||
]
|
@ -376,9 +376,6 @@ class AccessToken(SerializerModel, ExpiringModel, BaseGrantModel):
|
||||
_id_token = models.TextField()
|
||||
|
||||
class Meta:
|
||||
indexes = [
|
||||
models.Index(fields=["token", "provider"]),
|
||||
]
|
||||
verbose_name = _("OAuth2 Access Token")
|
||||
verbose_name_plural = _("OAuth2 Access Tokens")
|
||||
|
||||
@ -422,9 +419,6 @@ class RefreshToken(SerializerModel, ExpiringModel, BaseGrantModel):
|
||||
_id_token = models.TextField(verbose_name=_("ID Token"))
|
||||
|
||||
class Meta:
|
||||
indexes = [
|
||||
models.Index(fields=["token", "provider"]),
|
||||
]
|
||||
verbose_name = _("OAuth2 Refresh Token")
|
||||
verbose_name_plural = _("OAuth2 Refresh Tokens")
|
||||
|
||||
|
@ -29,6 +29,7 @@ class TesOAuth2Introspection(OAuthTestCase):
|
||||
self.app = Application.objects.create(
|
||||
name=generate_id(), slug=generate_id(), provider=self.provider
|
||||
)
|
||||
self.app.save()
|
||||
self.user = create_test_admin_user()
|
||||
self.auth = b64encode(
|
||||
f"{self.provider.client_id}:{self.provider.client_secret}".encode()
|
||||
@ -113,41 +114,6 @@ class TesOAuth2Introspection(OAuthTestCase):
|
||||
},
|
||||
)
|
||||
|
||||
def test_introspect_invalid_provider(self):
|
||||
"""Test introspection (mismatched provider and token)"""
|
||||
provider: OAuth2Provider = OAuth2Provider.objects.create(
|
||||
name=generate_id(),
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris="",
|
||||
signing_key=create_test_cert(),
|
||||
)
|
||||
auth = b64encode(f"{provider.client_id}:{provider.client_secret}".encode()).decode()
|
||||
|
||||
token: AccessToken = AccessToken.objects.create(
|
||||
provider=self.provider,
|
||||
user=self.user,
|
||||
token=generate_id(),
|
||||
auth_time=timezone.now(),
|
||||
_scope="openid user profile",
|
||||
_id_token=json.dumps(
|
||||
asdict(
|
||||
IDToken("foo", "bar"),
|
||||
)
|
||||
),
|
||||
)
|
||||
res = self.client.post(
|
||||
reverse("authentik_providers_oauth2:token-introspection"),
|
||||
HTTP_AUTHORIZATION=f"Basic {auth}",
|
||||
data={"token": token.token},
|
||||
)
|
||||
self.assertEqual(res.status_code, 200)
|
||||
self.assertJSONEqual(
|
||||
res.content.decode(),
|
||||
{
|
||||
"active": False,
|
||||
},
|
||||
)
|
||||
|
||||
def test_introspect_invalid_auth(self):
|
||||
"""Test introspect (invalid auth)"""
|
||||
res = self.client.post(
|
||||
|
@ -12,7 +12,6 @@ from authentik.providers.oauth2.api.tokens import (
|
||||
)
|
||||
from authentik.providers.oauth2.views.authorize import AuthorizationFlowInitView
|
||||
from authentik.providers.oauth2.views.device_backchannel import DeviceView
|
||||
from authentik.providers.oauth2.views.end_session import EndSessionView
|
||||
from authentik.providers.oauth2.views.introspection import TokenIntrospectionView
|
||||
from authentik.providers.oauth2.views.jwks import JWKSView
|
||||
from authentik.providers.oauth2.views.provider import ProviderInfoView
|
||||
@ -45,7 +44,7 @@ urlpatterns = [
|
||||
),
|
||||
path(
|
||||
"<slug:application_slug>/end-session/",
|
||||
EndSessionView.as_view(),
|
||||
RedirectView.as_view(pattern_name="authentik_core:if-session-end", query_string=True),
|
||||
name="end-session",
|
||||
),
|
||||
path("<slug:application_slug>/jwks/", JWKSView.as_view(), name="jwks"),
|
||||
|
@ -1,45 +0,0 @@
|
||||
"""oauth2 provider end_session Views"""
|
||||
|
||||
from django.http import Http404, HttpRequest, HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
||||
from authentik.core.models import Application
|
||||
from authentik.flows.models import Flow, in_memory_stage
|
||||
from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, FlowPlanner
|
||||
from authentik.flows.stage import SessionEndStage
|
||||
from authentik.flows.views.executor import SESSION_KEY_PLAN
|
||||
from authentik.lib.utils.urls import redirect_with_qs
|
||||
from authentik.policies.views import PolicyAccessView
|
||||
|
||||
|
||||
class EndSessionView(PolicyAccessView):
|
||||
"""Redirect to application's provider's invalidation flow"""
|
||||
|
||||
flow: Flow
|
||||
|
||||
def resolve_provider_application(self):
|
||||
self.application = get_object_or_404(Application, slug=self.kwargs["application_slug"])
|
||||
self.provider = self.application.get_provider()
|
||||
if not self.provider:
|
||||
raise Http404
|
||||
self.flow = self.provider.invalidation_flow or self.request.brand.flow_invalidation
|
||||
if not self.flow:
|
||||
raise Http404
|
||||
|
||||
def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
||||
"""Dispatch the flow planner for the invalidation flow"""
|
||||
planner = FlowPlanner(self.flow)
|
||||
planner.allow_empty_flows = True
|
||||
plan = planner.plan(
|
||||
request,
|
||||
{
|
||||
PLAN_CONTEXT_APPLICATION: self.application,
|
||||
},
|
||||
)
|
||||
plan.insert_stage(in_memory_stage(SessionEndStage))
|
||||
request.session[SESSION_KEY_PLAN] = plan
|
||||
return redirect_with_qs(
|
||||
"authentik_core:if-flow",
|
||||
self.request.GET,
|
||||
flow_slug=self.flow.slug,
|
||||
)
|
@@ -46,10 +46,10 @@ class TokenIntrospectionParams:
        if not provider:
            raise TokenIntrospectionError

        access_token = AccessToken.objects.filter(token=raw_token, provider=provider).first()
        access_token = AccessToken.objects.filter(token=raw_token).first()
        if access_token:
            return TokenIntrospectionParams(access_token, provider)
        refresh_token = RefreshToken.objects.filter(token=raw_token, provider=provider).first()
        refresh_token = RefreshToken.objects.filter(token=raw_token).first()
        if refresh_token:
            return TokenIntrospectionParams(refresh_token, provider)
        LOGGER.debug("Token does not exist", token=raw_token)

@@ -28,7 +28,7 @@ class ProxyDockerController(DockerController):
        labels = super()._get_labels()
        labels["traefik.enable"] = "true"
        labels[f"traefik.http.routers.{traefik_name}-router.rule"] = (
            f"({' || '.join([f'Host({host})' for host in hosts])})"
            f"({' || '.join([f'Host(`{host}`)' for host in hosts])})"
            f" && PathPrefix(`/outpost.goauthentik.io`)"
        )
        labels[f"traefik.http.routers.{traefik_name}-router.tls"] = "true"
@ -24,7 +24,6 @@ class ProxyProviderTests(APITestCase):
|
||||
"name": generate_id(),
|
||||
"mode": ProxyMode.PROXY,
|
||||
"authorization_flow": create_test_flow().pk.hex,
|
||||
"invalidation_flow": create_test_flow().pk.hex,
|
||||
"external_host": "http://localhost",
|
||||
"internal_host": "http://localhost",
|
||||
"basic_auth_enabled": True,
|
||||
@ -42,7 +41,6 @@ class ProxyProviderTests(APITestCase):
|
||||
"name": generate_id(),
|
||||
"mode": ProxyMode.PROXY,
|
||||
"authorization_flow": create_test_flow().pk.hex,
|
||||
"invalidation_flow": create_test_flow().pk.hex,
|
||||
"external_host": "http://localhost",
|
||||
"internal_host": "http://localhost",
|
||||
"basic_auth_enabled": True,
|
||||
@ -66,7 +64,6 @@ class ProxyProviderTests(APITestCase):
|
||||
"name": generate_id(),
|
||||
"mode": ProxyMode.PROXY,
|
||||
"authorization_flow": create_test_flow().pk.hex,
|
||||
"invalidation_flow": create_test_flow().pk.hex,
|
||||
"external_host": "http://localhost",
|
||||
},
|
||||
)
|
||||
@ -85,7 +82,6 @@ class ProxyProviderTests(APITestCase):
|
||||
"name": name,
|
||||
"mode": ProxyMode.PROXY,
|
||||
"authorization_flow": create_test_flow().pk.hex,
|
||||
"invalidation_flow": create_test_flow().pk.hex,
|
||||
"external_host": "http://localhost",
|
||||
"internal_host": "http://localhost",
|
||||
},
|
||||
@ -103,7 +99,6 @@ class ProxyProviderTests(APITestCase):
|
||||
"name": name,
|
||||
"mode": ProxyMode.PROXY,
|
||||
"authorization_flow": create_test_flow().pk.hex,
|
||||
"invalidation_flow": create_test_flow().pk.hex,
|
||||
"external_host": "http://localhost",
|
||||
"internal_host": "http://localhost",
|
||||
},
|
||||
@ -119,7 +114,6 @@ class ProxyProviderTests(APITestCase):
|
||||
"name": name,
|
||||
"mode": ProxyMode.PROXY,
|
||||
"authorization_flow": create_test_flow().pk.hex,
|
||||
"invalidation_flow": create_test_flow().pk.hex,
|
||||
"external_host": "http://localhost",
|
||||
"internal_host": "http://localhost",
|
||||
},
|
||||
|
@ -188,9 +188,6 @@ class SAMLProviderImportSerializer(PassiveSerializer):
|
||||
authorization_flow = PrimaryKeyRelatedField(
|
||||
queryset=Flow.objects.filter(designation=FlowDesignation.AUTHORIZATION),
|
||||
)
|
||||
invalidation_flow = PrimaryKeyRelatedField(
|
||||
queryset=Flow.objects.filter(designation=FlowDesignation.INVALIDATION),
|
||||
)
|
||||
file = FileField()
|
||||
|
||||
|
||||
@ -280,9 +277,7 @@ class SAMLProviderViewSet(UsedByMixin, ModelViewSet):
|
||||
try:
|
||||
metadata = ServiceProviderMetadataParser().parse(file.read().decode())
|
||||
metadata.to_provider(
|
||||
data.validated_data["name"],
|
||||
data.validated_data["authorization_flow"],
|
||||
data.validated_data["invalidation_flow"],
|
||||
data.validated_data["name"], data.validated_data["authorization_flow"]
|
||||
)
|
||||
except ValueError as exc: # pragma: no cover
|
||||
LOGGER.warning(str(exc))
|
||||
|
@ -49,13 +49,12 @@ class ServiceProviderMetadata:
|
||||
|
||||
signing_keypair: CertificateKeyPair | None = None
|
||||
|
||||
def to_provider(
|
||||
self, name: str, authorization_flow: Flow, invalidation_flow: Flow
|
||||
) -> SAMLProvider:
|
||||
def to_provider(self, name: str, authorization_flow: Flow) -> SAMLProvider:
|
||||
"""Create a SAMLProvider instance from the details. `name` is required,
|
||||
as depending on the metadata CertificateKeypairs might have to be created."""
|
||||
provider = SAMLProvider.objects.create(
|
||||
name=name, authorization_flow=authorization_flow, invalidation_flow=invalidation_flow
|
||||
name=name,
|
||||
authorization_flow=authorization_flow,
|
||||
)
|
||||
provider.issuer = self.entity_id
|
||||
provider.sp_binding = self.acs_binding
|
||||
|
@ -47,12 +47,11 @@ class TestSAMLProviderAPI(APITestCase):
|
||||
data={
|
||||
"name": generate_id(),
|
||||
"authorization_flow": create_test_flow().pk,
|
||||
"invalidation_flow": create_test_flow().pk,
|
||||
"acs_url": "http://localhost",
|
||||
"signing_kp": cert.pk,
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertEqual(400, response.status_code)
|
||||
self.assertJSONEqual(
|
||||
response.content,
|
||||
{
|
||||
@ -69,13 +68,12 @@ class TestSAMLProviderAPI(APITestCase):
|
||||
data={
|
||||
"name": generate_id(),
|
||||
"authorization_flow": create_test_flow().pk,
|
||||
"invalidation_flow": create_test_flow().pk,
|
||||
"acs_url": "http://localhost",
|
||||
"signing_kp": cert.pk,
|
||||
"sign_assertion": True,
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
self.assertEqual(201, response.status_code)
|
||||
|
||||
def test_metadata(self):
|
||||
"""Test metadata export (normal)"""
|
||||
@ -133,7 +131,6 @@ class TestSAMLProviderAPI(APITestCase):
|
||||
"file": metadata,
|
||||
"name": generate_id(),
|
||||
"authorization_flow": create_test_flow(FlowDesignation.AUTHORIZATION).pk,
|
||||
"invalidation_flow": create_test_flow(FlowDesignation.INVALIDATION).pk,
|
||||
},
|
||||
format="multipart",
|
||||
)
|
||||
|
@ -82,7 +82,7 @@ class TestServiceProviderMetadataParser(TestCase):
|
||||
def test_simple(self):
|
||||
"""Test simple metadata without Signing"""
|
||||
metadata = ServiceProviderMetadataParser().parse(load_fixture("fixtures/simple.xml"))
|
||||
provider = metadata.to_provider("test", self.flow, self.flow)
|
||||
provider = metadata.to_provider("test", self.flow)
|
||||
self.assertEqual(provider.acs_url, "http://localhost:8080/saml/acs")
|
||||
self.assertEqual(provider.issuer, "http://localhost:8080/saml/metadata")
|
||||
self.assertEqual(provider.sp_binding, SAMLBindings.POST)
|
||||
@ -95,7 +95,7 @@ class TestServiceProviderMetadataParser(TestCase):
|
||||
"""Test Metadata with signing cert"""
|
||||
create_test_cert()
|
||||
metadata = ServiceProviderMetadataParser().parse(load_fixture("fixtures/cert.xml"))
|
||||
provider = metadata.to_provider("test", self.flow, self.flow)
|
||||
provider = metadata.to_provider("test", self.flow)
|
||||
self.assertEqual(provider.acs_url, "http://localhost:8080/apps/user_saml/saml/acs")
|
||||
self.assertEqual(provider.issuer, "http://localhost:8080/apps/user_saml/saml/metadata")
|
||||
self.assertEqual(provider.sp_binding, SAMLBindings.POST)
|
||||
|
@ -1,8 +1,8 @@
|
||||
"""SLO Views"""
|
||||
|
||||
from django.http import Http404, HttpRequest
|
||||
from django.http import HttpRequest
|
||||
from django.http.response import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.shortcuts import get_object_or_404, redirect
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.clickjacking import xframe_options_sameorigin
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
@ -10,11 +10,6 @@ from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.models import Application
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.flows.models import Flow, in_memory_stage
|
||||
from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, FlowPlanner
|
||||
from authentik.flows.stage import SessionEndStage
|
||||
from authentik.flows.views.executor import SESSION_KEY_PLAN
|
||||
from authentik.lib.utils.urls import redirect_with_qs
|
||||
from authentik.lib.views import bad_request_message
|
||||
from authentik.policies.views import PolicyAccessView
|
||||
from authentik.providers.saml.exceptions import CannotHandleAssertion
|
||||
@ -33,16 +28,11 @@ class SAMLSLOView(PolicyAccessView):
|
||||
""" "SAML SLO Base View, which plans a flow and injects our final stage.
|
||||
Calls get/post handler."""
|
||||
|
||||
flow: Flow
|
||||
|
||||
def resolve_provider_application(self):
|
||||
self.application = get_object_or_404(Application, slug=self.kwargs["application_slug"])
|
||||
self.provider: SAMLProvider = get_object_or_404(
|
||||
SAMLProvider, pk=self.application.provider_id
|
||||
)
|
||||
self.flow = self.provider.invalidation_flow or self.request.brand.flow_invalidation
|
||||
if not self.flow:
|
||||
raise Http404
|
||||
|
||||
def check_saml_request(self) -> HttpRequest | None:
|
||||
"""Handler to verify the SAML Request. Must be implemented by a subclass"""
|
||||
@ -55,20 +45,9 @@ class SAMLSLOView(PolicyAccessView):
|
||||
method_response = self.check_saml_request()
|
||||
if method_response:
|
||||
return method_response
|
||||
planner = FlowPlanner(self.flow)
|
||||
planner.allow_empty_flows = True
|
||||
plan = planner.plan(
|
||||
request,
|
||||
{
|
||||
PLAN_CONTEXT_APPLICATION: self.application,
|
||||
},
|
||||
)
|
||||
plan.insert_stage(in_memory_stage(SessionEndStage))
|
||||
request.session[SESSION_KEY_PLAN] = plan
|
||||
return redirect_with_qs(
|
||||
"authentik_core:if-flow",
|
||||
self.request.GET,
|
||||
flow_slug=self.flow.slug,
|
||||
return redirect(
|
||||
"authentik_core:if-session-end",
|
||||
application_slug=self.kwargs["application_slug"],
|
||||
)
|
||||
|
||||
def post(self, request: HttpRequest, application_slug: str) -> HttpResponse:
|
||||
|
@ -26,7 +26,6 @@ class SCIMProviderSerializer(ProviderSerializer):
|
||||
"verbose_name_plural",
|
||||
"meta_model_name",
|
||||
"url",
|
||||
"verify_certificates",
|
||||
"token",
|
||||
"exclude_users_service_account",
|
||||
"filter_group",
|
||||
|
@ -42,7 +42,6 @@ class SCIMClient[TModel: "Model", TConnection: "Model", TSchema: "BaseModel"](
|
||||
def __init__(self, provider: SCIMProvider):
|
||||
super().__init__(provider)
|
||||
self._session = get_http_session()
|
||||
self._session.verify = provider.verify_certificates
|
||||
self.provider = provider
|
||||
# Remove trailing slashes as we assume the URL doesn't have any
|
||||
base_url = provider.url
|
||||
|
@ -1,18 +0,0 @@
|
||||
# Generated by Django 5.0.9 on 2024-09-19 14:02
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_providers_scim", "0009_alter_scimmapping_options"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="scimprovider",
|
||||
name="verify_certificates",
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
@ -68,7 +68,6 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider):
|
||||
|
||||
url = models.TextField(help_text=_("Base URL to SCIM requests, usually ends in /v2"))
|
||||
token = models.TextField(help_text=_("Authentication token"))
|
||||
verify_certificates = models.BooleanField(default=True)
|
||||
|
||||
property_mappings_group = models.ManyToManyField(
|
||||
PropertyMapping,
|
||||
|
@@ -22,7 +22,7 @@ def create_admin_group(user: User) -> Group:
    return group


def create_recovery_token(user: User, expiry: datetime, generated_from: str) -> tuple[Token, str]:
def create_recovery_token(user: User, expiry: datetime, generated_from: str) -> (Token, str):
    """Create recovery token and associated link"""
    _now = now()
    token = Token.objects.create(
@@ -87,11 +87,7 @@ def task_error_hook(task_id: str, exception: Exception, traceback, *args, **kwargs):

def _get_startup_tasks_default_tenant() -> list[Callable]:
    """Get all tasks to be run on startup for the default tenant"""
    from authentik.outposts.tasks import outpost_connection_discovery

    return [
        outpost_connection_discovery,
    ]
    return []


def _get_startup_tasks_all_tenants() -> list[Callable]:
@ -2,7 +2,6 @@
|
||||
|
||||
from collections.abc import Callable
|
||||
from hashlib import sha512
|
||||
from ipaddress import ip_address
|
||||
from time import perf_counter, time
|
||||
from typing import Any
|
||||
|
||||
@ -175,7 +174,6 @@ class ClientIPMiddleware:
|
||||
|
||||
def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]):
|
||||
self.get_response = get_response
|
||||
self.logger = get_logger().bind()
|
||||
|
||||
def _get_client_ip_from_meta(self, meta: dict[str, Any]) -> str:
|
||||
"""Attempt to get the client's IP by checking common HTTP Headers.
|
||||
@ -187,16 +185,11 @@ class ClientIPMiddleware:
|
||||
"HTTP_X_FORWARDED_FOR",
|
||||
"REMOTE_ADDR",
|
||||
)
|
||||
try:
|
||||
for _header in headers:
|
||||
if _header in meta:
|
||||
ips: list[str] = meta.get(_header).split(",")
|
||||
# Ensure the IP parses as a valid IP
|
||||
return str(ip_address(ips[0].strip()))
|
||||
return self.default_ip
|
||||
except ValueError as exc:
|
||||
self.logger.debug("Invalid remote IP", exc=exc)
|
||||
return self.default_ip
|
||||
for _header in headers:
|
||||
if _header in meta:
|
||||
ips: list[str] = meta.get(_header).split(",")
|
||||
return ips[0].strip()
|
||||
return self.default_ip
|
||||
|
||||
# FIXME: this should probably not be in `root` but rather in a middleware in `outposts`
|
||||
# but for now it's fine
|
||||
@ -228,16 +221,12 @@ class ClientIPMiddleware:
|
||||
)
|
||||
return None
|
||||
# Update sentry scope to include correct IP
|
||||
sentry_user = Scope.get_isolation_scope()._user or {}
|
||||
sentry_user["ip_address"] = delegated_ip
|
||||
Scope.get_isolation_scope().set_user(sentry_user)
|
||||
user = Scope.get_isolation_scope()._user or {}
|
||||
user["ip_address"] = delegated_ip
|
||||
Scope.get_isolation_scope().set_user(user)
|
||||
# Set the outpost service account on the request
|
||||
setattr(request, self.request_attr_outpost_user, user)
|
||||
try:
|
||||
return str(ip_address(delegated_ip))
|
||||
except ValueError as exc:
|
||||
self.logger.debug("Invalid remote IP from Outpost", exc=exc)
|
||||
return None
|
||||
return delegated_ip
|
||||
|
||||
def _get_client_ip(self, request: HttpRequest | None) -> str:
|
||||
"""Attempt to get the client's IP by checking common HTTP Headers.
|
||||
|
@ -1,7 +1,6 @@
|
||||
"""authentik storage backends"""
|
||||
|
||||
import os
|
||||
from urllib.parse import parse_qsl, urlsplit
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import SuspiciousOperation
|
||||
@ -111,34 +110,3 @@ class S3Storage(BaseS3Storage):
|
||||
if self.querystring_auth:
|
||||
return url
|
||||
return self._strip_signing_parameters(url)
|
||||
|
||||
def _strip_signing_parameters(self, url):
|
||||
# Boto3 does not currently support generating URLs that are unsigned. Instead
|
||||
# we take the signed URLs and strip any querystring params related to signing
|
||||
# and expiration.
|
||||
# Note that this may end up with URLs that are still invalid, especially if
|
||||
# params are passed in that only work with signed URLs, e.g. response header
|
||||
# params.
|
||||
# The code attempts to strip all query parameters that match names of known
|
||||
# parameters from v2 and v4 signatures, regardless of the actual signature
|
||||
# version used.
|
||||
split_url = urlsplit(url)
|
||||
qs = parse_qsl(split_url.query, keep_blank_values=True)
|
||||
blacklist = {
|
||||
"x-amz-algorithm",
|
||||
"x-amz-credential",
|
||||
"x-amz-date",
|
||||
"x-amz-expires",
|
||||
"x-amz-signedheaders",
|
||||
"x-amz-signature",
|
||||
"x-amz-security-token",
|
||||
"awsaccesskeyid",
|
||||
"expires",
|
||||
"signature",
|
||||
}
|
||||
filtered_qs = ((key, val) for key, val in qs if key.lower() not in blacklist)
|
||||
# Note: Parameters that did not have a value in the original query string will
|
||||
# have an '=' sign appended to it, e.g ?foo&bar becomes ?foo=&bar=
|
||||
joined_qs = ("=".join(keyval) for keyval in filtered_qs)
|
||||
split_url = split_url._replace(query="&".join(joined_qs))
|
||||
return split_url.geturl()
|
||||
|
@ -3,7 +3,6 @@
|
||||
from typing import Any
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from drf_spectacular.utils import extend_schema, inline_serializer
|
||||
from guardian.shortcuts import get_objects_for_user
|
||||
from rest_framework.decorators import action
|
||||
@ -40,8 +39,9 @@ class LDAPSourceSerializer(SourceSerializer):
|
||||
"""Get cached source connectivity"""
|
||||
return cache.get(CACHE_KEY_STATUS + source.slug, None)
|
||||
|
||||
def validate_sync_users_password(self, sync_users_password: bool) -> bool:
|
||||
def validate(self, attrs: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Check that only a single source has password_sync on"""
|
||||
sync_users_password = attrs.get("sync_users_password", True)
|
||||
if sync_users_password:
|
||||
sources = LDAPSource.objects.filter(sync_users_password=True)
|
||||
if self.instance:
|
||||
@ -49,31 +49,11 @@ class LDAPSourceSerializer(SourceSerializer):
|
||||
if sources.exists():
|
||||
raise ValidationError(
|
||||
{
|
||||
"sync_users_password": _(
|
||||
"sync_users_password": (
|
||||
"Only a single LDAP Source with password synchronization is allowed"
|
||||
)
|
||||
}
|
||||
)
|
||||
return sync_users_password
|
||||
|
||||
def validate(self, attrs: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Validate property mappings with sync_ flags"""
|
||||
types = ["user", "group"]
|
||||
for type in types:
|
||||
toggle_value = attrs.get(f"sync_{type}s", False)
|
||||
mappings_field = f"{type}_property_mappings"
|
||||
mappings_value = attrs.get(mappings_field, [])
|
||||
if toggle_value and len(mappings_value) == 0:
|
||||
raise ValidationError(
|
||||
{
|
||||
mappings_field: _(
|
||||
(
|
||||
"When 'Sync {type}s' is enabled, '{type}s property "
|
||||
"mappings' cannot be empty."
|
||||
).format(type=type)
|
||||
)
|
||||
}
|
||||
)
|
||||
return super().validate(attrs)
|
||||
|
||||
class Meta:
|
||||
@ -186,12 +166,11 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
|
||||
for sync_class in SYNC_CLASSES:
|
||||
class_name = sync_class.name()
|
||||
all_objects.setdefault(class_name, [])
|
||||
for page in sync_class(source).get_objects(size_limit=10):
|
||||
for obj in page:
|
||||
obj: dict
|
||||
obj.pop("raw_attributes", None)
|
||||
obj.pop("raw_dn", None)
|
||||
all_objects[class_name].append(obj)
|
||||
for obj in sync_class(source).get_objects(size_limit=10):
|
||||
obj: dict
|
||||
obj.pop("raw_attributes", None)
|
||||
obj.pop("raw_dn", None)
|
||||
all_objects[class_name].append(obj)
|
||||
return Response(data=all_objects)
|
||||
|
||||
|
||||
|
@ -26,16 +26,17 @@ def sync_ldap_source_on_save(sender, instance: LDAPSource, **_):
|
||||
"""Ensure that source is synced on save (if enabled)"""
|
||||
if not instance.enabled:
|
||||
return
|
||||
ldap_connectivity_check.delay(instance.pk)
|
||||
# Don't sync sources when they don't have any property mappings. This will only happen if:
|
||||
# - the user forgets to set them or
|
||||
# - the source is newly created, this is the first save event
|
||||
# and the mappings are created with an m2m event
|
||||
if instance.sync_users and not instance.user_property_mappings.exists():
|
||||
return
|
||||
if instance.sync_groups and not instance.group_property_mappings.exists():
|
||||
if (
|
||||
not instance.user_property_mappings.exists()
|
||||
or not instance.group_property_mappings.exists()
|
||||
):
|
||||
return
|
||||
ldap_sync_single.delay(instance.pk)
|
||||
ldap_connectivity_check.delay(instance.pk)
|
||||
|
||||
|
||||
@receiver(password_validate)
|
||||
|
@ -38,11 +38,7 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
|
||||
search_base=self.base_dn_groups,
|
||||
search_filter=self._source.group_object_filter,
|
||||
search_scope=SUBTREE,
|
||||
attributes=[
|
||||
ALL_ATTRIBUTES,
|
||||
ALL_OPERATIONAL_ATTRIBUTES,
|
||||
self._source.object_uniqueness_field,
|
||||
],
|
||||
attributes=[ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES],
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
@ -57,9 +53,9 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
|
||||
continue
|
||||
attributes = group.get("attributes", {})
|
||||
group_dn = flatten(flatten(group.get("entryDN", group.get("dn"))))
|
||||
if not attributes.get(self._source.object_uniqueness_field):
|
||||
if self._source.object_uniqueness_field not in attributes:
|
||||
self.message(
|
||||
f"Uniqueness field not found/not set in attributes: '{group_dn}'",
|
||||
f"Cannot find uniqueness field in attributes: '{group_dn}'",
|
||||
attributes=attributes.keys(),
|
||||
dn=group_dn,
|
||||
)
|
||||
|
@ -40,11 +40,7 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
|
||||
search_base=self.base_dn_users,
|
||||
search_filter=self._source.user_object_filter,
|
||||
search_scope=SUBTREE,
|
||||
attributes=[
|
||||
ALL_ATTRIBUTES,
|
||||
ALL_OPERATIONAL_ATTRIBUTES,
|
||||
self._source.object_uniqueness_field,
|
||||
],
|
||||
attributes=[ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES],
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
@ -59,9 +55,9 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
|
||||
continue
|
||||
attributes = user.get("attributes", {})
|
||||
user_dn = flatten(user.get("entryDN", user.get("dn")))
|
||||
if not attributes.get(self._source.object_uniqueness_field):
|
||||
if self._source.object_uniqueness_field not in attributes:
|
||||
self.message(
|
||||
f"Uniqueness field not found/not set in attributes: '{user_dn}'",
|
||||
f"Cannot find uniqueness field in attributes: '{user_dn}'",
|
||||
attributes=attributes.keys(),
|
||||
dn=user_dn,
|
||||
)
|
||||
|
authentik/sources/ldap/sync/vendor/ms_ad.py (vendored)
@@ -78,9 +78,7 @@ class MicrosoftActiveDirectory(BaseLDAPSynchronizer):
        # /useraccountcontrol-manipulate-account-properties
        uac_bit = attributes.get("userAccountControl", 512)
        uac = UserAccountControl(uac_bit)
        is_active = (
            UserAccountControl.ACCOUNTDISABLE not in uac and UserAccountControl.LOCKOUT not in uac
        )
        is_active = UserAccountControl.ACCOUNTDISABLE not in uac
        if is_active != user.is_active:
            user.is_active = is_active
            user.save()
@ -50,35 +50,3 @@ class LDAPAPITests(APITestCase):
|
||||
}
|
||||
)
|
||||
self.assertFalse(serializer.is_valid())
|
||||
|
||||
def test_sync_users_mapping_empty(self):
|
||||
"""Check that when sync_users is enabled, property mappings must be set"""
|
||||
serializer = LDAPSourceSerializer(
|
||||
data={
|
||||
"name": "foo",
|
||||
"slug": " foo",
|
||||
"server_uri": "ldaps://1.2.3.4",
|
||||
"bind_cn": "",
|
||||
"bind_password": LDAP_PASSWORD,
|
||||
"base_dn": "dc=foo",
|
||||
"sync_users": True,
|
||||
"user_property_mappings": [],
|
||||
}
|
||||
)
|
||||
self.assertFalse(serializer.is_valid())
|
||||
|
||||
def test_sync_groups_mapping_empty(self):
|
||||
"""Check that when sync_groups is enabled, property mappings must be set"""
|
||||
serializer = LDAPSourceSerializer(
|
||||
data={
|
||||
"name": "foo",
|
||||
"slug": " foo",
|
||||
"server_uri": "ldaps://1.2.3.4",
|
||||
"bind_cn": "",
|
||||
"bind_password": LDAP_PASSWORD,
|
||||
"base_dn": "dc=foo",
|
||||
"sync_groups": True,
|
||||
"group_property_mappings": [],
|
||||
}
|
||||
)
|
||||
self.assertFalse(serializer.is_valid())
|
||||
|
@ -15,13 +15,12 @@ from authentik.sources.oauth.models import OAuthSource
|
||||
from authentik.sources.oauth.types.registry import SourceType, registry
|
||||
from authentik.sources.oauth.views.callback import OAuthCallback
|
||||
from authentik.sources.oauth.views.redirect import OAuthRedirect
|
||||
from authentik.stages.identification.stage import LoginChallengeMixin
|
||||
|
||||
LOGGER = get_logger()
|
||||
APPLE_CLIENT_ID_PARTS = 3
|
||||
|
||||
|
||||
class AppleLoginChallenge(LoginChallengeMixin, Challenge):
|
||||
class AppleLoginChallenge(Challenge):
|
||||
"""Special challenge for apple-native authentication flow, which happens on the client."""
|
||||
|
||||
client_id = CharField()
|
||||
|
@ -19,10 +19,9 @@ from authentik.core.models import (
|
||||
from authentik.core.types import UILoginButton, UserSettingSerializer
|
||||
from authentik.flows.challenge import Challenge, ChallengeResponse
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.stages.identification.stage import LoginChallengeMixin
|
||||
|
||||
|
||||
class PlexAuthenticationChallenge(LoginChallengeMixin, Challenge):
|
||||
class PlexAuthenticationChallenge(Challenge):
|
||||
"""Challenge shown to the user in identification stage"""
|
||||
|
||||
client_id = CharField()
|
||||
|
@ -1,26 +0,0 @@
|
||||
# Generated by Django 5.0.9 on 2024-10-10 15:45
|
||||
|
||||
from django.db import migrations
|
||||
from django.apps.registry import Apps
|
||||
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
|
||||
|
||||
def fix_X509SubjectName(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
db_alias = schema_editor.connection.alias
|
||||
|
||||
SAMLSource = apps.get_model("authentik_sources_saml", "SAMLSource")
|
||||
SAMLSource.objects.using(db_alias).filter(
|
||||
name_id_policy="urn:oasis:names:tc:SAML:2.0:nameid-format:X509SubjectName"
|
||||
).update(name_id_policy="urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName")
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_sources_saml", "0016_samlsource_encryption_kp"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(fix_X509SubjectName),
|
||||
]
|
@@ -19,7 +19,7 @@ NS_MAP = {
SAML_NAME_ID_FORMAT_EMAIL = "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress"
SAML_NAME_ID_FORMAT_PERSISTENT = "urn:oasis:names:tc:SAML:2.0:nameid-format:persistent"
SAML_NAME_ID_FORMAT_UNSPECIFIED = "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified"
SAML_NAME_ID_FORMAT_X509 = "urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName"
SAML_NAME_ID_FORMAT_X509 = "urn:oasis:names:tc:SAML:2.0:nameid-format:X509SubjectName"
SAML_NAME_ID_FORMAT_WINDOWS = "urn:oasis:names:tc:SAML:2.0:nameid-format:WindowsDomainQualifiedName"
SAML_NAME_ID_FORMAT_TRANSIENT = "urn:oasis:names:tc:SAML:2.0:nameid-format:transient"
@ -1,5 +1,6 @@
|
||||
"""SAML Service Provider Metadata Processor"""
|
||||
|
||||
from collections.abc import Iterator
|
||||
from typing import Optional
|
||||
|
||||
from django.http import HttpRequest
|
||||
@ -12,6 +13,11 @@ from authentik.sources.saml.processors.constants import (
|
||||
NS_SAML_METADATA,
|
||||
NS_SIGNATURE,
|
||||
SAML_BINDING_POST,
|
||||
SAML_NAME_ID_FORMAT_EMAIL,
|
||||
SAML_NAME_ID_FORMAT_PERSISTENT,
|
||||
SAML_NAME_ID_FORMAT_TRANSIENT,
|
||||
SAML_NAME_ID_FORMAT_WINDOWS,
|
||||
SAML_NAME_ID_FORMAT_X509,
|
||||
)
|
||||
|
||||
|
||||
@ -54,10 +60,19 @@ class MetadataProcessor:
|
||||
return key_descriptor
|
||||
return None
|
||||
|
||||
def get_name_id_format(self) -> Element:
|
||||
element = Element(f"{{{NS_SAML_METADATA}}}NameIDFormat")
|
||||
element.text = self.source.name_id_policy
|
||||
return element
|
||||
def get_name_id_formats(self) -> Iterator[Element]:
|
||||
"""Get compatible NameID Formats"""
|
||||
formats = [
|
||||
SAML_NAME_ID_FORMAT_EMAIL,
|
||||
SAML_NAME_ID_FORMAT_PERSISTENT,
|
||||
SAML_NAME_ID_FORMAT_X509,
|
||||
SAML_NAME_ID_FORMAT_WINDOWS,
|
||||
SAML_NAME_ID_FORMAT_TRANSIENT,
|
||||
]
|
||||
for name_id_format in formats:
|
||||
element = Element(f"{{{NS_SAML_METADATA}}}NameIDFormat")
|
||||
element.text = name_id_format
|
||||
yield element
|
||||
|
||||
def build_entity_descriptor(self) -> str:
|
||||
"""Build full EntityDescriptor"""
|
||||
@ -77,7 +92,8 @@ class MetadataProcessor:
|
||||
if encryption_descriptor is not None:
|
||||
sp_sso_descriptor.append(encryption_descriptor)
|
||||
|
||||
sp_sso_descriptor.append(self.get_name_id_format())
|
||||
for name_id_format in self.get_name_id_formats():
|
||||
sp_sso_descriptor.append(name_id_format)
|
||||
|
||||
assertion_consumer_service = SubElement(
|
||||
sp_sso_descriptor, f"{{{NS_SAML_METADATA}}}AssertionConsumerService"
|
||||
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -96,9 +96,8 @@ class ConsentStageView(ChallengeStageView):
|
||||
if PLAN_CONTEXT_PENDING_USER in self.executor.plan.context:
|
||||
user = self.executor.plan.context[PLAN_CONTEXT_PENDING_USER]
|
||||
|
||||
# Remove expired consents to prevent database unique constraints errors
|
||||
consent: UserConsent | None = UserConsent.filter_not_expired(
|
||||
delete_expired=True, user=user, application=application
|
||||
user=user, application=application
|
||||
).first()
|
||||
self.executor.plan.context[PLAN_CONTEXT_CONSENT] = consent
|
||||
|
||||
|
@ -26,31 +26,23 @@ from authentik.flows.models import FlowDesignation
|
||||
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
|
||||
from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, ChallengeStageView
|
||||
from authentik.flows.views.executor import SESSION_KEY_APPLICATION_PRE, SESSION_KEY_GET
|
||||
from authentik.lib.utils.reflection import all_subclasses
|
||||
from authentik.lib.utils.urls import reverse_with_qs
|
||||
from authentik.root.middleware import ClientIPMiddleware
|
||||
from authentik.sources.oauth.types.apple import AppleLoginChallenge
|
||||
from authentik.sources.plex.models import PlexAuthenticationChallenge
|
||||
from authentik.stages.identification.models import IdentificationStage
|
||||
from authentik.stages.identification.signals import identification_failed
|
||||
from authentik.stages.password.stage import authenticate
|
||||
|
||||
|
||||
class LoginChallengeMixin:
|
||||
"""Base login challenge for Identification stage"""
|
||||
|
||||
|
||||
def get_login_serializers():
|
||||
mapping = {
|
||||
RedirectChallenge().fields["component"].default: RedirectChallenge,
|
||||
}
|
||||
for cls in all_subclasses(LoginChallengeMixin):
|
||||
mapping[cls().fields["component"].default] = cls
|
||||
return mapping
|
||||
|
||||
|
||||
@extend_schema_field(
|
||||
PolymorphicProxySerializer(
|
||||
component_name="LoginChallengeTypes",
|
||||
serializers=get_login_serializers,
|
||||
serializers={
|
||||
RedirectChallenge().fields["component"].default: RedirectChallenge,
|
||||
PlexAuthenticationChallenge().fields["component"].default: PlexAuthenticationChallenge,
|
||||
AppleLoginChallenge().fields["component"].default: AppleLoginChallenge,
|
||||
},
|
||||
resource_type_field_name="component",
|
||||
)
|
||||
)
|
||||
@ -104,7 +96,7 @@ class IdentificationChallengeResponse(ChallengeResponse):
|
||||
if not pre_user:
|
||||
with start_span(
|
||||
op="authentik.stages.identification.validate_invalid_wait",
|
||||
name="Sleep random time on invalid user identifier",
|
||||
description="Sleep random time on invalid user identifier",
|
||||
):
|
||||
# Sleep a random time (between 90 and 210ms) to "prevent" user enumeration attacks
|
||||
sleep(0.030 * SystemRandom().randint(3, 7))
|
||||
@ -146,7 +138,7 @@ class IdentificationChallengeResponse(ChallengeResponse):
|
||||
try:
|
||||
with start_span(
|
||||
op="authentik.stages.identification.authenticate",
|
||||
name="User authenticate call (combo stage)",
|
||||
description="User authenticate call (combo stage)",
|
||||
):
|
||||
user = authenticate(
|
||||
self.stage.request,
|
||||
|
@ -49,7 +49,7 @@ def authenticate(
|
||||
LOGGER.debug("Attempting authentication...", backend=backend_path)
|
||||
with start_span(
|
||||
op="authentik.stages.password.authenticate",
|
||||
name=backend_path,
|
||||
description=backend_path,
|
||||
):
|
||||
user = backend.authenticate(request, **credentials)
|
||||
if user is None:
|
||||
|
@ -38,7 +38,7 @@ LOGGER = get_logger()
|
||||
class FieldTypes(models.TextChoices):
|
||||
"""Field types an Prompt can be"""
|
||||
|
||||
# update website/docs/add-secure-apps/flows-stages/stages/prompt/index.md
|
||||
# update website/docs/flow/stages/prompt/index.md
|
||||
|
||||
# Simple text field
|
||||
TEXT = "text", _("Text: Simple Text input")
|
||||
|
@ -82,5 +82,3 @@ entries:
|
||||
order: 10
|
||||
target: !KeyOf default-authentication-flow-password-binding
|
||||
policy: !KeyOf default-authentication-flow-password-optional
|
||||
attrs:
|
||||
failure_result: true
|
||||
|
@ -1,13 +0,0 @@
|
||||
version: 1
|
||||
metadata:
|
||||
name: Default - Provider invalidation flow
|
||||
entries:
|
||||
- attrs:
|
||||
designation: invalidation
|
||||
name: Logged out of application
|
||||
title: You've logged out of %(app)s.
|
||||
authentication: none
|
||||
identifiers:
|
||||
slug: default-provider-invalidation-flow
|
||||
model: authentik_flows.flow
|
||||
id: flow
|
@ -2,7 +2,7 @@
|
||||
"$schema": "http://json-schema.org/draft-07/schema",
|
||||
"$id": "https://goauthentik.io/blueprints/schema.json",
|
||||
"type": "object",
|
||||
"title": "authentik 2024.8.3 Blueprint schema",
|
||||
"title": "authentik 2024.8.0 Blueprint schema",
|
||||
"required": [
|
||||
"version",
|
||||
"entries"
|
||||
@ -5117,12 +5117,6 @@
|
||||
"title": "Authorization flow",
|
||||
"description": "Flow used when authorizing this provider."
|
||||
},
|
||||
"invalidation_flow": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"title": "Invalidation flow",
|
||||
"description": "Flow used ending the session from a provider."
|
||||
},
|
||||
"property_mappings": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
@ -5293,12 +5287,6 @@
|
||||
"title": "Authorization flow",
|
||||
"description": "Flow used when authorizing this provider."
|
||||
},
|
||||
"invalidation_flow": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"title": "Invalidation flow",
|
||||
"description": "Flow used ending the session from a provider."
|
||||
},
|
||||
"property_mappings": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
@ -5440,12 +5428,6 @@
|
||||
"title": "Authorization flow",
|
||||
"description": "Flow used when authorizing this provider."
|
||||
},
|
||||
"invalidation_flow": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"title": "Invalidation flow",
|
||||
"description": "Flow used ending the session from a provider."
|
||||
},
|
||||
"property_mappings": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
@ -5581,12 +5563,6 @@
|
||||
"title": "Authorization flow",
|
||||
"description": "Flow used when authorizing this provider."
|
||||
},
|
||||
"invalidation_flow": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"title": "Invalidation flow",
|
||||
"description": "Flow used ending the session from a provider."
|
||||
},
|
||||
"property_mappings": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
@ -5712,12 +5688,6 @@
|
||||
"title": "Authorization flow",
|
||||
"description": "Flow used when authorizing this provider."
|
||||
},
|
||||
"invalidation_flow": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"title": "Invalidation flow",
|
||||
"description": "Flow used ending the session from a provider."
|
||||
},
|
||||
"property_mappings": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
@ -5956,10 +5926,6 @@
|
||||
"title": "Url",
|
||||
"description": "Base URL to SCIM requests, usually ends in /v2"
|
||||
},
|
||||
"verify_certificates": {
|
||||
"type": "boolean",
|
||||
"title": "Verify certificates"
|
||||
},
|
||||
"token": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
@ -7601,7 +7567,7 @@
|
||||
"enum": [
|
||||
"urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress",
|
||||
"urn:oasis:names:tc:SAML:2.0:nameid-format:persistent",
|
||||
"urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName",
|
||||
"urn:oasis:names:tc:SAML:2.0:nameid-format:X509SubjectName",
|
||||
"urn:oasis:names:tc:SAML:2.0:nameid-format:WindowsDomainQualifiedName",
|
||||
"urn:oasis:names:tc:SAML:2.0:nameid-format:transient"
|
||||
],
|
||||
@ -12795,12 +12761,6 @@
|
||||
"title": "Authorization flow",
|
||||
"description": "Flow used when authorizing this provider."
|
||||
},
|
||||
"invalidation_flow": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"title": "Invalidation flow",
|
||||
"description": "Flow used ending the session from a provider."
|
||||
},
|
||||
"property_mappings": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
|
@@ -31,7 +31,7 @@ services:
    volumes:
      - redis:/data
  server:
    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.8.3}
    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.8.0}
    restart: unless-stopped
    command: server
    environment:
@@ -52,7 +52,7 @@ services:
      - postgresql
      - redis
  worker:
    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.8.3}
    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.8.0}
    restart: unless-stopped
    command: worker
    environment:
Some files were not shown because too many files have changed in this diff